diff --git a/.github/project.yml b/.github/project.yml index 633f883e..e787d911 100644 --- a/.github/project.yml +++ b/.github/project.yml @@ -1,3 +1,4 @@ +# Retriggering release again release: - current-version: 7.0.0-alpha5.1 - next-version: 7.0.0-SNAPSHOT + current-version: 7.0.0.Final + next-version: 8.0.0-SNAPSHOT diff --git a/.github/workflows/maven-verify.yml b/.github/workflows/maven-verify.yml index a9f5077c..2070974b 100644 --- a/.github/workflows/maven-verify.yml +++ b/.github/workflows/maven-verify.yml @@ -26,3 +26,7 @@ jobs: - name: Verify with Maven run: | mvn -B -f pom.xml clean install verify + + - name: Verify Examples with Maven + run: | + mvn -B -f examples/pom.xml clean install verify diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index ce904c75..9d46ce2a 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -11,7 +11,7 @@ jobs: name: pre release steps: - - uses: radcortez/project-metadata-action@master + - uses: radcortez/project-metadata-action@main name: retrieve project metadata id: metadata with: @@ -22,4 +22,4 @@ jobs: if: contains(steps.metadata.outputs.current-version, 'SNAPSHOT') run: | echo '::error::Cannot release a SNAPSHOT version.' - exit 1 \ No newline at end of file + exit 1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2d002124..cb4ec5eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,7 +10,7 @@ jobs: release: runs-on: ubuntu-latest name: release - if: ${{github.event.pull_request.merged == true}} + if: ${{ github.event.pull_request.merged == true }} steps: - uses: radcortez/project-metadata-action@main @@ -51,7 +51,7 @@ jobs: cat release.properties git checkout ${{github.base_ref}} git rebase release - mvn -B release:perform -Darguments=-DperformRelease -DperformRelease -Prelease + mvn -B release:perform -Prelease -Darguments="-DperformRelease" env: MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} diff --git a/README.md b/README.md index c0b4df70..caf87812 100644 --- a/README.md +++ b/README.md @@ -8,11 +8,11 @@ Provides the Java API for the [Serverless Workflow Specification](https://github With the SDK you can: * Read workflow JSON and YAML definitions -* Write workflow in JSON and YAML format. +* Write workflow definitions in JSON and YAML formats. +* Test your workflow definitions using the reference implementation. -Serverless Workflow Java SDK is **not** a workflow runtime implementation but can be used by Java runtime implementations to parse workflow definitions. -### Status +## Status | Latest Releases | Conformance to spec version | | :---: | :---: | @@ -25,17 +25,18 @@ Serverless Workflow Java SDK is **not** a workflow runtime implementation but ca Note that 6.0.0.Final, which will be the one for specification version 0.9, is skipped intentionally in case someone want to work on it. -### JDK Version +## JDK Version | SDK Version | JDK Version | | :---: | :---: | +| 7.0.0 and after | 17 | | 5.0.0 and after | 11 | | 4.0.x and before | 8 | -### Getting Started +## Getting Started -#### Building SNAPSHOT locally +### Building SNAPSHOT locally To build project and run tests locally: @@ -47,7 +48,7 @@ mvn clean install The project uses [Google's code styleguide](https://google.github.io/styleguide/javaguide.html). Your changes should be automatically formatted during the build. 
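The examples added under `examples/` are verified separately in CI; assuming you also want to build and test them locally, the same command used by the `maven-verify` workflow above should work:

```shell
# builds and verifies the examples reactor (mirrors the CI step added above)
mvn -B -f examples/pom.xml clean install verify
```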
-#### Maven projects: +### Maven projects: Add the following dependencies to your pom.xml `dependencies` section: @@ -55,23 +56,32 @@ Add the following dependencies to your pom.xml `dependencies` section: io.serverlessworkflow serverlessworkflow-api - 7.0.0-SNAPSHOT + 7.0.0.Final ``` -#### Gradle projects: +### Gradle projects: Add the following dependencies to your build.gradle `dependencies` section: ```text -implementation("io.serverlessworkflow:serverlessworkflow-api:7.0.0-SNAPSHOT") +implementation("io.serverlessworkflow:serverlessworkflow-api:7.0.0.Final") ``` -### How to Use +## How to Use -#### Creating from JSON/YAML source +There are, roughly speaking, two kinds of users of this SDK: + * Those interested in implementing their own runtime using Java. + * Those interested in using the provided runtime reference implementation. -You can create a Workflow instance from JSON/YAML source: +### Implementing your own runtime + +For those interested in implementing their own runtime, this SDK provides an easy way to load an in-memory representation of a given workflow definition. +This in-memory representation consists of a hierarchy of POJOs directly generated from the Serverless Workflow specification [schema](api/src/main/resources/schema/workflow.yaml), which ensures the internal representation is aligned with the specification schema. The root of the hierarchy is the `io.serverlessworkflow.api.types.Workflow` class. + +### Reading a workflow definition from a JSON/YAML source + +You can read a Workflow definition from a JSON/YAML source: Let's say you have a simple YAML based workflow definition in a file name `simple.yaml` located in your working dir: @@ -93,7 +103,7 @@ do: ``` -To parse it and create a Workflow instance you can do: +To parse it and get a Workflow instance you can do: ``` java @@ -102,10 +112,20 @@ try (InputStream in = new FileInputStream("simple.yaml")) { // Once you have the Workflow instance you can use its API to inspect it } +By default, workflows are not validated against the schema (performance being the priority). If you want to enable validation, you can do so as follows: + +``` java +try (InputStream in = new FileInputStream("simple.yaml")) { + Workflow workflow = WorkflowReader.validation().readWorkflow(in, WorkflowFormat.YAML); + // Once you have the Workflow instance you can use its API to inspect it +} +``` -#### Writing a workflow +For additional reading helper methods, including one to read a workflow definition from the classpath, check the [WorkflowReader](api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java) class. -Given a workflow definition, you can store it using JSON or YAML format. +### Writing a workflow definition to a JSON/YAML target + +Given a Workflow instance, you can store it using JSON or YAML format. For example, to store a workflow using json format in a file called `simple.json`, you write ``` java @@ -113,4 +133,10 @@ try (OutputStream out = new FileOutputStream("simple.json")) { WorkflowWriter.writeWorkflow(out, workflow, WorkflowFormat.JSON); } -``` \ No newline at end of file +``` +For additional writing helper methods, check the [WorkflowWriter](api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java) class. + +### Reference implementation + +The reference implementation provides a ready-to-use runtime that supports the Serverless Workflow Specification. It includes a workflow execution engine, validation utilities, and illustrative examples to help you quickly test and deploy your workflows.
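As a quick taste of the reference implementation, here is a minimal blocking sketch adapted from the `examples/simpleGet` module added in this repository; it assumes a `get.yaml` definition is available on the classpath and that its input takes a `petId` parameter:

``` java
try (WorkflowApplication app = WorkflowApplication.builder().build()) {
  // parse the definition, run one instance with the given input and wait for the result
  var output =
      app.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml"))
          .instance(Map.of("petId", 10))
          .start()
          .join();
  System.out.println(output);
}
```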
For details on usage, configuration, and supported features, see [readme](impl/README.md). + diff --git a/api/pom.xml b/api/pom.xml index 466a2754..69f8c2f5 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -4,7 +4,7 @@ io.serverlessworkflow serverlessworkflow-parent - 7.0.0-alpha5.1 + 8.0.0-SNAPSHOT serverlessworkflow-api @@ -21,6 +21,10 @@ com.fasterxml.jackson.core jackson-core + + com.networknt + json-schema-validator + com.fasterxml.jackson.core jackson-databind @@ -108,7 +112,7 @@ io.serverlessworkflow - custom-generator + serverless-workflow-custom-generator ${project.version} diff --git a/api/src/main/java/io/serverlessworkflow/api/DirectReader.java b/api/src/main/java/io/serverlessworkflow/api/DirectReader.java new file mode 100644 index 00000000..83fe0550 --- /dev/null +++ b/api/src/main/java/io/serverlessworkflow/api/DirectReader.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.api; + +import io.serverlessworkflow.api.types.Workflow; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; + +class DirectReader implements WorkflowReaderOperations { + + @Override + public Workflow read(InputStream input, WorkflowFormat format) throws IOException { + return format.mapper().readValue(input, Workflow.class); + } + + @Override + public Workflow read(Reader input, WorkflowFormat format) throws IOException { + return format.mapper().readValue(input, Workflow.class); + } + + @Override + public Workflow read(byte[] input, WorkflowFormat format) throws IOException { + return format.mapper().readValue(input, Workflow.class); + } + + @Override + public Workflow read(String input, WorkflowFormat format) throws IOException { + return format.mapper().readValue(input, Workflow.class); + } +} diff --git a/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java b/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java new file mode 100644 index 00000000..25481d5c --- /dev/null +++ b/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.api; + +import com.fasterxml.jackson.databind.JsonNode; +import com.networknt.schema.InputFormat; +import com.networknt.schema.JsonSchema; +import com.networknt.schema.JsonSchemaFactory; +import com.networknt.schema.SchemaValidatorsConfig; +import com.networknt.schema.SpecVersion.VersionFlag; +import com.networknt.schema.ValidationMessage; +import io.serverlessworkflow.api.types.Workflow; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.Set; +import java.util.stream.Collectors; + +class ValidationReader implements WorkflowReaderOperations { + private final JsonSchema schemaObject; + + ValidationReader() { + try (InputStream input = + Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("schema/workflow.yaml")) { + this.schemaObject = + JsonSchemaFactory.getInstance(VersionFlag.V7) + .getSchema(input, InputFormat.YAML, SchemaValidatorsConfig.builder().build()); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public Workflow read(InputStream input, WorkflowFormat format) throws IOException { + return validate(format.mapper().readValue(input, JsonNode.class), format); + } + + @Override + public Workflow read(Reader input, WorkflowFormat format) throws IOException { + return validate(format.mapper().readValue(input, JsonNode.class), format); + } + + @Override + public Workflow read(byte[] input, WorkflowFormat format) throws IOException { + return validate(format.mapper().readValue(input, JsonNode.class), format); + } + + @Override + public Workflow read(String input, WorkflowFormat format) throws IOException { + return validate(format.mapper().readValue(input, JsonNode.class), format); + } + + private Workflow validate(JsonNode value, WorkflowFormat format) { + Set validationErrors = schemaObject.validate(value); + if (!validationErrors.isEmpty()) { + throw new IllegalArgumentException( + validationErrors.stream() + .map(ValidationMessage::toString) + .collect(Collectors.joining("\n"))); + } + return format.mapper().convertValue(value, Workflow.class); + } +} diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java index 4decc696..6868a6dc 100644 --- a/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java +++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java @@ -16,58 +16,98 @@ package io.serverlessworkflow.api; import io.serverlessworkflow.api.types.Workflow; -import java.io.ByteArrayInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.Reader; -import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Path; public class WorkflowReader { public static Workflow readWorkflow(InputStream input, WorkflowFormat format) throws IOException { - return format.mapper().readValue(input, Workflow.class); + return defaultReader().read(input, format); } public static Workflow readWorkflow(Reader input, WorkflowFormat format) throws IOException { - return format.mapper().readValue(input, Workflow.class); + return defaultReader().read(input, format); } - public static Workflow readWorkflow(Path path, WorkflowFormat format) throws IOException { - return format.mapper().readValue(Files.readAllBytes(path), Workflow.class); + public static Workflow readWorkflow(byte[] input, WorkflowFormat format) throws IOException { + return 
defaultReader().read(input, format); } - public static Workflow readWorkflow(byte[] content, WorkflowFormat format) throws IOException { - try (InputStream input = new ByteArrayInputStream(content)) { - return readWorkflow(input, format); - } + public static Workflow readWorkflow(Path path) throws IOException { + return readWorkflow(path, WorkflowFormat.fromPath(path), defaultReader()); + } + + public static Workflow readWorkflow(Path path, WorkflowFormat format) throws IOException { + return readWorkflow(path, format, defaultReader()); } - public static Workflow readWorkflowFromString(String content, WorkflowFormat format) + public static Workflow readWorkflowFromString(String input, WorkflowFormat format) throws IOException { - try (Reader reader = new StringReader(content)) { - return readWorkflow(reader, format); - } + return defaultReader().read(input, format); } public static Workflow readWorkflowFromClasspath(String classpath) throws IOException { + return readWorkflowFromClasspath(classpath, defaultReader()); + } + + public static Workflow readWorkflowFromClasspath( + String classpath, ClassLoader cl, WorkflowFormat format) throws IOException { + return readWorkflowFromClasspath(classpath, defaultReader()); + } + + public static Workflow readWorkflow(Path path, WorkflowReaderOperations reader) + throws IOException { + return readWorkflow(path, WorkflowFormat.fromPath(path), reader); + } + + public static Workflow readWorkflow( + Path path, WorkflowFormat format, WorkflowReaderOperations reader) throws IOException { + return reader.read(Files.readAllBytes(path), format); + } + + public static Workflow readWorkflowFromClasspath( + String classpath, WorkflowReaderOperations reader) throws IOException { return readWorkflowFromClasspath( classpath, Thread.currentThread().getContextClassLoader(), - WorkflowFormat.fromFileName(classpath)); + WorkflowFormat.fromFileName(classpath), + reader); } public static Workflow readWorkflowFromClasspath( - String classpath, ClassLoader cl, WorkflowFormat format) throws IOException { + String classpath, ClassLoader cl, WorkflowFormat format, WorkflowReaderOperations reader) + throws IOException { try (InputStream in = cl.getResourceAsStream(classpath)) { if (in == null) { throw new FileNotFoundException(classpath); } - return readWorkflow(in, format); + return reader.read(in, format); } } + public static WorkflowReaderOperations noValidation() { + return NoValidationHolder.instance; + } + + public static WorkflowReaderOperations validation() { + return ValidationHolder.instance; + } + + private static class NoValidationHolder { + private static final WorkflowReaderOperations instance = new DirectReader(); + } + + private static class ValidationHolder { + private static final WorkflowReaderOperations instance = new ValidationReader(); + } + + private static WorkflowReaderOperations defaultReader() { + return NoValidationHolder.instance; + } + private WorkflowReader() {} } diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java new file mode 100644 index 00000000..7049aba0 --- /dev/null +++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java @@ -0,0 +1,31 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.api; + +import io.serverlessworkflow.api.types.Workflow; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; + +public interface WorkflowReaderOperations { + Workflow read(InputStream input, WorkflowFormat format) throws IOException; + + Workflow read(Reader input, WorkflowFormat format) throws IOException; + + Workflow read(byte[] input, WorkflowFormat format) throws IOException; + + Workflow read(String input, WorkflowFormat format) throws IOException; +} diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java index 29115396..5980dee6 100644 --- a/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java +++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java @@ -19,7 +19,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; -import java.io.StringWriter; import java.io.Writer; import java.nio.file.Files; import java.nio.file.Path; @@ -49,10 +48,7 @@ public static void writeWorkflow(Path output, Workflow workflow, WorkflowFormat public static String workflowAsString(Workflow workflow, WorkflowFormat format) throws IOException { - try (Writer writer = new StringWriter()) { - writeWorkflow(writer, workflow, format); - return writer.toString(); - } + return format.mapper().writeValueAsString(workflow); } public static byte[] workflowAsBytes(Workflow workflow, WorkflowFormat format) diff --git a/api/src/main/resources/schema/workflow.yaml b/api/src/main/resources/schema/workflow.yaml index aecbeacb..b59e2f3a 100644 --- a/api/src/main/resources/schema/workflow.yaml +++ b/api/src/main/resources/schema/workflow.yaml @@ -241,33 +241,42 @@ $defs: properties: document: $ref: '#/$defs/externalResource' - title: WithAsyncAPIDocument + title: AsyncAPIDocument description: The document that defines the AsyncAPI operation to call. - operationRef: + channel: type: string - title: WithAsyncAPIOperation + title: With + description: The name of the channel on which to perform the operation. Used only in case the referenced document uses AsyncAPI v2.6.0. + operation: + type: string + title: AsyncAPIOperation description: A reference to the AsyncAPI operation to call. server: + $ref: '#/$defs/asyncApiServer' + title: AsyncAPIServer + description: An object used to configure to the server to call the specified AsyncAPI operation on. + protocol: type: string - title: WithAsyncAPIServer - description: A a reference to the server to call the specified AsyncAPI operation on. If not set, default to the first server matching the operation's channel. + title: AsyncApiProtocol + description: The protocol to use to select the target server. + enum: [ amqp, amqp1, anypointmq, googlepubsub, http, ibmmq, jms, kafka, mercure, mqtt, mqtt5, nats, pulsar, redis, sns, solace, sqs, stomp, ws ] message: - type: string - title: WithAsyncAPIMessage - description: The name of the message to use. If not set, defaults to the first message defined by the operation. 
- binding: - type: string - title: WithAsyncAPIBinding - description: The name of the binding to use. If not set, defaults to the first binding defined by the operation. - payload: - type: object - title: WithAsyncAPIPayload - description: The payload to call the AsyncAPI operation with, if any. + $ref: '#/$defs/asyncApiOutboundMessage' + title: AsyncApiMessage + description: An object used to configure the message to publish using the target operation. + subscription: + $ref: '#/$defs/asyncApiSubscription' + title: AsyncApiSubscription + description: An object used to configure the subscription to messages consumed using the target operation. authentication: $ref: '#/$defs/referenceableAuthenticationPolicy' - title: WithAsyncAPIAuthentication + title: AsyncAPIAuthentication description: The authentication policy, if any, to use when calling the AsyncAPI operation. - required: [ document, operationRef ] + oneOf: + - required: [ document, operation, message ] + - required: [ document, operation, subscription ] + - required: [ document, channel, message ] + - required: [ document, channel, subscription ] unevaluatedProperties: false - title: CallGRPC description: Defines the GRPC call to perform. @@ -341,29 +350,33 @@ $defs: properties: method: type: string - title: WithHTTPMethod + title: HTTPMethod description: The HTTP method of the HTTP request to perform. endpoint: - title: WithHTTPEndpoint + title: HTTPEndpoint description: The HTTP endpoint to send the request to. $ref: '#/$defs/endpoint' headers: type: object - title: WithHTTPHeaders + title: HTTPHeaders description: A name/value mapping of the headers, if any, of the HTTP request to perform. body: - title: WithHTTPBody + title: HTTPBody description: The body, if any, of the HTTP request to perform. query: type: object - title: WithHTTPQuery + title: HTTPQuery description: A name/value mapping of the query parameters, if any, of the HTTP request to perform. additionalProperties: true output: type: string - title: WithHTTPOutput + title: HTTPOutput description: The http call output format. Defaults to 'content'. enum: [ raw, content, response ] + redirect: + type: boolean + title: HttpRedirect + description: Specifies whether redirection status codes (`300–399`) should be treated as errors. required: [ method, endpoint ] unevaluatedProperties: false - title: CallOpenAPI @@ -403,6 +416,10 @@ $defs: enum: [ raw, content, response ] title: WithOpenAPIOutput description: The http call output format. Defaults to 'content'. + redirect: + type: boolean + title: HttpRedirect + description: Specifies whether redirection status codes (`300–399`) should be treated as errors. required: [ document, operationId ] unevaluatedProperties: false - title: CallFunction @@ -537,7 +554,17 @@ $defs: $ref: '#/$defs/eventConsumptionStrategy' title: ListenTo description: Defines the event(s) to listen to. + read: + type: string + enum: [ data, envelope, raw ] + default: data + title: ListenAndReadAs + description: Specifies how events are read during the listen operation. required: [ to ] + foreach: + $ref: '#/$defs/subscriptionIterator' + title: ListenIterator + description: Configures the iterator, if any, for processing consumed event(s). raiseTask: type: object $ref: '#/$defs/taskBase' @@ -581,6 +608,12 @@ $defs: default: true title: AwaitProcessCompletion description: Whether to await the process completion before continuing. + return: + type: string + title: ProcessReturnType + description: Configures the output of the process. 
+ enum: [ stdout, stderr, code, all, none ] + default: stdout oneOf: - title: RunContainer description: Enables the execution of external processes encapsulated within a containerized environment. @@ -595,6 +628,10 @@ $defs: type: string title: ContainerImage description: The name of the container image to run. + name: + type: string + title: ContainerName + description: A runtime expression, if any, used to give specific name to the container. command: type: string title: ContainerCommand @@ -611,6 +648,10 @@ $defs: type: object title: ContainerEnvironment description: A key/value mapping of the environment variables, if any, to use when running the configured process. + lifetime: + $ref: '#/$defs/containerLifetime' + title: ContainerLifetime + description: An object, if any, used to configure the container's lifetime required: [ image ] required: [ container ] - title: RunScript @@ -1257,6 +1298,12 @@ $defs: - title: ExpressionDataSchema $ref: '#/$defs/runtimeExpression' description: An expression based event data schema. + data: + title: EventData + description: The event's payload data + anyOf: + - $ref: '#/$defs/runtimeExpression' + - {} additionalProperties: true eventConsumptionStrategy: type: object @@ -1276,11 +1323,22 @@ $defs: - title: AnyEventConsumptionStrategy properties: any: - type: array - title: AnyEventConsumptionStrategyConfiguration - description: A list containing any of the events to consume. - items: - $ref: '#/$defs/eventFilter' + type: array + title: AnyEventConsumptionStrategyConfiguration + description: A list containing any of the events to consume. + items: + $ref: '#/$defs/eventFilter' + until: + oneOf: + - type: string + title: AnyEventUntilCondition + description: A runtime expression condition evaluated after consuming an event and which determines whether or not to continue listening. + - allOf: + - $ref: '#/$defs/eventConsumptionStrategy' + description: The strategy that defines the event(s) to consume to stop listening. + - properties: + until: false + title: AnyEventUntilConsumed required: [ any ] - title: OneEventConsumptionStrategy properties: @@ -1522,16 +1580,179 @@ $defs: catalog: type: object title: Catalog - description: The definition of a resource catalog + description: The definition of a resource catalog. unevaluatedProperties: false properties: endpoint: $ref: '#/$defs/endpoint' title: CatalogEndpoint - description: The root URL where the catalog is hosted + description: The root URL where the catalog is hosted. required: [ endpoint ] runtimeExpression: type: string title: RuntimeExpression description: A runtime expression. pattern: "^\\s*\\$\\{.+\\}\\s*$" + containerLifetime: + type: object + title: ContainerLifetime + description: The configuration of a container's lifetime + unevaluatedProperties: false + properties: + cleanup: + type: string + title: ContainerCleanupPolicy + description: The container cleanup policy to use + enum: [ always, never, eventually ] + default: never + after: + $ref: '#/$defs/duration' + title: ContainerLifetimeDuration + description: The duration after which to cleanup the container, in case the cleanup policy has been set to 'eventually' + required: [ cleanup ] + if: + properties: + cleanup: + const: eventually + then: + required: [ after ] + else: + not: + required: [ after ] + processResult: + type: object + title: ProcessResult + description: The object returned by a run task when its return type has been set 'all'. 
+ unevaluatedProperties: false + properties: + code: + type: integer + title: ProcessExitCode + description: The process's exit code. + stdout: + type: string + title: ProcessStandardOutput + description: The content of the process's STDOUT. + stderr: + type: string + title: ProcessStandardError + description: The content of the process's STDERR. + required: [ code, stdout, stderr ] + asyncApiServer: + type: object + title: AsyncApiServer + description: Configures the target server of an AsyncAPI operation. + unevaluatedProperties: false + properties: + name: + type: string + title: AsyncApiServerName + description: The target server's name. + variables: + type: object + title: AsyncApiServerVariables + description: The target server's variables, if any. + required: [ name ] + asyncApiOutboundMessage: + type: object + title: AsyncApiOutboundMessage + description: An object used to configure the message to publish using the target operation. + unevaluatedProperties: false + properties: + payload: + type: object + title: AsyncApiMessagePayload + description: The message's payload, if any. + additionalProperties: true + headers: + type: object + title: AsyncApiMessageHeaders + description: The message's headers, if any. + additionalProperties: true + asyncApiInboundMessage: + type: object + title: AsyncApiInboundMessage + description: Represents a message counsumed by an AsyncAPI subscription. + allOf: + - $ref: '#/$defs/asyncApiOutboundMessage' + properties: + correlationId: + type: string + title: AsyncApiMessageCorrelationId + description: The message's correlation id, if any. + asyncApiSubscription: + type: object + title: AsyncApiSubscription + description: An object used to configure the subscription to messages consumed using the target operation. + unevaluatedProperties: false + properties: + filter: + $ref: '#/$defs/runtimeExpression' + title: AsyncApiSubscriptionCorrelation + description: A runtime expression, if any, used to filter consumed messages. + consume: + $ref: '#/$defs/asyncApiMessageConsumptionPolicy' + title: AsyncApiMessageConsumptionPolicy + description: An object used to configure the subscription's message consumption policy. + foreach: + $ref: '#/$defs/subscriptionIterator' + title: AsyncApiSubscriptionIterator + description: Configures the iterator, if any, for processing consumed messages(s). + required: [ consume ] + asyncApiMessageConsumptionPolicy: + type: object + title: AsyncApiMessageConsumptionPolicy + description: An object used to configure a subscription's message consumption policy. + unevaluatedProperties: false + properties: + for: + $ref: '#/$defs/duration' + title: AsyncApiMessageConsumptionPolicyFor + description: Specifies the time period over which messages will be consumed. + oneOf: + - properties: + amount: + type: integer + description: The amount of (filtered) messages to consume before disposing of the subscription. + title: AsyncApiMessageConsumptionPolicyAmount + required: [ amount ] + - properties: + while: + $ref: '#/$defs/runtimeExpression' + description: A runtime expression evaluated after each consumed (filtered) message to decide if message consumption should continue. + title: AsyncApiMessageConsumptionPolicyWhile + required: [ while ] + - properties: + until: + $ref: '#/$defs/runtimeExpression' + description: A runtime expression evaluated before each consumed (filtered) message to decide if message consumption should continue. 
+ title: AsyncApiMessageConsumptionPolicyUntil + required: [ until ] + subscriptionIterator: + type: object + title: SubscriptionIterator + description: Configures the iteration over each item (event or message) consumed by a subscription. + unevaluatedProperties: false + properties: + item: + type: string + title: SubscriptionIteratorItem + description: The name of the variable used to store the current item being enumerated. + default: item + at: + type: string + title: SubscriptionIteratorIndex + description: The name of the variable used to store the index of the current item being enumerated. + default: index + do: + $ref: '#/$defs/taskList' + title: SubscriptionIteratorTasks + description: The tasks to perform for each consumed item. + output: + $ref: '#/$defs/output' + title: SubscriptionIteratorOutput + description: An object, if any, used to customize the item's output and to document its schema. + export: + $ref: '#/$defs/export' + title: SubscriptionIteratorExport + description: An object, if any, used to customize the content of the workflow context. \ No newline at end of file diff --git a/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java b/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java index 81d10ecf..39d7045b 100644 --- a/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java +++ b/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java @@ -17,6 +17,7 @@ import static io.serverlessworkflow.api.WorkflowReader.readWorkflow; import static io.serverlessworkflow.api.WorkflowReader.readWorkflowFromClasspath; +import static io.serverlessworkflow.api.WorkflowReader.validation; import static io.serverlessworkflow.api.WorkflowWriter.workflowAsBytes; import static io.serverlessworkflow.api.WorkflowWriter.workflowAsString; import static io.serverlessworkflow.api.WorkflowWriter.writeWorkflow; @@ -53,13 +54,13 @@ public class FeaturesTest { "features/set.yaml", "features/switch.yaml", "features/try.yaml", - "features/listen.yaml", + "features/listen-to-any.yaml", "features/callFunction.yaml", "features/callCustomFunction.yaml", "features/call-http-query-parameters.yaml" }) public void testSpecFeaturesParsing(String workflowLocation) throws IOException { - Workflow workflow = readWorkflowFromClasspath(workflowLocation); + Workflow workflow = readWorkflowFromClasspath(workflowLocation, validation()); assertWorkflow(workflow); assertWorkflowEquals(workflow, writeAndReadInMemory(workflow)); } diff --git a/api/src/test/resources/features/callCustomFunction.yaml b/api/src/test/resources/features/callCustomFunction.yaml index 4161cf41..fbb636b4 100644 --- a/api/src/test/resources/features/callCustomFunction.yaml +++ b/api/src/test/resources/features/callCustomFunction.yaml @@ -1,27 +1,25 @@ document: - dsl: 1.0.0-alpha5 - namespace: test - name: call-example - version: 0.1.0 -schedule: - cron: 0 8 * * * + dsl: '1.0.0-alpha5' + namespace: samples + name: call-custom-function-inline + version: '0.1.0' +use: + functions: + getPetById: + input: + schema: + document: + type: object + properties: + petId: + type: string + required: [ petId ] + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} do: -- getData: - call: http - with: - method: get - endpoint: https://api.agify.io?name=meelad - output: - as: ".data.reading" -- filterData: - for: - in: ".data.reading" - each: reading - do: - - log: - call: https://raw.githubusercontent.com/serverlessworkflow/catalog/main/functions/log/1.0.0/function.yaml - with: - level: 
information - format: "{TIMESTAMP} [{LEVEL}] ({CONTEXT}): {MESSAGE}" - message: Hello, world! - timestamp: true \ No newline at end of file + - getPet: + call: getPetById + with: + petId: 69 \ No newline at end of file diff --git a/api/src/test/resources/features/callOpenAPI.yaml b/api/src/test/resources/features/callOpenAPI.yaml index 1a1d0c56..82843c5d 100644 --- a/api/src/test/resources/features/callOpenAPI.yaml +++ b/api/src/test/resources/features/callOpenAPI.yaml @@ -8,7 +8,7 @@ do: call: openapi with: document: - uri: "https://petstore.swagger.io/v2/swagger.json" + endpoint: "https://petstore.swagger.io/v2/swagger.json" operationId: findPetsByStatus parameters: status: ${ .status } diff --git a/api/src/test/resources/features/listen-to-any.yaml b/api/src/test/resources/features/listen-to-any.yaml new file mode 100644 index 00000000..fa8794d3 --- /dev/null +++ b/api/src/test/resources/features/listen-to-any.yaml @@ -0,0 +1,16 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: listen-to-any + version: '0.1.0' +do: + - callDoctor: + listen: + to: + any: + - with: + type: com.fake-hospital.vitals.measurements.temperature + data: ${ .temperature > 38 } + - with: + type: com.fake-hospital.vitals.measurements.bpm + data: ${ .bpm < 60 or .bpm > 100 } \ No newline at end of file diff --git a/api/src/test/resources/features/listen.yaml b/api/src/test/resources/features/listen.yaml deleted file mode 100644 index 1c56c229..00000000 --- a/api/src/test/resources/features/listen.yaml +++ /dev/null @@ -1,13 +0,0 @@ -document: - dsl: 1.0.0-alpha1 - namespace: default - name: listen-task - version: 1.0.0 -do: - - listenToSomething: - listen: - to: - any: - - with: - source: pepe - type: pepe \ No newline at end of file diff --git a/custom-generator/pom.xml b/custom-generator/pom.xml index 8444bb2a..3660e286 100644 --- a/custom-generator/pom.xml +++ b/custom-generator/pom.xml @@ -1,20 +1,22 @@ - - 4.0.0 - - io.serverlessworkflow - serverlessworkflow-parent - 7.0.0-alpha5.1 - - custom-generator - + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-parent + 8.0.0-SNAPSHOT + + serverless-workflow-custom-generator + Serverless Workflow :: Custom Generator + org.jsonschema2pojo jsonschema2pojo-core - - - - + + + + com.spotify.fmt fmt-maven-plugin @@ -34,6 +36,6 @@ - - + + \ No newline at end of file diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java index d14ba357..622efcbb 100644 --- a/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java +++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java @@ -60,6 +60,7 @@ class AllAnyOneOfSchemaRule extends SchemaRule { } private static final String REF = "$ref"; + private static final String TITLE = "title"; private static final String PATTERN = "pattern"; private enum Format { @@ -154,6 +155,16 @@ public JType apply( && allOfTypes.isEmpty() && refType.isPresent()) { javaType = refType.get(); + } else if (!schemaNode.has("properties") + && oneOfTypes.isEmpty() + && allOfTypes.size() == 1 + && refType.isEmpty()) { + javaType = allOfTypes.get(0).getType(); + } else if (!schemaNode.has("properties") + && oneOfTypes.size() == 1 + && allOfTypes.isEmpty() + && refType.isEmpty()) { + javaType = oneOfTypes.get(0).getType(); } else { JPackage container = generatableType.getPackage(); javaType = ruleFactory.getTypeRule().apply(nodeName, 
schemaNode, parent, container, schema); @@ -468,25 +479,52 @@ private void unionType( Schema parentSchema, Collection types) { if (schemaNode.has(prefix)) { + ArrayNode array = (ArrayNode) schemaNode.get(prefix); + if (schemaNode.has(TITLE)) { + nodeName = schemaNode.get(TITLE).asText(); + } int i = 0; - for (JsonNode oneOf : (ArrayNode) schemaNode.get(prefix)) { - String ref = parentSchema.getId().toString() + '/' + prefix + '/' + i++; - Schema schema = - ruleFactory - .getSchemaStore() - .create( - URI.create(ref), - ruleFactory.getGenerationConfig().getRefFragmentPathDelimiters()); - types.add( - new JTypeWrapper( - schema.isGenerated() - ? schema.getJavaType() - : apply(nodeName, oneOf, parent, generatableType.getPackage(), schema), - oneOf)); + for (JsonNode oneOf : array) { + if (!ignoreNode(oneOf)) { + String ref = parentSchema.getId().toString() + '/' + prefix + '/' + i++; + Schema schema = + ruleFactory + .getSchemaStore() + .create( + URI.create(ref), + ruleFactory.getGenerationConfig().getRefFragmentPathDelimiters()); + types.add( + new JTypeWrapper( + schema.isGenerated() + ? schema.getJavaType() + : apply(nodeName, oneOf, parent, generatableType.getPackage(), schema), + oneOf)); + } } } } + private static boolean ignoreNode(JsonNode node) { + return allRequired(node) || allRemoveProperties(node); + } + + private static boolean allRemoveProperties(JsonNode node) { + if (node.size() == 1 && node.has("properties")) { + JsonNode propsNode = node.get("properties"); + for (JsonNode propNode : propsNode) { + if (!propNode.isBoolean() || propNode.asBoolean()) { + return false; + } + } + return true; + } + return false; + } + + private static boolean allRequired(JsonNode node) { + return node.size() == 1 && node.has("required"); + } + private Optional refType( String nodeName, JsonNode schemaNode, @@ -507,7 +545,7 @@ private Optional refType( schema.isGenerated() ? schema.getJavaType() : apply( - nameFromRef(ref, nodeName), + nameFromRef(ref, nodeName, schemaNode), schema.getContent(), parent, generatableType, @@ -549,7 +587,10 @@ private String pattern(JsonNode node) { return format != null ? format.pattern() : getFromNode(node, PATTERN); } - private String nameFromRef(String ref, String nodeName) { + private String nameFromRef(String ref, String nodeName, JsonNode schemaNode) { + if (schemaNode.has(TITLE)) { + return schemaNode.get(TITLE).asText(); + } if ("#".equals(ref)) { return nodeName; } diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java new file mode 100644 index 00000000..6411e886 --- /dev/null +++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.generator; + +import com.fasterxml.jackson.databind.JsonNode; +import com.sun.codemodel.JClassAlreadyExistsException; +import com.sun.codemodel.JDefinedClass; +import com.sun.codemodel.JPackage; +import org.jsonschema2pojo.GenerationConfig; +import org.jsonschema2pojo.util.NameHelper; + +public class RefNameHelper extends NameHelper { + + public RefNameHelper(GenerationConfig generationConfig) { + super(generationConfig); + } + + @Override + public String getUniqueClassName(String nodeName, JsonNode node, JPackage _package) { + String className = getClassName(nodeName, node, _package); + try { + JDefinedClass _class = _package._class(className); + _package.remove(_class); + return className; + } catch (JClassAlreadyExistsException ex) { + return super.getUniqueClassName(nodeName, null, _package); + } + } +} diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java index 01263033..f101fb8d 100644 --- a/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java +++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java @@ -18,10 +18,25 @@ import com.sun.codemodel.JClassContainer; import com.sun.codemodel.JDefinedClass; import com.sun.codemodel.JType; +import org.jsonschema2pojo.GenerationConfig; import org.jsonschema2pojo.rules.Rule; import org.jsonschema2pojo.rules.RuleFactory; +import org.jsonschema2pojo.util.NameHelper; public class UnreferencedFactory extends RuleFactory { + + private NameHelper refNameHelper; + + public UnreferencedFactory() { + this.refNameHelper = new RefNameHelper(getGenerationConfig()); + } + + @Override + public void setGenerationConfig(final GenerationConfig generationConfig) { + super.setGenerationConfig(generationConfig); + this.refNameHelper = new RefNameHelper(generationConfig); + } + @Override public Rule getSchemaRule() { return new AllAnyOneOfSchemaRule(this); @@ -36,4 +51,9 @@ public Rule getTypeRule() { public Rule getAdditionalPropertiesRule() { return new UnevaluatedPropertiesRule(this); } + + @Override + public NameHelper getNameHelper() { + return refNameHelper; + } } diff --git a/examples/events/pom.xml b/examples/events/pom.xml new file mode 100644 index 00000000..143a7967 --- /dev/null +++ b/examples/events/pom.xml @@ -0,0 +1,21 @@ + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-examples + 8.0.0-SNAPSHOT + + Serverless Workflow :: Examples :: Events + serverlessworkflow-examples-events + + + io.serverlessworkflow + serverlessworkflow-impl-core + + + org.slf4j + slf4j-simple + + + \ No newline at end of file diff --git a/examples/events/src/main/java/events/EventExample.java b/examples/events/src/main/java/events/EventExample.java new file mode 100644 index 00000000..628782fb --- /dev/null +++ b/examples/events/src/main/java/events/EventExample.java @@ -0,0 +1,50 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package events; + +import io.serverlessworkflow.api.WorkflowReader; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.WorkflowInstance; +import java.io.IOException; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EventExample { + + private static final Logger logger = LoggerFactory.getLogger(EventExample.class); + + public static void main(String[] args) throws IOException { + try (WorkflowApplication appl = WorkflowApplication.builder().build()) { + WorkflowDefinition listenDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("listen.yaml")); + WorkflowDefinition emitDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("emit.yaml")); + WorkflowInstance waitingInstance = listenDefinition.instance(Map.of()); + waitingInstance + .start() + .thenAccept(node -> logger.info("Waiting instance completed with result {}", node)); + logger.info("Listen instance waiting for proper event, Status {}", waitingInstance.status()); + logger.info("Publishing event with temperature 35"); + emitDefinition.instance(Map.of("temperature", 35)).start().join(); + logger.info( + "Listen instance still waiting for proper event, Status {}", waitingInstance.status()); + logger.info("Publishing event with temperature 39"); + emitDefinition.instance(Map.of("temperature", 39)).start().join(); + } + } +} diff --git a/examples/events/src/main/resources/emit.yaml b/examples/events/src/main/resources/emit.yaml new file mode 100644 index 00000000..4d14b030 --- /dev/null +++ b/examples/events/src/main/resources/emit.yaml @@ -0,0 +1,14 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: emit + version: '0.1.0' +do: + - emitEvent: + emit: + event: + with: + source: https://hospital.com + type: com.fake-hospital.vitals.measurements.temperature + data: + temperature: ${.temperature} \ No newline at end of file diff --git a/examples/events/src/main/resources/listen.yaml b/examples/events/src/main/resources/listen.yaml new file mode 100644 index 00000000..e49cea92 --- /dev/null +++ b/examples/events/src/main/resources/listen.yaml @@ -0,0 +1,13 @@ +document: + dsl: '1.0.0-alpha5' + namespace: examples + name: listen + version: '0.1.0' +do: + - callDoctor: + listen: + to: + one: + with: + type: com.fake-hospital.vitals.measurements.temperature + data: ${ .temperature > 38 } \ No newline at end of file diff --git a/examples/pom.xml b/examples/pom.xml new file mode 100644 index 00000000..238ee4b1 --- /dev/null +++ b/examples/pom.xml @@ -0,0 +1,35 @@ + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-parent + 8.0.0-SNAPSHOT + + Serverless Workflow :: Examples + serverlessworkflow-examples + pom + + + + io.serverlessworkflow + serverlessworkflow-impl-core + ${project.version} + + + io.serverlessworkflow + serverlessworkflow-impl-http + ${project.version} + + + org.slf4j + slf4j-simple + ${version.org.slf4j} + + + + + simpleGet + events + + \ No newline at end of file diff --git a/examples/simpleGet/pom.xml b/examples/simpleGet/pom.xml new file mode 100644 index 00000000..923001ae --- /dev/null +++ b/examples/simpleGet/pom.xml @@ -0,0 +1,25 @@ + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-examples + 8.0.0-SNAPSHOT + + serverlessworkflow-examples-simpleGet + Serverless Workflow :: Examples :: SimpleGet + + 
+ io.serverlessworkflow + serverlessworkflow-impl-core + + + io.serverlessworkflow + serverlessworkflow-impl-http + + + org.slf4j + slf4j-simple + + + \ No newline at end of file diff --git a/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java new file mode 100644 index 00000000..233d121f --- /dev/null +++ b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java @@ -0,0 +1,38 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl; + +import io.serverlessworkflow.api.WorkflowReader; +import java.io.IOException; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class BlockingExample { + + private static final Logger logger = LoggerFactory.getLogger(BlockingExample.class); + + public static void main(String[] args) throws IOException { + try (WorkflowApplication appl = WorkflowApplication.builder().build()) { + logger.info( + "Workflow output is {}", + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml")) + .instance(Map.of("petId", 10)) + .start() + .join()); + } + } +} diff --git a/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java new file mode 100644 index 00000000..cb663c1a --- /dev/null +++ b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java @@ -0,0 +1,37 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl; + +import io.serverlessworkflow.api.WorkflowReader; +import java.io.IOException; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class NotBlockingExample { + + private static final Logger logger = LoggerFactory.getLogger(NotBlockingExample.class); + + public static void main(String[] args) throws IOException { + try (WorkflowApplication appl = WorkflowApplication.builder().build()) { + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml")) + .instance(Map.of("petId", 10)) + .start() + .thenAccept(node -> logger.info("Workflow output is {}", node)); + logger.info("The request has been sent, this thread might continue doing stuff"); + } + } +} diff --git a/examples/simpleGet/src/main/resources/get.yaml b/examples/simpleGet/src/main/resources/get.yaml new file mode 100644 index 00000000..7adf3132 --- /dev/null +++ b/examples/simpleGet/src/main/resources/get.yaml @@ -0,0 +1,11 @@ +document: + dsl: '1.0.0-alpha5' + namespace: examples + name: call-http-shorthand-endpoint + version: '0.1.0' +do: + - getPet: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} diff --git a/impl/README.md b/impl/README.md new file mode 100644 index 00000000..26655a02 --- /dev/null +++ b/impl/README.md @@ -0,0 +1,194 @@ +![Verify JAVA SDK](https://github.com/serverlessworkflow/sdk-java/workflows/Verify%20JAVA%20SDK/badge.svg) +![Deploy JAVA SDK](https://github.com/serverlessworkflow/sdk-java/workflows/Deploy%20JAVA%20SDK/badge.svg) [![Gitpod ready-to-code](https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod)](https://gitpod.io/#https://github.com/serverlessworkflow/sdk-java) + +# Serverless Workflow Specification - Java SDK- Reference Implementation + +Welcome to Java SDK runtime reference implementation, a lightweight implementation of the Serverless Workflow specification which provides a simple, non blocking, reactive API for workflow execution. + +Although initially conceived mainly for testing purposes, it was designed to be easily expanded, so it can eventually become production ready. + +## Status + +This reference implementation is currently capable of running workflows consisting of: + + +* Tasks + * Switch + * Set + * Do + * Raise + * Listen + * Emit + * Fork + * For + * Try + * Wait + * Call + * HTTP +* Schema Validation + * Input + * Output +* Expressions + * Input + * Output + * Export + * Special keywords: runtime, workflow, task... +* Error definitions + + +## Setup + +Before getting started, ensure you have Java 17+ and Maven or Gradle installed. + +Install [Java 17](https://openjdk.org/projects/jdk/17/) +Install [Maven](https://maven.apache.org/install.html) (if using Maven) +Install [Gradle](https://gradle.org/install) (if using Gradle) + +### Dependencies + +This implementation follows a modular approach, keeping dependencies minimal: +- The core library is always required. +- Additional dependencies must be explicitly included if your workflow interacts with external services (e.g., HTTP). +This ensures you only include what you need, preventing unnecessary dependencies. 
+ +#### Maven + +You always need to add this dependency to your pom.xml `dependencies` section: + +```xml + + io.serverlessworkflow + serverlessworkflow-impl-core + 7.0.0.Final + +``` + +And only if your workflow is using HTTP calls, you must add: + +```xml + + io.serverlessworkflow + serverlessworkflow-impl-http + 7.0.0.Final + +``` + +#### Gradle projects: + +You always need to add this dependency to your build.gradle `dependencies` section: + +```text +implementation("io.serverlessworkflow:serverlessworkflow-impl-core:7.0.0.Final") +``` + +And only if your workflow is using HTTP calls, you must add: + +```text +implementation("io.serverlessworkflow:serverlessworkflow-impl-http:7.0.0.Final") +``` + +## How to use + +The quick version is intended for impatient users who want to try something as soon as possible. + +The detailed version is more suitable for those users interested in a more thoughtful discussion of the API. + +### Quick version + +For a quick introduction, we will use a simple workflow [definition](../examples/simpleGet/src/main/resources/get.yaml) that performs a GET call. +We are going to show two ways of invoking the workflow: + - blocking the thread until the GET request goes through + - returning control to the caller, so the main thread continues while the GET request is executed + +In order to execute the workflow, blocking the thread until the HTTP request is completed, you should write: + +``` java +try (WorkflowApplication appl = WorkflowApplication.builder().build()) { + logger.info( + "Workflow output is {}", + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml")) + .instance(Map.of("petId", 10)) + .start() + .join()); + } +``` +You can find the complete Java code [here](../examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java). + +In order to execute the workflow without blocking the calling thread until the HTTP request is completed, you should write: + +``` java + try (WorkflowApplication appl = WorkflowApplication.builder().build()) { + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml")) + .instance(Map.of("petId", 10)) + .start() + .thenAccept(node -> logger.info("Workflow output is {}", node)); + } +``` +When the HTTP request is done, both examples will print a similar output: + + +```shell +Workflow output is {"id":10,"category":{"id":10,"name":"string"},"name":"doggie","photoUrls":["string"],"tags":[{"id":10,"name":"string"}],"status":"string"} +``` + +You can find the complete Java code [here](../examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java). + +### Detailed version + +To discuss the runtime API, we are going to use a couple of workflows: +- [listen.yaml](../examples/events/src/main/resources/listen.yaml), which waits for an event reporting a temperature greater than 38 +- [emit.yaml](../examples/events/src/main/resources/emit.yaml), which emits events with a certain temperature, specified as a workflow parameter. + +Here is a summary of what we are trying to do: + +- The listen.yaml workflow waits for an event (non-blocking). +- We send an event with a low temperature (ignored). +- We send an event with a high temperature (completes the workflow). + +The first step is to create a [WorkflowApplication](core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java) instance.
An application is an abstraction that allows customization of different aspects of the workflow execution (for example, changing the default `ExecutorService` used for thread spawning).
+
+Since `WorkflowApplication` implements `AutoCloseable`, it is best to use a **try-with-resources** block, ensuring that any resources the workflow might have used are freed when done.
+
+`try (WorkflowApplication appl = WorkflowApplication.builder().build())`
+
+Once we have the application object, we use it to parse our definition examples. To load each workflow definition, we use the `readWorkflowFromClasspath` helper method defined in the [WorkflowReader](api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java) class.
+
+```java
+  WorkflowDefinition listenDefinition =
+      appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("listen.yaml"));
+  WorkflowDefinition emitDefinition =
+      appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("emit.yaml"));
+```
+
+A [WorkflowDefinition](core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java) object is immutable and, therefore, thread-safe. It can be used to execute as many workflow instances as desired.
+
+To execute a workflow, we first create a [WorkflowInstance](core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java) object (its initial status is PENDING) and then invoke the `start` method on it (its status changes to RUNNING). The `start` method returns a [CompletableFuture](https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html), which we use to indicate that a log message should be printed when the workflow is completed.
+
+```java
+  WorkflowInstance waitingInstance = listenDefinition.instance(Map.of());
+  waitingInstance
+      .start()
+      .thenAccept(node -> logger.info("Waiting instance completed with result {}", node));
+```
+
+As soon as the workflow execution reaches the point where it waits for events to arrive, control is returned to the calling thread. Since the execution is not blocking, we can execute another workflow instance while the first one is waiting.
+
+We will send an event with a temperature that does not satisfy the criteria, so the listen instance will continue waiting. We use a regular Java `Map` to pass parameters to the workflow instance that sends the event. Note that since we want to wait till the event is published, we call `join` after `start`, telling the `CompletableFuture` to wait for workflow completion.
+
+```java
+  emitDefinition.instance(Map.of("temperature", 35)).start().join();
+```
+
+It's time to complete the waiting instance and send an event with the expected temperature. We do so by reusing `emitDefinition`.
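+
+Before doing so, you can optionally verify that the low-temperature event was indeed ignored by checking the instance status. This is an illustrative snippet, not part of the linked example; it assumes the same `logger` used above and relies on the `status()` accessor of `WorkflowInstance`:
+
+```java
+  // The first event did not match the filter, so the listening instance is still running.
+  logger.info("Waiting instance status is {}", waitingInstance.status());
+```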
+ +```java + emitDefinition.instance(Map.of("temperature", 39)).start().join(); + ``` + +After that, listen instance will be completed and we will see this log message + +```java +[pool-1-thread-1] INFO events.EventExample - Waiting instance completed with result [{"temperature":39}] +``` +The source code of the example is [here](../examples/events/src/main/java/events/EventExample.java) + diff --git a/impl/bom/pom.xml b/impl/bom/pom.xml deleted file mode 100644 index 604a8300..00000000 --- a/impl/bom/pom.xml +++ /dev/null @@ -1,20 +0,0 @@ - - 4.0.0 - - io.serverlessworkflow - serverlessworkflow-impl - 7.0.0-alpha5.1 - - serverlessworkflow-impl-bom - pom - - - io.serverlessworkflow - serverlessworkflow-impl-core - - - io.serverlessworkflow - serverlessworkflow-impl-http - - - \ No newline at end of file diff --git a/impl/core/pom.xml b/impl/core/pom.xml index 9fac9df6..a5fac29a 100644 --- a/impl/core/pom.xml +++ b/impl/core/pom.xml @@ -1,59 +1,63 @@ - - 4.0.0 - - io.serverlessworkflow - serverlessworkflow-impl - 7.0.0-alpha5.1 - - serverlessworkflow-impl-core - - 1.1.0 - 5.2.3 - - - - io.serverlessworkflow - serverlessworkflow-api - 7.0.0-alpha5.1 - - - com.github.f4b6a3 - ulid-creator - ${version.com.github.f4b6a3} - - - com.networknt - json-schema-validator - - - net.thisptr - jackson-jq - ${version.net.thisptr} - - - org.junit.jupiter - junit-jupiter-api - test - - - org.junit.jupiter - junit-jupiter-engine - test - - - org.junit.jupiter - junit-jupiter-params - test - - - org.assertj - assertj-core - test - - - ch.qos.logback - logback-classic - test - - + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-impl + 8.0.0-SNAPSHOT + + serverlessworkflow-impl-core + Serverless Workflow :: Impl :: Core + + + io.serverlessworkflow + serverlessworkflow-api + ${project.version} + + + io.cloudevents + cloudevents-api + + + io.cloudevents + cloudevents-json-jackson + + + com.github.f4b6a3 + ulid-creator + + + com.networknt + json-schema-validator + + + net.thisptr + jackson-jq + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.assertj + assertj-core + test + + + ch.qos.logback + logback-classic + test + + diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java b/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java new file mode 100644 index 00000000..1ac1f759 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java @@ -0,0 +1,39 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +public class DefaultExecutorServiceFactory implements ExecutorServiceFactory { + + private static final ExecutorServiceFactory instance = new DefaultExecutorServiceFactory(); + + public static ExecutorServiceFactory instance() { + return instance; + } + + private static class ExecutorServiceHolder { + private static ExecutorService instance = Executors.newCachedThreadPool(); + } + + @Override + public ExecutorService get() { + return ExecutorServiceHolder.instance; + } + + private DefaultExecutorServiceFactory() {} +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java new file mode 100644 index 00000000..f899f186 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java @@ -0,0 +1,20 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl; + +import java.util.function.BiFunction; + +public interface ExpressionHolder extends BiFunction {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java index 91b1b6c5..cf5598e7 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java @@ -15,7 +15,5 @@ */ package io.serverlessworkflow.impl; -import java.util.function.BiFunction; - @FunctionalInterface -public interface LongFilter extends BiFunction, Long> {} +public interface LongFilter extends ExpressionHolder {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java index 5d0a648e..2fbec647 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java @@ -15,7 +15,5 @@ */ package io.serverlessworkflow.impl; -import java.util.function.BiFunction; - @FunctionalInterface -public interface StringFilter extends BiFunction, String> {} +public interface StringFilter extends ExpressionHolder {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java b/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java index dde5a315..4fc3d1f4 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java @@ -16,76 +16,64 @@ package io.serverlessworkflow.impl; import com.fasterxml.jackson.databind.JsonNode; -import io.serverlessworkflow.api.types.FlowDirective; -import io.serverlessworkflow.api.types.FlowDirectiveEnum; import io.serverlessworkflow.api.types.TaskBase; +import io.serverlessworkflow.impl.executors.TransitionInfo; 
import java.time.Instant; import java.util.HashMap; import java.util.Map; +import java.util.Optional; -public class TaskContext { +public class TaskContext { private final JsonNode rawInput; - private final T task; + private final TaskBase task; private final WorkflowPosition position; private final Instant startedAt; + private final String taskName; + private final Map contextVariables; + private final Optional parentContext; private JsonNode input; private JsonNode output; private JsonNode rawOutput; - private FlowDirective flowDirective; - private Map contextVariables; private Instant completedAt; + private TransitionInfo transition; - public TaskContext(JsonNode input, WorkflowPosition position) { - this(input, null, position, Instant.now(), input, input, input, null, new HashMap<>()); - } - - public TaskContext(JsonNode input, TaskContext taskContext, T task) { - this( - input, - task, - taskContext.position, - Instant.now(), - input, - input, - input, - task.getThen(), - new HashMap<>(taskContext.variables())); + public TaskContext( + JsonNode input, + WorkflowPosition position, + Optional parentContext, + String taskName, + TaskBase task) { + this(input, parentContext, taskName, task, position, Instant.now(), input, input, input); } private TaskContext( JsonNode rawInput, - T task, + Optional parentContext, + String taskName, + TaskBase task, WorkflowPosition position, Instant startedAt, JsonNode input, JsonNode output, - JsonNode rawOutput, - FlowDirective flowDirective, - Map contextVariables) { + JsonNode rawOutput) { this.rawInput = rawInput; + this.parentContext = parentContext; + this.taskName = taskName; this.task = task; this.position = position; this.startedAt = startedAt; this.input = input; this.output = output; this.rawOutput = rawOutput; - this.flowDirective = flowDirective; - this.contextVariables = contextVariables; + this.contextVariables = + parentContext.map(p -> new HashMap<>(p.contextVariables)).orElseGet(HashMap::new); } - public TaskContext copy() { - return new TaskContext( - rawInput, - task, - position.copy(), - startedAt, - input, - output, - rawOutput, - flowDirective, - new HashMap<>(contextVariables)); + public TaskContext copy() { + return new TaskContext( + rawInput, parentContext, taskName, task, position, startedAt, input, output, rawOutput); } public void input(JsonNode input) { @@ -102,54 +90,64 @@ public JsonNode rawInput() { return rawInput; } - public T task() { + public TaskBase task() { return task; } - public void rawOutput(JsonNode output) { + public TaskContext rawOutput(JsonNode output) { this.rawOutput = output; this.output = output; + return this; } public JsonNode rawOutput() { return rawOutput; } - public void output(JsonNode output) { + public TaskContext output(JsonNode output) { this.output = output; + return this; } public JsonNode output() { return output; } - public void flowDirective(FlowDirective flowDirective) { - this.flowDirective = flowDirective; - } - - public FlowDirective flowDirective() { - return flowDirective == null - ? 
new FlowDirective().withFlowDirectiveEnum(FlowDirectiveEnum.CONTINUE) - : flowDirective; + public WorkflowPosition position() { + return position; } public Map variables() { return contextVariables; } - public WorkflowPosition position() { - return position; - } - public Instant startedAt() { return startedAt; } - public void completedAt(Instant instant) { + public Optional parent() { + return parentContext; + } + + public String taskName() { + return taskName; + } + + public TaskContext completedAt(Instant instant) { this.completedAt = instant; + return this; } public Instant completedAt() { return completedAt; } + + public TransitionInfo transition() { + return transition; + } + + public TaskContext transition(TransitionInfo transition) { + this.transition = transition; + return this; + } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java index f36c23f6..b998c57d 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java @@ -18,6 +18,9 @@ import com.github.f4b6a3.ulid.UlidCreator; import io.serverlessworkflow.api.types.Document; import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.events.EventConsumer; +import io.serverlessworkflow.impl.events.EventPublisher; +import io.serverlessworkflow.impl.events.InMemoryEvents; import io.serverlessworkflow.impl.executors.DefaultTaskExecutorFactory; import io.serverlessworkflow.impl.executors.TaskExecutorFactory; import io.serverlessworkflow.impl.expressions.ExpressionFactory; @@ -47,29 +50,22 @@ public class WorkflowApplication implements AutoCloseable { private final WorkflowPositionFactory positionFactory; private final ExecutorServiceFactory executorFactory; private final RuntimeDescriptorFactory runtimeDescriptorFactory; - - private ExecutorService executorService; - - public WorkflowApplication( - TaskExecutorFactory taskFactory, - ExpressionFactory exprFactory, - ResourceLoaderFactory resourceLoaderFactory, - SchemaValidatorFactory schemaValidatorFactory, - WorkflowPositionFactory positionFactory, - WorkflowIdFactory idFactory, - RuntimeDescriptorFactory runtimeDescriptorFactory, - ExecutorServiceFactory executorFactory, - Collection listeners) { - this.taskFactory = taskFactory; - this.exprFactory = exprFactory; - this.resourceLoaderFactory = resourceLoaderFactory; - this.schemaValidatorFactory = schemaValidatorFactory; - this.positionFactory = positionFactory; - this.idFactory = idFactory; - this.runtimeDescriptorFactory = runtimeDescriptorFactory; - this.executorFactory = executorFactory; - this.listeners = listeners; + private final EventConsumer eventConsumer; + private final EventPublisher eventPublisher; + + private WorkflowApplication(Builder builder) { + this.taskFactory = builder.taskFactory; + this.exprFactory = builder.exprFactory; + this.resourceLoaderFactory = builder.resourceLoaderFactory; + this.schemaValidatorFactory = builder.schemaValidatorFactory; + this.positionFactory = builder.positionFactory; + this.idFactory = builder.idFactory; + this.runtimeDescriptorFactory = builder.descriptorFactory; + this.executorFactory = builder.executorFactory; + this.listeners = builder.listeners != null ? 
builder.listeners : Collections.emptySet(); this.definitions = new ConcurrentHashMap<>(); + this.eventConsumer = builder.eventConsumer; + this.eventPublisher = builder.eventPublisher; } public TaskExecutorFactory taskFactory() { @@ -96,6 +92,10 @@ public Collection listeners() { return listeners; } + public EventPublisher eventPublisher() { + return eventPublisher; + } + public WorkflowIdFactory idFactory() { return idFactory; } @@ -109,6 +109,8 @@ public static class Builder { private WorkflowPositionFactory positionFactory = () -> new QueueWorkflowPosition(); private WorkflowIdFactory idFactory = () -> UlidCreator.getMonotonicUlid().toString(); private ExecutorServiceFactory executorFactory = () -> Executors.newCachedThreadPool(); + private EventConsumer eventConsumer = InMemoryEvents.get(); + private EventPublisher eventPublisher = InMemoryEvents.get(); private RuntimeDescriptorFactory descriptorFactory = () -> new RuntimeDescriptor("reference impl", "1.0.0_alpha", Collections.emptyMap()); @@ -162,19 +164,18 @@ public Builder withDescriptorFactory(RuntimeDescriptorFactory factory) { return this; } + public Builder withEventConsumer(EventConsumer eventConsumer) { + this.eventConsumer = eventConsumer; + return this; + } + + public Builder withEventPublisher(EventPublisher eventPublisher) { + this.eventPublisher = eventPublisher; + return this; + } + public WorkflowApplication build() { - return new WorkflowApplication( - taskFactory, - exprFactory, - resourceLoaderFactory, - schemaValidatorFactory, - positionFactory, - idFactory, - descriptorFactory, - executorFactory, - listeners == null - ? Collections.emptySet() - : Collections.unmodifiableCollection(listeners)); + return new WorkflowApplication(this); } } @@ -190,7 +191,7 @@ public WorkflowDefinition workflowDefinition(Workflow workflow) { } @Override - public void close() throws Exception { + public void close() { for (WorkflowDefinition definition : definitions.values()) { definition.close(); } @@ -205,12 +206,12 @@ public RuntimeDescriptorFactory runtimeDescriptorFactory() { return runtimeDescriptorFactory; } + @SuppressWarnings("rawtypes") + public EventConsumer eventConsumer() { + return eventConsumer; + } + public ExecutorService executorService() { - synchronized (executorFactory) { - if (executorService == null) { - executorService = executorFactory.get(); - } - } - return executorService; + return executorFactory.get(); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java index f45f1b84..96890c8b 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java @@ -20,6 +20,7 @@ public class WorkflowContext { private final WorkflowDefinition definition; private final WorkflowInstance instance; + private JsonNode context; WorkflowContext(WorkflowDefinition definition, WorkflowInstance instance) { this.definition = definition; @@ -31,11 +32,11 @@ public WorkflowInstance instance() { } public JsonNode context() { - return instance.context(); + return context; } public void context(JsonNode context) { - this.instance.context(context); + this.context = context; } public WorkflowDefinition definition() { diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java index df5b70e1..1a789616 100644 --- 
a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java @@ -19,21 +19,15 @@ import io.serverlessworkflow.api.types.Input; import io.serverlessworkflow.api.types.Output; -import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.executors.TaskExecutor; -import io.serverlessworkflow.impl.executors.TaskExecutorFactory; -import io.serverlessworkflow.impl.expressions.ExpressionFactory; +import io.serverlessworkflow.impl.executors.TaskExecutorHelper; import io.serverlessworkflow.impl.json.JsonUtils; import io.serverlessworkflow.impl.jsonschema.SchemaValidator; -import io.serverlessworkflow.impl.jsonschema.SchemaValidatorFactory; import io.serverlessworkflow.impl.resources.ResourceLoader; import java.nio.file.Path; import java.util.Collection; -import java.util.Map; import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; public class WorkflowDefinition implements AutoCloseable { @@ -42,16 +36,13 @@ public class WorkflowDefinition implements AutoCloseable { private Optional outputSchemaValidator = Optional.empty(); private Optional inputFilter = Optional.empty(); private Optional outputFilter = Optional.empty(); - private final Map> taskExecutors = - new ConcurrentHashMap<>(); - private final ResourceLoader resourceLoader; private final WorkflowApplication application; + private final TaskExecutor taskExecutor; private WorkflowDefinition( WorkflowApplication application, Workflow workflow, ResourceLoader resourceLoader) { this.workflow = workflow; this.application = application; - this.resourceLoader = resourceLoader; if (workflow.getInput() != null) { Input input = workflow.getInput(); this.inputSchemaValidator = @@ -64,6 +55,13 @@ private WorkflowDefinition( getSchemaValidator(application.validatorFactory(), resourceLoader, output.getSchema()); this.outputFilter = buildWorkflowFilter(application.expressionFactory(), output.getAs()); } + this.taskExecutor = + TaskExecutorHelper.createExecutorList( + application.positionFactory().get(), + workflow.getDo(), + workflow, + application, + resourceLoader); } static WorkflowDefinition of(WorkflowApplication application, Workflow workflow) { @@ -75,15 +73,19 @@ static WorkflowDefinition of(WorkflowApplication application, Workflow workflow, application, workflow, application.resourceLoaderFactory().getResourceLoader(path)); } - public WorkflowInstance execute(Object input) { + public WorkflowInstance instance(Object input) { return new WorkflowInstance(this, JsonUtils.fromValue(input)); } - public Optional inputSchemaValidator() { + Optional inputSchemaValidator() { return inputSchemaValidator; } - public Optional inputFilter() { + TaskExecutor startTask() { + return taskExecutor; + } + + Optional inputFilter() { return inputFilter; } @@ -95,51 +97,22 @@ public Collection listeners() { return application.listeners(); } - public Map> taskExecutors() { - return taskExecutors; - } - - public TaskExecutorFactory taskFactory() { - return application.taskFactory(); - } - - public Optional outputFilter() { + Optional outputFilter() { return outputFilter; } - public WorkflowIdFactory idFactory() { - return application.idFactory(); - } - - public Optional outputSchemaValidator() { + Optional outputSchemaValidator() { return outputSchemaValidator; } - public ExpressionFactory expressionFactory() { - return 
application.expressionFactory(); - } - - public SchemaValidatorFactory validatorFactory() { - return application.validatorFactory(); - } - - public ResourceLoader resourceLoader() { - - return resourceLoader; - } - - public WorkflowPositionFactory positionFactory() { - return application.positionFactory(); - } - - public ExecutorService executorService() { - return application.executorService(); - } - public RuntimeDescriptorFactory runtimeDescriptorFactory() { return application.runtimeDescriptorFactory(); } + public WorkflowApplication application() { + return application; + } + @Override public void close() { // TODO close resourcers hold for uncompleted process instances, if any diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java index 1823be94..b72cdbb0 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java @@ -26,13 +26,13 @@ public static Builder error(String type, int status) { return new Builder(type, status); } - public static Builder communication(int status, TaskContext context, Exception ex) { + public static Builder communication(int status, TaskContext context, Exception ex) { return new Builder(COMM_TYPE, status) .instance(context.position().jsonPointer()) .title(ex.getMessage()); } - public static Builder runtime(int status, TaskContext context, Exception ex) { + public static Builder runtime(int status, TaskContext context, Exception ex) { return new Builder(RUNTIME_TYPE, status) .instance(context.position().jsonPointer()) .title(ex.getMessage()); diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java index 7d25df48..4475cacd 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java @@ -19,5 +19,5 @@ @FunctionalInterface public interface WorkflowFilter { - JsonNode apply(WorkflowContext workflow, TaskContext task, JsonNode node); + JsonNode apply(WorkflowContext workflow, TaskContext task, JsonNode node); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java index f81a6f24..2e55c484 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java @@ -15,42 +15,62 @@ */ package io.serverlessworkflow.impl; -import static io.serverlessworkflow.impl.json.JsonUtils.toJavaValue; - import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.NullNode; import io.serverlessworkflow.impl.executors.TaskExecutorHelper; +import io.serverlessworkflow.impl.json.JsonUtils; import java.time.Instant; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicReference; public class WorkflowInstance { private final AtomicReference status; - private final TaskContext taskContext; private final String id; private final JsonNode input; - private final Instant startedAt; - private final AtomicReference context; + + private WorkflowContext workflowContext; + private WorkflowDefinition definition; + private Instant startedAt; + private Instant completedAt; + private volatile JsonNode output; + 
private CompletableFuture completableFuture; WorkflowInstance(WorkflowDefinition definition, JsonNode input) { - this.id = definition.idFactory().get(); + this.id = definition.application().idFactory().get(); this.input = input; + this.definition = definition; + this.status = new AtomicReference<>(WorkflowStatus.PENDING); definition.inputSchemaValidator().ifPresent(v -> v.validate(input)); + } + + public CompletableFuture start() { this.startedAt = Instant.now(); - WorkflowContext workflowContext = new WorkflowContext(definition, this); - taskContext = new TaskContext<>(input, definition.positionFactory().get()); - definition - .inputFilter() - .ifPresent(f -> taskContext.input(f.apply(workflowContext, taskContext, input))); - status = new AtomicReference<>(WorkflowStatus.RUNNING); - context = new AtomicReference<>(NullNode.getInstance()); - TaskExecutorHelper.processTaskList(definition.workflow().getDo(), workflowContext, taskContext); - definition - .outputFilter() - .ifPresent( - f -> - taskContext.output(f.apply(workflowContext, taskContext, taskContext.rawOutput()))); - definition.outputSchemaValidator().ifPresent(v -> v.validate(taskContext.output())); + this.workflowContext = new WorkflowContext(definition, this); + this.status.set(WorkflowStatus.RUNNING); + this.completableFuture = + TaskExecutorHelper.processTaskList( + definition.startTask(), + workflowContext, + Optional.empty(), + definition + .inputFilter() + .map(f -> f.apply(workflowContext, null, input)) + .orElse(input)) + .thenApply(this::whenCompleted); + return completableFuture; + } + + private JsonNode whenCompleted(JsonNode node) { + output = + workflowContext + .definition() + .outputFilter() + .map(f -> f.apply(workflowContext, null, node)) + .orElse(node); + workflowContext.definition().outputSchemaValidator().ifPresent(v -> v.validate(output)); status.compareAndSet(WorkflowStatus.RUNNING, WorkflowStatus.COMPLETED); + completedAt = Instant.now(); + return output; } public String id() { @@ -61,12 +81,12 @@ public Instant startedAt() { return startedAt; } - public JsonNode input() { - return input; + public Instant completedAt() { + return completedAt; } - public JsonNode context() { - return context.get(); + public JsonNode input() { + return input; } public WorkflowStatus status() { @@ -78,14 +98,10 @@ public void status(WorkflowStatus state) { } public Object output() { - return toJavaValue(taskContext.output()); + return JsonUtils.toJavaValue(outputAsJsonNode()); } public JsonNode outputAsJsonNode() { - return taskContext.output(); - } - - void context(JsonNode context) { - this.context.set(context); + return output; } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java index 0866ba05..5feaf04e 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java @@ -24,6 +24,7 @@ import io.serverlessworkflow.api.types.SchemaExternal; import io.serverlessworkflow.api.types.SchemaInline; import io.serverlessworkflow.api.types.SchemaUnion; +import io.serverlessworkflow.api.types.UriTemplate; import io.serverlessworkflow.impl.expressions.Expression; import io.serverlessworkflow.impl.expressions.ExpressionFactory; import io.serverlessworkflow.impl.expressions.ExpressionUtils; @@ -35,8 +36,10 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.net.URI; import 
java.util.Map; import java.util.Optional; +import java.util.function.Function; public class WorkflowUtils { @@ -81,6 +84,25 @@ public static Optional buildWorkflowFilter( : Optional.empty(); } + public static ExpressionHolder buildExpressionHolder( + ExpressionFactory exprFactory, + String expression, + T literal, + Function converter) { + return expression != null + ? buildExpressionHolder(buildWorkflowFilter(exprFactory, expression), converter) + : buildExpressionHolder(literal); + } + + private static ExpressionHolder buildExpressionHolder( + WorkflowFilter filter, Function converter) { + return (w, t) -> converter.apply(filter.apply(w, t, t.input())); + } + + private static ExpressionHolder buildExpressionHolder(T literal) { + return (w, t) -> literal; + } + public static Optional buildWorkflowFilter( ExpressionFactory exprFactory, ExportAs as) { return as != null @@ -109,7 +131,7 @@ private static StringFilter toString(String literal) { return (w, t) -> literal; } - private static WorkflowFilter buildWorkflowFilter( + public static WorkflowFilter buildWorkflowFilter( ExpressionFactory exprFactory, String str, Object object) { if (str != null) { return buildWorkflowFilter(exprFactory, str); @@ -148,4 +170,9 @@ public static WorkflowFilter buildWorkflowFilter(ExpressionFactory exprFactory, public static Optional optionalFilter(ExpressionFactory exprFactory, String str) { return str != null ? Optional.of(buildWorkflowFilter(exprFactory, str)) : Optional.empty(); } + + public static String toString(UriTemplate template) { + URI uri = template.getLiteralUri(); + return uri != null ? uri.toString() : template.getLiteralUriTemplate(); + } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java new file mode 100644 index 00000000..a3222342 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java @@ -0,0 +1,136 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; +import io.serverlessworkflow.api.types.EventFilter; +import io.serverlessworkflow.api.types.EventProperties; +import io.serverlessworkflow.impl.WorkflowApplication; +import java.util.AbstractCollection; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class AbstractTypeConsumer + implements EventConsumer { + + private static final Logger logger = LoggerFactory.getLogger(AbstractTypeConsumer.class); + + protected abstract void registerToAll(Consumer consumer); + + protected abstract void unregisterFromAll(); + + protected abstract void register(String topicName, Consumer consumer); + + protected abstract void unregister(String topicName); + + private Map registrations = new ConcurrentHashMap<>(); + + @Override + public TypeEventRegistrationBuilder listen( + EventFilter register, WorkflowApplication application) { + EventProperties properties = register.getWith(); + String type = properties.getType(); + return new TypeEventRegistrationBuilder( + type, new DefaultCloudEventPredicate(properties, application.expressionFactory())); + } + + @Override + public Collection listenToAll(WorkflowApplication application) { + return List.of(new TypeEventRegistrationBuilder(null, null)); + } + + private static class CloudEventConsumer extends AbstractCollection + implements Consumer { + private Collection registrations = new CopyOnWriteArrayList<>(); + + @Override + public void accept(CloudEvent ce) { + logger.debug("Received cloud event {}", ce); + for (TypeEventRegistration registration : registrations) { + if (registration.predicate().test(ce)) { + registration.consumer().accept(ce); + } + } + } + + @Override + public boolean add(TypeEventRegistration registration) { + return registrations.add(registration); + } + + @Override + public boolean remove(Object registration) { + return registrations.remove(registration); + } + + @Override + public Iterator iterator() { + return registrations.iterator(); + } + + @Override + public int size() { + return registrations.size(); + } + } + + public TypeEventRegistration register( + TypeEventRegistrationBuilder builder, Consumer ce) { + if (builder.type() == null) { + registerToAll(ce); + return new TypeEventRegistration(null, ce, null); + } else { + TypeEventRegistration registration = + new TypeEventRegistration(builder.type(), ce, builder.cePredicate()); + registrations + .computeIfAbsent( + registration.type(), + k -> { + CloudEventConsumer consumer = new CloudEventConsumer(); + register(k, consumer); + return consumer; + }) + .add(registration); + return registration; + } + } + + @Override + public void unregister(TypeEventRegistration registration) { + if (registration.type() == null) { + unregisterFromAll(); + } else { + registrations.computeIfPresent( + registration.type(), + (k, v) -> { + v.remove(registration); + if (v.isEmpty()) { + unregister(registration.type()); + return null; + } else { + return v; + } + }); + } + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java new file mode 100644 index 00000000..6029d484 --- /dev/null +++ 
b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java @@ -0,0 +1,21 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +@FunctionalInterface +public interface CloudEventAttrPredicate { + boolean test(T value); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java new file mode 100644 index 00000000..a790e371 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java @@ -0,0 +1,22 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; + +public interface CloudEventPredicate { + boolean test(CloudEvent event); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java new file mode 100644 index 00000000..1b2709b8 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java @@ -0,0 +1,101 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl.events; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.NullNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.cloudevents.CloudEvent; +import io.cloudevents.CloudEventData; +import io.cloudevents.core.builder.CloudEventBuilder; +import io.cloudevents.jackson.JsonCloudEventData; +import io.serverlessworkflow.impl.json.JsonUtils; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CloudEventUtils { + + public static JsonNode toJsonNode(CloudEvent event) { + ObjectNode result = JsonUtils.mapper().createObjectNode(); + if (event.getData() != null) { + result.set("data", toJsonNode(event.getData())); + } + if (event.getSubject() != null) { + result.put("subject", event.getSubject()); + } + if (event.getDataContentType() != null) { + result.put("datacontenttype", event.getDataContentType()); + } + result.put("id", event.getId()); + result.put("source", event.getSource().toString()); + result.put("type", event.getType()); + result.put("specversion", event.getSpecVersion().toString()); + if (event.getDataSchema() != null) { + result.put("dataschema", event.getDataSchema().toString()); + } + if (event.getTime() != null) { + result.put("time", event.getTime().toString()); + } + event + .getExtensionNames() + .forEach(n -> result.set(n, JsonUtils.fromValue(event.getExtension(n)))); + return result; + } + + public static OffsetDateTime toOffset(Date date) { + return date.toInstant().atOffset(ZoneOffset.UTC); + } + + public static CloudEventBuilder addExtension( + CloudEventBuilder builder, String name, JsonNode value) { + if (value.isTextual()) { + builder.withExtension(name, value.asText()); + } else if (value.isBoolean()) { + builder.withExtension(name, value.isBoolean()); + } else if (value.isNumber()) { + builder.withExtension(name, value.numberValue()); + } + return builder; + } + + public static JsonNode toJsonNode(CloudEventData data) { + if (data == null) { + return NullNode.instance; + } + try { + return data instanceof JsonCloudEventData + ? ((JsonCloudEventData) data).getNode() + : JsonUtils.mapper().readTree(data.toBytes()); + } catch (IOException io) { + throw new UncheckedIOException(io); + } + } + + public static Map extensions(CloudEvent event) { + Map result = new LinkedHashMap<>(); + for (String name : event.getExtensionNames()) { + result.put(name, event.getExtension(name)); + } + return result; + } + + private CloudEventUtils() {} +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java new file mode 100644 index 00000000..6eb35995 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java @@ -0,0 +1,154 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +import com.fasterxml.jackson.databind.JsonNode; +import io.cloudevents.CloudEvent; +import io.serverlessworkflow.api.types.EventData; +import io.serverlessworkflow.api.types.EventDataschema; +import io.serverlessworkflow.api.types.EventProperties; +import io.serverlessworkflow.api.types.EventSource; +import io.serverlessworkflow.api.types.EventTime; +import io.serverlessworkflow.api.types.UriTemplate; +import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.expressions.Expression; +import io.serverlessworkflow.impl.expressions.ExpressionFactory; +import io.serverlessworkflow.impl.json.JsonUtils; +import java.net.URI; +import java.time.OffsetDateTime; +import java.util.Map; +import java.util.Objects; + +public class DefaultCloudEventPredicate implements CloudEventPredicate { + + private final CloudEventAttrPredicate idFilter; + private final CloudEventAttrPredicate sourceFilter; + private final CloudEventAttrPredicate subjectFilter; + private final CloudEventAttrPredicate contentTypeFilter; + private final CloudEventAttrPredicate typeFilter; + private final CloudEventAttrPredicate dataSchemaFilter; + private final CloudEventAttrPredicate timeFilter; + private final CloudEventAttrPredicate dataFilter; + private final CloudEventAttrPredicate additionalFilter; + + private static final CloudEventAttrPredicate isTrue() { + return x -> true; + } + + public DefaultCloudEventPredicate(EventProperties properties, ExpressionFactory exprFactory) { + idFilter = stringFilter(properties.getId()); + subjectFilter = stringFilter(properties.getSubject()); + typeFilter = stringFilter(properties.getType()); + contentTypeFilter = stringFilter(properties.getDatacontenttype()); + sourceFilter = sourceFilter(properties.getSource(), exprFactory); + dataSchemaFilter = dataSchemaFilter(properties.getDataschema(), exprFactory); + timeFilter = offsetTimeFilter(properties.getTime(), exprFactory); + dataFilter = dataFilter(properties.getData(), exprFactory); + additionalFilter = additionalFilter(properties.getAdditionalProperties(), exprFactory); + } + + private CloudEventAttrPredicate additionalFilter( + Map additionalProperties, ExpressionFactory exprFactory) { + return additionalProperties != null && !additionalProperties.isEmpty() + ? from(WorkflowUtils.buildWorkflowFilter(exprFactory, null, additionalProperties)) + : isTrue(); + } + + private CloudEventAttrPredicate from(WorkflowFilter filter) { + return d -> filter.apply(null, null, d).asBoolean(); + } + + private CloudEventAttrPredicate dataFilter( + EventData data, ExpressionFactory exprFactory) { + return data != null + ? 
from( + WorkflowUtils.buildWorkflowFilter( + exprFactory, data.getRuntimeExpression(), data.getObject())) + : isTrue(); + } + + private CloudEventAttrPredicate offsetTimeFilter( + EventTime time, ExpressionFactory exprFactory) { + if (time != null) { + if (time.getRuntimeExpression() != null) { + final Expression expr = exprFactory.getExpression(time.getRuntimeExpression()); + return s -> evalExpr(expr, toString(s)); + } else if (time.getLiteralTime() != null) { + return s -> Objects.equals(s, CloudEventUtils.toOffset(time.getLiteralTime())); + } + } + return isTrue(); + } + + private CloudEventAttrPredicate dataSchemaFilter( + EventDataschema dataSchema, ExpressionFactory exprFactory) { + if (dataSchema != null) { + if (dataSchema.getExpressionDataSchema() != null) { + final Expression expr = exprFactory.getExpression(dataSchema.getExpressionDataSchema()); + return s -> evalExpr(expr, toString(s)); + } else if (dataSchema.getLiteralDataSchema() != null) { + return templateFilter(dataSchema.getLiteralDataSchema()); + } + } + return isTrue(); + } + + private CloudEventAttrPredicate stringFilter(String str) { + return str == null ? isTrue() : x -> x.equals(str); + } + + private CloudEventAttrPredicate sourceFilter( + EventSource source, ExpressionFactory exprFactory) { + if (source != null) { + if (source.getRuntimeExpression() != null) { + final Expression expr = exprFactory.getExpression(source.getRuntimeExpression()); + return s -> evalExpr(expr, toString(s)); + } else if (source.getUriTemplate() != null) { + return templateFilter(source.getUriTemplate()); + } + } + return isTrue(); + } + + private CloudEventAttrPredicate templateFilter(UriTemplate template) { + if (template.getLiteralUri() != null) { + return u -> Objects.equals(u, template.getLiteralUri()); + } + throw new UnsupportedOperationException("Template not supporte here yet"); + } + + private String toString(T uri) { + return uri != null ? uri.toString() : null; + } + + private boolean evalExpr(Expression expr, T value) { + return expr.eval(null, null, JsonUtils.fromValue(value)).asBoolean(); + } + + @Override + public boolean test(CloudEvent event) { + return idFilter.test(event.getId()) + && sourceFilter.test(event.getSource()) + && subjectFilter.test(event.getSubject()) + && contentTypeFilter.test(event.getDataContentType()) + && typeFilter.test(event.getType()) + && dataSchemaFilter.test(event.getDataSchema()) + && timeFilter.test(event.getTime()) + && dataFilter.test(CloudEventUtils.toJsonNode(event.getData())) + && additionalFilter.test(JsonUtils.fromValue(CloudEventUtils.extensions(event))); + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java new file mode 100644 index 00000000..00c1619e --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java @@ -0,0 +1,33 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; +import io.serverlessworkflow.api.types.EventFilter; +import io.serverlessworkflow.impl.WorkflowApplication; +import java.util.Collection; +import java.util.function.Consumer; + +public interface EventConsumer { + + V listen(EventFilter filter, WorkflowApplication workflowApplication); + + Collection listenToAll(WorkflowApplication workflowApplication); + + T register(V builder, Consumer consumer); + + void unregister(T register); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java new file mode 100644 index 00000000..08cc121d --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java @@ -0,0 +1,23 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; +import java.util.concurrent.CompletableFuture; + +public interface EventPublisher { + CompletableFuture publish(CloudEvent event); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java new file mode 100644 index 00000000..923647d5 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java @@ -0,0 +1,18 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +public interface EventRegistration {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java new file mode 100644 index 00000000..e81723ff --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java @@ -0,0 +1,18 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +public interface EventRegistrationBuilder {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java new file mode 100644 index 00000000..714d89d0 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; +import io.serverlessworkflow.impl.DefaultExecutorServiceFactory; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +/* + * Straightforward implementation of in memory event broker. + * User might invoke notifyCE to simulate event reception. 
+ */ +public class InMemoryEvents extends AbstractTypeConsumer implements EventPublisher { + + private static InMemoryEvents instance = new InMemoryEvents(); + + private InMemoryEvents() {} + + public static InMemoryEvents get() { + return instance; + } + + private Map> topicMap = new ConcurrentHashMap<>(); + + private AtomicReference> allConsumerRef = new AtomicReference<>(); + + @Override + protected void register(String topicName, Consumer consumer) { + topicMap.put(topicName, consumer); + } + + @Override + protected void unregister(String topicName) { + topicMap.remove(topicName); + } + + @Override + public CompletableFuture publish(CloudEvent ce) { + return CompletableFuture.runAsync( + () -> { + Consumer allConsumer = allConsumerRef.get(); + if (allConsumer != null) { + allConsumer.accept(ce); + } + Consumer consumer = topicMap.get(ce.getType()); + if (consumer != null) { + consumer.accept(ce); + } + }, + DefaultExecutorServiceFactory.instance().get()); + } + + @Override + protected void registerToAll(Consumer consumer) { + allConsumerRef.set(consumer); + } + + @Override + protected void unregisterFromAll() { + allConsumerRef.set(null); + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java new file mode 100644 index 00000000..8fdf2388 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java @@ -0,0 +1,24 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.serverlessworkflow.impl.events; + +import io.cloudevents.CloudEvent; +import java.util.function.Consumer; + +public record TypeEventRegistration( + String type, Consumer consumer, CloudEventPredicate predicate) + implements EventRegistration {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java new file mode 100644 index 00000000..bd504a76 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java @@ -0,0 +1,20 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.serverlessworkflow.impl.events; + +public record TypeEventRegistrationBuilder(String type, CloudEventPredicate cePredicate) + implements EventRegistrationBuilder {} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java index f5ee1136..f51b7a01 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java @@ -19,98 +19,198 @@ import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.Export; +import io.serverlessworkflow.api.types.FlowDirective; import io.serverlessworkflow.api.types.Input; import io.serverlessworkflow.api.types.Output; import io.serverlessworkflow.api.types.TaskBase; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.WorkflowStatus; import io.serverlessworkflow.impl.jsonschema.SchemaValidator; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.time.Instant; +import java.util.Iterator; +import java.util.Map; import java.util.Optional; +import java.util.concurrent.CompletableFuture; public abstract class AbstractTaskExecutor implements TaskExecutor { protected final T task; + protected final String taskName; + protected final WorkflowPosition position; + private final Optional inputProcessor; + private final Optional outputProcessor; + private final Optional contextProcessor; + private final Optional inputSchemaValidator; + private final Optional outputSchemaValidator; + private final Optional contextSchemaValidator; - private Optional inputProcessor = Optional.empty(); - private Optional outputProcessor = Optional.empty(); - private Optional contextProcessor = Optional.empty(); - private Optional inputSchemaValidator = Optional.empty(); - private Optional outputSchemaValidator = Optional.empty(); - private Optional contextSchemaValidator = Optional.empty(); + public abstract static class AbstractTaskExecutorBuilder + implements TaskExecutorBuilder { + private Optional inputProcessor = Optional.empty(); + private Optional outputProcessor = Optional.empty(); + private Optional contextProcessor = Optional.empty(); + private Optional inputSchemaValidator = Optional.empty(); + private Optional outputSchemaValidator = Optional.empty(); + private Optional contextSchemaValidator = Optional.empty(); + protected final WorkflowPosition position; + protected final T task; + protected final String taskName; + protected final WorkflowApplication application; + protected final Workflow workflow; + protected final ResourceLoader resourceLoader; - protected AbstractTaskExecutor(T task, WorkflowDefinition definition) { - this.task = task; - buildInputProcessors(definition); - buildOutputProcessors(definition); - buildContextProcessors(definition); - } + private TaskExecutor instance; - private void buildInputProcessors(WorkflowDefinition definition) { - if (task.getInput() != null) { - Input input = task.getInput(); - this.inputProcessor = buildWorkflowFilter(definition.expressionFactory(), input.getFrom()); - 
this.inputSchemaValidator = - getSchemaValidator( - definition.validatorFactory(), definition.resourceLoader(), input.getSchema()); + protected AbstractTaskExecutorBuilder( + WorkflowPosition position, + T task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + this.workflow = workflow; + this.taskName = position.last().toString(); + this.position = position; + this.task = task; + this.application = application; + this.resourceLoader = resourceLoader; + if (task.getInput() != null) { + Input input = task.getInput(); + this.inputProcessor = buildWorkflowFilter(application.expressionFactory(), input.getFrom()); + this.inputSchemaValidator = + getSchemaValidator(application.validatorFactory(), resourceLoader, input.getSchema()); + } + if (task.getOutput() != null) { + Output output = task.getOutput(); + this.outputProcessor = buildWorkflowFilter(application.expressionFactory(), output.getAs()); + this.outputSchemaValidator = + getSchemaValidator(application.validatorFactory(), resourceLoader, output.getSchema()); + } + if (task.getExport() != null) { + Export export = task.getExport(); + if (export.getAs() != null) { + this.contextProcessor = + buildWorkflowFilter(application.expressionFactory(), export.getAs()); + } + this.contextSchemaValidator = + getSchemaValidator(application.validatorFactory(), resourceLoader, export.getSchema()); + } } - } - private void buildOutputProcessors(WorkflowDefinition definition) { - if (task.getOutput() != null) { - Output output = task.getOutput(); - this.outputProcessor = buildWorkflowFilter(definition.expressionFactory(), output.getAs()); - this.outputSchemaValidator = - getSchemaValidator( - definition.validatorFactory(), definition.resourceLoader(), output.getSchema()); + protected final TransitionInfoBuilder next( + FlowDirective flowDirective, Map> connections) { + if (flowDirective == null) { + return TransitionInfoBuilder.of(next(connections)); + } + if (flowDirective.getFlowDirectiveEnum() != null) { + switch (flowDirective.getFlowDirectiveEnum()) { + case CONTINUE: + return TransitionInfoBuilder.of(next(connections)); + case END: + return TransitionInfoBuilder.end(); + case EXIT: + return TransitionInfoBuilder.exit(); + } + } + return TransitionInfoBuilder.of(connections.get(flowDirective.getString())); } - } - private void buildContextProcessors(WorkflowDefinition definition) { - if (task.getExport() != null) { - Export export = task.getExport(); - if (export.getAs() != null) { - this.contextProcessor = buildWorkflowFilter(definition.expressionFactory(), export.getAs()); + private TaskExecutorBuilder next(Map> connections) { + Iterator> iter = connections.values().iterator(); + TaskExecutorBuilder next = null; + while (iter.hasNext()) { + TaskExecutorBuilder item = iter.next(); + if (item == this) { + next = iter.hasNext() ? 
iter.next() : null; + break; + } } - this.contextSchemaValidator = - getSchemaValidator( - definition.validatorFactory(), definition.resourceLoader(), export.getSchema()); + return next; } + + public TaskExecutor build() { + if (instance == null) { + instance = buildInstance(); + } + return instance; + } + + protected abstract TaskExecutor buildInstance(); } - @Override - public TaskContext apply( - WorkflowContext workflowContext, TaskContext parentContext, JsonNode input) { - TaskContext taskContext = new TaskContext<>(input, parentContext, task); - if (TaskExecutorHelper.isActive(workflowContext)) { + protected AbstractTaskExecutor(AbstractTaskExecutorBuilder builder) { + this.task = builder.task; + this.taskName = builder.taskName; + this.position = builder.position; + this.inputProcessor = builder.inputProcessor; + this.outputProcessor = builder.outputProcessor; + this.contextProcessor = builder.contextProcessor; + this.inputSchemaValidator = builder.inputSchemaValidator; + this.outputSchemaValidator = builder.outputSchemaValidator; + this.contextSchemaValidator = builder.contextSchemaValidator; + } - workflowContext - .definition() - .listeners() - .forEach(l -> l.onTaskStarted(parentContext.position(), task)); + protected final CompletableFuture executeNext( + CompletableFuture future, WorkflowContext workflow) { + return future.thenCompose( + t -> { + TransitionInfo transition = t.transition(); + if (transition.isEndNode()) { + workflow.instance().status(WorkflowStatus.COMPLETED); + } else if (transition.next() != null) { + return transition.next().apply(workflow, t.parent(), t.output()); + } + return CompletableFuture.completedFuture(t); + }); + } - inputSchemaValidator.ifPresent(s -> s.validate(taskContext.rawInput())); - inputProcessor.ifPresent( - p -> taskContext.input(p.apply(workflowContext, taskContext, taskContext.rawInput()))); - internalExecute(workflowContext, taskContext); - outputProcessor.ifPresent( - p -> taskContext.output(p.apply(workflowContext, taskContext, taskContext.rawOutput()))); - outputSchemaValidator.ifPresent(s -> s.validate(taskContext.output())); - contextProcessor.ifPresent( - p -> - workflowContext.context( - p.apply(workflowContext, taskContext, workflowContext.context()))); - contextSchemaValidator.ifPresent(s -> s.validate(workflowContext.context())); - taskContext.completedAt(Instant.now()); - workflowContext - .definition() - .listeners() - .forEach(l -> l.onTaskEnded(parentContext.position(), task)); + @Override + public CompletableFuture apply( + WorkflowContext workflowContext, Optional parentContext, JsonNode input) { + TaskContext taskContext = new TaskContext(input, position, parentContext, taskName, task); + CompletableFuture completable = CompletableFuture.completedFuture(taskContext); + if (!TaskExecutorHelper.isActive(workflowContext)) { + return completable; } - return taskContext; + return executeNext( + completable + .thenApply( + t -> { + workflowContext + .definition() + .listeners() + .forEach(l -> l.onTaskStarted(position, task)); + inputSchemaValidator.ifPresent(s -> s.validate(t.rawInput())); + inputProcessor.ifPresent( + p -> taskContext.input(p.apply(workflowContext, t, t.rawInput()))); + return t; + }) + .thenCompose(t -> execute(workflowContext, t)) + .thenApply( + t -> { + outputProcessor.ifPresent( + p -> t.output(p.apply(workflowContext, t, t.rawOutput()))); + outputSchemaValidator.ifPresent(s -> s.validate(t.output())); + contextProcessor.ifPresent( + p -> + workflowContext.context( + p.apply(workflowContext, t, 
workflowContext.context()))); + contextSchemaValidator.ifPresent(s -> s.validate(workflowContext.context())); + t.completedAt(Instant.now()); + workflowContext + .definition() + .listeners() + .forEach(l -> l.onTaskEnded(position, task)); + return t; + }), + workflowContext); } - protected abstract void internalExecute(WorkflowContext workflow, TaskContext taskContext); + protected abstract CompletableFuture execute( + WorkflowContext workflow, TaskContext taskContext); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java index 535057fa..2a3d1ae9 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java @@ -15,23 +15,51 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.TaskBase; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.concurrent.CompletableFuture; -public class CallTaskExecutor extends AbstractTaskExecutor { +public class CallTaskExecutor extends RegularTaskExecutor { private final CallableTask callable; - protected CallTaskExecutor(T task, WorkflowDefinition definition, CallableTask callable) { - super(task, definition); - this.callable = callable; - callable.init(task, definition); + public static class CallTaskExecutorBuilder + extends RegularTaskExecutorBuilder { + private CallableTask callable; + + protected CallTaskExecutorBuilder( + WorkflowPosition position, + T task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader, + CallableTask callable) { + super(position, task, workflow, application, resourceLoader); + this.callable = callable; + callable.init(task, application, resourceLoader); + } + + @Override + public TaskExecutor buildInstance() { + return new CallTaskExecutor(this); + } + } + + protected CallTaskExecutor(CallTaskExecutorBuilder builder) { + super(builder); + this.callable = builder.callable; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - taskContext.rawOutput(callable.apply(workflow, taskContext, taskContext.input())); + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + return callable.apply(workflow, taskContext, taskContext.input()); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java index ffb94912..ecff0662 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java @@ -18,13 +18,16 @@ import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import 
io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.concurrent.CompletableFuture; public interface CallableTask { - void init(T task, WorkflowDefinition definition); + void init(T task, WorkflowApplication application, ResourceLoader loader); - JsonNode apply(WorkflowContext workflowContext, TaskContext taskContext, JsonNode input); + CompletableFuture apply( + WorkflowContext workflowContext, TaskContext taskContext, JsonNode input); boolean accept(Class clazz); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java index e7dd07db..0499fced 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java @@ -23,7 +23,21 @@ import io.serverlessworkflow.api.types.CallTask; import io.serverlessworkflow.api.types.Task; import io.serverlessworkflow.api.types.TaskBase; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.executors.CallTaskExecutor.CallTaskExecutorBuilder; +import io.serverlessworkflow.impl.executors.DoExecutor.DoExecutorBuilder; +import io.serverlessworkflow.impl.executors.EmitExecutor.EmitExecutorBuilder; +import io.serverlessworkflow.impl.executors.ForExecutor.ForExecutorBuilder; +import io.serverlessworkflow.impl.executors.ForkExecutor.ForkExecutorBuilder; +import io.serverlessworkflow.impl.executors.ListenExecutor.ListenExecutorBuilder; +import io.serverlessworkflow.impl.executors.RaiseExecutor.RaiseExecutorBuilder; +import io.serverlessworkflow.impl.executors.SetExecutor.SetExecutorBuilder; +import io.serverlessworkflow.impl.executors.SwitchExecutor.SwitchExecutorBuilder; +import io.serverlessworkflow.impl.executors.TryExecutor.TryExecutorBuilder; +import io.serverlessworkflow.impl.executors.WaitExecutor.WaitExecutorBuilder; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.util.ServiceLoader; import java.util.ServiceLoader.Provider; @@ -39,42 +53,86 @@ protected DefaultTaskExecutorFactory() {} private ServiceLoader callTasks = ServiceLoader.load(CallableTask.class); - public TaskExecutor getTaskExecutor( - Task task, WorkflowDefinition definition) { + @Override + public TaskExecutorBuilder getTaskExecutor( + WorkflowPosition position, + Task task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { if (task.getCallTask() != null) { CallTask callTask = task.getCallTask(); if (callTask.getCallHTTP() != null) { - return new CallTaskExecutor<>( - callTask.getCallHTTP(), definition, findCallTask(CallHTTP.class)); + return new CallTaskExecutorBuilder<>( + position, + callTask.getCallHTTP(), + workflow, + application, + resourceLoader, + findCallTask(CallHTTP.class)); } else if (callTask.getCallAsyncAPI() != null) { - return new CallTaskExecutor<>( - callTask.getCallAsyncAPI(), definition, findCallTask(CallAsyncAPI.class)); + return new CallTaskExecutorBuilder<>( + position, + callTask.getCallAsyncAPI(), + workflow, + application, + resourceLoader, + findCallTask(CallAsyncAPI.class)); } else if (callTask.getCallGRPC() 
!= null) { - return new CallTaskExecutor<>( - callTask.getCallGRPC(), definition, findCallTask(CallGRPC.class)); + return new CallTaskExecutorBuilder<>( + position, + callTask.getCallGRPC(), + workflow, + application, + resourceLoader, + findCallTask(CallGRPC.class)); } else if (callTask.getCallOpenAPI() != null) { - return new CallTaskExecutor<>( - callTask.getCallOpenAPI(), definition, findCallTask(CallOpenAPI.class)); + return new CallTaskExecutorBuilder<>( + position, + callTask.getCallOpenAPI(), + workflow, + application, + resourceLoader, + findCallTask(CallOpenAPI.class)); } else if (callTask.getCallFunction() != null) { - return new CallTaskExecutor<>( - callTask.getCallFunction(), definition, findCallTask(CallFunction.class)); + return new CallTaskExecutorBuilder<>( + position, + callTask.getCallFunction(), + workflow, + application, + resourceLoader, + findCallTask(CallFunction.class)); } } else if (task.getSwitchTask() != null) { - return new SwitchExecutor(task.getSwitchTask(), definition); + return new SwitchExecutorBuilder( + position, task.getSwitchTask(), workflow, application, resourceLoader); } else if (task.getDoTask() != null) { - return new DoExecutor(task.getDoTask(), definition); + return new DoExecutorBuilder( + position, task.getDoTask(), workflow, application, resourceLoader); } else if (task.getSetTask() != null) { - return new SetExecutor(task.getSetTask(), definition); + return new SetExecutorBuilder( + position, task.getSetTask(), workflow, application, resourceLoader); } else if (task.getForTask() != null) { - return new ForExecutor(task.getForTask(), definition); + return new ForExecutorBuilder( + position, task.getForTask(), workflow, application, resourceLoader); } else if (task.getRaiseTask() != null) { - return new RaiseExecutor(task.getRaiseTask(), definition); + return new RaiseExecutorBuilder( + position, task.getRaiseTask(), workflow, application, resourceLoader); } else if (task.getTryTask() != null) { - return new TryExecutor(task.getTryTask(), definition); + return new TryExecutorBuilder( + position, task.getTryTask(), workflow, application, resourceLoader); } else if (task.getForkTask() != null) { - return new ForkExecutor(task.getForkTask(), definition); + return new ForkExecutorBuilder( + position, task.getForkTask(), workflow, application, resourceLoader); } else if (task.getWaitTask() != null) { - return new WaitExecutor(task.getWaitTask(), definition); + return new WaitExecutorBuilder( + position, task.getWaitTask(), workflow, application, resourceLoader); + } else if (task.getListenTask() != null) { + return new ListenExecutorBuilder( + position, task.getListenTask(), workflow, application, resourceLoader); + } else if (task.getEmitTask() != null) { + return new EmitExecutorBuilder( + position, task.getEmitTask(), workflow, application, resourceLoader); } throw new UnsupportedOperationException(task.get().getClass().getName() + " not supported yet"); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java index c5dbc4fd..a35e4a87 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java @@ -15,19 +15,51 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.DoTask; +import io.serverlessworkflow.api.types.Workflow; import 
io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; -public class DoExecutor extends AbstractTaskExecutor { +public class DoExecutor extends RegularTaskExecutor { - protected DoExecutor(DoTask task, WorkflowDefinition definition) { - super(task, definition); + private final TaskExecutor taskExecutor; + + public static class DoExecutorBuilder extends RegularTaskExecutorBuilder { + private TaskExecutor taskExecutor; + + protected DoExecutorBuilder( + WorkflowPosition position, + DoTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + taskExecutor = + TaskExecutorHelper.createExecutorList( + position, task.getDo(), workflow, application, resourceLoader); + } + + @Override + public TaskExecutor buildInstance() { + return new DoExecutor(this); + } + } + + private DoExecutor(DoExecutorBuilder builder) { + super(builder); + this.taskExecutor = builder.taskExecutor; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - TaskExecutorHelper.processTaskList(task.getDo(), workflow, taskContext); + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + return TaskExecutorHelper.processTaskList( + taskExecutor, workflow, Optional.of(taskContext), taskContext.input()); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java new file mode 100644 index 00000000..7a8eb09d --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java @@ -0,0 +1,217 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl.executors; + +import com.fasterxml.jackson.databind.JsonNode; +import io.cloudevents.CloudEvent; +import io.cloudevents.core.builder.CloudEventBuilder; +import io.cloudevents.jackson.JsonCloudEventData; +import io.serverlessworkflow.api.types.EmitTask; +import io.serverlessworkflow.api.types.EventData; +import io.serverlessworkflow.api.types.EventDataschema; +import io.serverlessworkflow.api.types.EventProperties; +import io.serverlessworkflow.api.types.EventSource; +import io.serverlessworkflow.api.types.EventTime; +import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.ExpressionHolder; +import io.serverlessworkflow.impl.StringFilter; +import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowContext; +import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.events.CloudEventUtils; +import io.serverlessworkflow.impl.expressions.ExpressionFactory; +import io.serverlessworkflow.impl.json.JsonUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.net.URI; +import java.time.OffsetDateTime; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; + +public class EmitExecutor extends RegularTaskExecutor { + + private final EventPropertiesBuilder props; + + public static class EmitExecutorBuilder extends RegularTaskExecutorBuilder { + + private EventPropertiesBuilder eventBuilder; + + protected EmitExecutorBuilder( + WorkflowPosition position, + EmitTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + this.eventBuilder = + EventPropertiesBuilder.build( + task.getEmit().getEvent().getWith(), application.expressionFactory()); + } + + @Override + public TaskExecutor buildInstance() { + return new EmitExecutor(this); + } + } + + private EmitExecutor(EmitExecutorBuilder builder) { + super(builder); + this.props = builder.eventBuilder; + } + + @Override + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + return workflow + .definition() + .application() + .eventPublisher() + .publish(buildCloudEvent(workflow, taskContext)) + .thenApply(v -> taskContext.input()); + } + + private CloudEvent buildCloudEvent(WorkflowContext workflow, TaskContext taskContext) { + io.cloudevents.core.v1.CloudEventBuilder ceBuilder = CloudEventBuilder.v1(); + ceBuilder.withId( + props + .idFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .orElse(UUID.randomUUID().toString())); + ceBuilder.withSource( + props + .sourceFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .map(URI::create) + .orElse(URI.create("reference-impl"))); + ceBuilder.withType( + props + .typeFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .orElseThrow( + () -> new IllegalArgumentException("Type is required for emitting events"))); + props + .timeFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .ifPresent(value -> ceBuilder.withTime(value)); + props + .subjectFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .ifPresent(value -> ceBuilder.withSubject(value)); + props + .dataSchemaFilter() + .map(filter -> filter.apply(workflow, taskContext)) 
+ .ifPresent(value -> ceBuilder.withDataSchema(URI.create(value))); + props + .contentTypeFilter() + .map(filter -> filter.apply(workflow, taskContext)) + .ifPresent(value -> ceBuilder.withDataContentType(value)); + props + .dataFilter() + .map(filter -> filter.apply(workflow, taskContext, taskContext.input())) + .ifPresent(value -> ceBuilder.withData(JsonCloudEventData.wrap(value))); + props + .additionalFilter() + .map(filter -> filter.apply(workflow, taskContext, taskContext.input())) + .ifPresent( + value -> + value + .fields() + .forEachRemaining( + e -> CloudEventUtils.addExtension(ceBuilder, e.getKey(), e.getValue()))); + return ceBuilder.build(); + } + + private static record EventPropertiesBuilder( + Optional idFilter, + Optional sourceFilter, + Optional subjectFilter, + Optional contentTypeFilter, + Optional typeFilter, + Optional dataSchemaFilter, + Optional> timeFilter, + Optional dataFilter, + Optional additionalFilter) { + + public static EventPropertiesBuilder build( + EventProperties properties, ExpressionFactory exprFactory) { + Optional idFilter = buildFilter(exprFactory, properties.getId()); + EventSource source = properties.getSource(); + Optional sourceFilter = + source == null + ? Optional.empty() + : Optional.of( + WorkflowUtils.buildStringFilter( + exprFactory, + source.getRuntimeExpression(), + WorkflowUtils.toString(source.getUriTemplate()))); + Optional subjectFilter = buildFilter(exprFactory, properties.getSubject()); + Optional contentTypeFilter = + buildFilter(exprFactory, properties.getDatacontenttype()); + Optional typeFilter = buildFilter(exprFactory, properties.getType()); + EventDataschema dataSchema = properties.getDataschema(); + Optional dataSchemaFilter = + dataSchema == null + ? Optional.empty() + : Optional.of( + WorkflowUtils.buildStringFilter( + exprFactory, + dataSchema.getExpressionDataSchema(), + WorkflowUtils.toString(dataSchema.getLiteralDataSchema()))); + EventTime time = properties.getTime(); + Optional> timeFilter = + time == null + ? Optional.empty() + : Optional.of( + WorkflowUtils.buildExpressionHolder( + exprFactory, + time.getRuntimeExpression(), + CloudEventUtils.toOffset(time.getLiteralTime()), + JsonUtils::toOffsetDateTime)); + EventData data = properties.getData(); + Optional dataFilter = + properties.getData() == null + ? Optional.empty() + : Optional.of( + WorkflowUtils.buildWorkflowFilter( + exprFactory, data.getRuntimeExpression(), data.getObject())); + Map ceAttrs = properties.getAdditionalProperties(); + Optional additionalFilter = + ceAttrs == null || ceAttrs.isEmpty() + ? Optional.empty() + : Optional.of(WorkflowUtils.buildWorkflowFilter(exprFactory, null, ceAttrs)); + return new EventPropertiesBuilder( + idFilter, + sourceFilter, + subjectFilter, + contentTypeFilter, + typeFilter, + dataSchemaFilter, + timeFilter, + dataFilter, + additionalFilter); + } + + private static Optional buildFilter(ExpressionFactory exprFactory, String str) { + return str == null + ? 
Optional.empty() + : Optional.of(WorkflowUtils.buildStringFilter(exprFactory, str)); + } + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java index cb4ecec0..8f7e04f1 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java @@ -18,32 +18,67 @@ import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.ForTask; import io.serverlessworkflow.api.types.ForTaskConfiguration; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.util.Iterator; import java.util.Optional; +import java.util.concurrent.CompletableFuture; -public class ForExecutor extends AbstractTaskExecutor { +public class ForExecutor extends RegularTaskExecutor { private final WorkflowFilter collectionExpr; private final Optional whileExpr; + private final TaskExecutor taskExecutor; - protected ForExecutor(ForTask task, WorkflowDefinition definition) { - super(task, definition); - ForTaskConfiguration forConfig = task.getFor(); - this.collectionExpr = - WorkflowUtils.buildWorkflowFilter(definition.expressionFactory(), forConfig.getIn()); - this.whileExpr = WorkflowUtils.optionalFilter(definition.expressionFactory(), task.getWhile()); + public static class ForExecutorBuilder extends RegularTaskExecutorBuilder { + private WorkflowFilter collectionExpr; + private Optional whileExpr; + private TaskExecutor taskExecutor; + + protected ForExecutorBuilder( + WorkflowPosition position, + ForTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + ForTaskConfiguration forConfig = task.getFor(); + this.collectionExpr = + WorkflowUtils.buildWorkflowFilter(application.expressionFactory(), forConfig.getIn()); + this.whileExpr = + WorkflowUtils.optionalFilter(application.expressionFactory(), task.getWhile()); + this.taskExecutor = + TaskExecutorHelper.createExecutorList( + position, task.getDo(), workflow, application, resourceLoader); + } + + @Override + public TaskExecutor buildInstance() { + return new ForExecutor(this); + } + } + + protected ForExecutor(ForExecutorBuilder builder) { + super(builder); + this.collectionExpr = builder.collectionExpr; + this.whileExpr = builder.whileExpr; + this.taskExecutor = builder.taskExecutor; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { Iterator iter = collectionExpr.apply(workflow, taskContext, taskContext.input()).iterator(); int i = 0; + CompletableFuture future = CompletableFuture.completedFuture(taskContext.input()); while (iter.hasNext() && whileExpr .map(w -> w.apply(workflow, taskContext, taskContext.rawOutput())) @@ -52,7 +87,12 @@ protected void 
internalExecute(WorkflowContext workflow, TaskContext ta JsonNode item = iter.next(); taskContext.variables().put(task.getFor().getEach(), item); taskContext.variables().put(task.getFor().getAt(), i++); - TaskExecutorHelper.processTaskList(task.getDo(), workflow, taskContext); + future = + future.thenCompose( + input -> + TaskExecutorHelper.processTaskList( + taskExecutor, workflow, Optional.of(taskContext), input)); } + return future; } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java index e0ce3b02..85bd3f22 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java @@ -16,95 +16,95 @@ package io.serverlessworkflow.impl.executors; import com.fasterxml.jackson.databind.JsonNode; -import io.serverlessworkflow.api.types.FlowDirectiveEnum; import io.serverlessworkflow.api.types.ForkTask; import io.serverlessworkflow.api.types.ForkTaskConfiguration; -import io.serverlessworkflow.api.types.TaskItem; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; -import io.serverlessworkflow.impl.WorkflowStatus; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder; import io.serverlessworkflow.impl.json.JsonUtils; -import java.lang.reflect.UndeclaredThrowableException; -import java.util.ArrayList; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; +import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -public class ForkExecutor extends AbstractTaskExecutor { +public class ForkExecutor extends RegularTaskExecutor { - private static final Logger logger = LoggerFactory.getLogger(ForkExecutor.class); private final ExecutorService service; + private final Map> taskExecutors; + private final boolean compete; - protected ForkExecutor(ForkTask task, WorkflowDefinition definition) { - super(task, definition); - service = definition.executorService(); - } + public static class ForkExecutorBuilder extends RegularTaskExecutorBuilder { - @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - ForkTaskConfiguration forkConfig = task.getFork(); + private final Map> taskExecutors; + private final boolean compete; - if (!forkConfig.getBranches().isEmpty()) { - Map>> futures = new HashMap<>(); - int index = 0; - for (TaskItem item : forkConfig.getBranches()) { - final int i = index++; - futures.put( - item.getName(), - service.submit(() -> executeBranch(workflow, taskContext.copy(), item, i))); - } - List>> results = new ArrayList<>(); - for (Map.Entry>> entry : futures.entrySet()) { - try { - results.add(Map.entry(entry.getKey(), entry.getValue().get())); - } catch (ExecutionException ex) { - Throwable cause = ex.getCause(); - if (cause instanceof RuntimeException) 
{ - throw (RuntimeException) cause; - } else { - throw new UndeclaredThrowableException(ex); - } - } catch (InterruptedException ex) { - logger.warn("Branch {} was interrupted, no result will be recorded", entry.getKey(), ex); - } - } - if (!results.isEmpty()) { - Stream>> sortedStream = - results.stream() - .sorted( - (arg1, arg2) -> - arg1.getValue().completedAt().compareTo(arg2.getValue().completedAt())); - taskContext.rawOutput( - forkConfig.isCompete() - ? sortedStream.map(e -> e.getValue().output()).findFirst().orElseThrow() - : sortedStream - .map( - e -> - JsonUtils.mapper() - .createObjectNode() - .set(e.getKey(), e.getValue().output())) - .collect(JsonUtils.arrayNodeCollector())); - } + protected ForkExecutorBuilder( + WorkflowPosition position, + ForkTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + ForkTaskConfiguration forkConfig = task.getFork(); + this.taskExecutors = + TaskExecutorHelper.createBranchList( + position, forkConfig.getBranches(), workflow, application, resourceLoader); + this.compete = forkConfig.isCompete(); } + + @Override + public TaskExecutor buildInstance() { + return new ForkExecutor(this); + } + } + + protected ForkExecutor(ForkExecutorBuilder builder) { + super(builder); + service = builder.application.executorService(); + this.taskExecutors = builder.taskExecutors; + this.compete = builder.compete; } - private TaskContext executeBranch( - WorkflowContext workflow, TaskContext taskContext, TaskItem taskItem, int index) { - taskContext.position().addIndex(index); - TaskContext result = - TaskExecutorHelper.executeTask(workflow, taskContext, taskItem, taskContext.input()); - if (result.flowDirective() != null - && result.flowDirective().getFlowDirectiveEnum() == FlowDirectiveEnum.END) { - workflow.instance().status(WorkflowStatus.COMPLETED); + @Override + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + Map> futures = new HashMap<>(); + CompletableFuture initial = CompletableFuture.completedFuture(taskContext); + for (Map.Entry> entry : taskExecutors.entrySet()) { + futures.put( + entry.getKey(), + initial.thenComposeAsync( + t -> entry.getValue().apply(workflow, Optional.of(t), t.input()), service)); } - taskContext.position().back(); - return result; + return CompletableFuture.allOf( + futures.values().toArray(new CompletableFuture[futures.size()])) + .thenApply( + i -> + combine( + futures.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, e -> e.getValue().join())))); + } + + private JsonNode combine(Map futures) { + + Stream> sortedStream = + futures.entrySet().stream() + .sorted( + (arg1, arg2) -> + arg1.getValue().completedAt().compareTo(arg2.getValue().completedAt())); + return compete + ? 
sortedStream.map(e -> e.getValue().output()).findFirst().orElseThrow() + : sortedStream + .map( + e -> JsonUtils.mapper().createObjectNode().set(e.getKey(), e.getValue().output())) + .collect(JsonUtils.arrayNodeCollector()); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java new file mode 100644 index 00000000..e351bae2 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java @@ -0,0 +1,314 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.executors; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import io.cloudevents.CloudEvent; +import io.serverlessworkflow.api.types.AllEventConsumptionStrategy; +import io.serverlessworkflow.api.types.AnyEventConsumptionStrategy; +import io.serverlessworkflow.api.types.EventConsumptionStrategy; +import io.serverlessworkflow.api.types.EventFilter; +import io.serverlessworkflow.api.types.ListenTask; +import io.serverlessworkflow.api.types.ListenTaskConfiguration; +import io.serverlessworkflow.api.types.ListenTaskConfiguration.ListenAndReadAs; +import io.serverlessworkflow.api.types.ListenTo; +import io.serverlessworkflow.api.types.OneEventConsumptionStrategy; +import io.serverlessworkflow.api.types.SubscriptionIterator; +import io.serverlessworkflow.api.types.Until; +import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowContext; +import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.WorkflowStatus; +import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.events.CloudEventUtils; +import io.serverlessworkflow.impl.events.EventConsumer; +import io.serverlessworkflow.impl.events.EventRegistration; +import io.serverlessworkflow.impl.events.EventRegistrationBuilder; +import io.serverlessworkflow.impl.json.JsonUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.stream.Collectors; + +public abstract class ListenExecutor extends RegularTaskExecutor { + + protected final EventRegistrationBuilderCollection regBuilders; + protected final Optional> loop; + protected final Function converter; + protected final EventConsumer eventConsumer; + + private static record EventRegistrationBuilderCollection( + Collection registrations, boolean isAnd) {} + + public static class ListenExecutorBuilder extends 
RegularTaskExecutorBuilder { + + private EventRegistrationBuilderCollection registrations; + private WorkflowFilter until; + private EventRegistrationBuilderCollection untilRegistrations; + private TaskExecutor loop; + private Function converter = this::defaultCEConverter; + + private EventRegistrationBuilderCollection allEvents(AllEventConsumptionStrategy allStrategy) { + return new EventRegistrationBuilderCollection(from(allStrategy.getAll()), true); + } + + private EventRegistrationBuilderCollection anyEvents(AnyEventConsumptionStrategy anyStrategy) { + List eventFilters = anyStrategy.getAny(); + return new EventRegistrationBuilderCollection( + eventFilters.isEmpty() ? registerToAll() : from(eventFilters), false); + } + + private EventRegistrationBuilderCollection oneEvent(OneEventConsumptionStrategy oneStrategy) { + return new EventRegistrationBuilderCollection(List.of(from(oneStrategy.getOne())), true); + } + + protected ListenExecutorBuilder( + WorkflowPosition position, + ListenTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + ListenTaskConfiguration listen = task.getListen(); + ListenTo to = listen.getTo(); + if (to.getAllEventConsumptionStrategy() != null) { + registrations = allEvents(to.getAllEventConsumptionStrategy()); + } else if (to.getAnyEventConsumptionStrategy() != null) { + AnyEventConsumptionStrategy any = to.getAnyEventConsumptionStrategy(); + registrations = anyEvents(any); + Until untilDesc = any.getUntil(); + if (untilDesc != null) { + if (untilDesc.getAnyEventUntilCondition() != null) { + until = + WorkflowUtils.buildWorkflowFilter( + application.expressionFactory(), untilDesc.getAnyEventUntilCondition()); + } else if (untilDesc.getAnyEventUntilConsumed() != null) { + EventConsumptionStrategy strategy = untilDesc.getAnyEventUntilConsumed(); + if (strategy.getAllEventConsumptionStrategy() != null) { + untilRegistrations = allEvents(strategy.getAllEventConsumptionStrategy()); + } else if (strategy.getAnyEventConsumptionStrategy() != null) { + untilRegistrations = anyEvents(strategy.getAnyEventConsumptionStrategy()); + } else if (strategy.getOneEventConsumptionStrategy() != null) { + untilRegistrations = oneEvent(strategy.getOneEventConsumptionStrategy()); + } + } + } + } else if (to.getOneEventConsumptionStrategy() != null) { + registrations = oneEvent(to.getOneEventConsumptionStrategy()); + } + SubscriptionIterator forEach = task.getForeach(); + if (forEach != null) { + loop = + TaskExecutorHelper.createExecutorList( + position, forEach.getDo(), workflow, application, resourceLoader); + } + ListenAndReadAs readAs = listen.getRead(); + if (readAs != null) { + switch (readAs) { + case ENVELOPE: + converter = CloudEventUtils::toJsonNode; + default: + case DATA: + converter = this::defaultCEConverter; + break; + } + } + } + + private Collection registerToAll() { + return application.eventConsumer().listenToAll(application); + } + + private JsonNode defaultCEConverter(CloudEvent ce) { + return CloudEventUtils.toJsonNode(ce.getData()); + } + + private Collection from(List filters) { + return filters.stream().map(this::from).collect(Collectors.toList()); + } + + private EventRegistrationBuilder from(EventFilter filter) { + return application.eventConsumer().listen(filter, application); + } + + @Override + public TaskExecutor buildInstance() { + return registrations.isAnd() ? 
new AndListenExecutor(this) : new OrListenExecutor(this); + } + } + + public static class AndListenExecutor extends ListenExecutor { + + public AndListenExecutor(ListenExecutorBuilder builder) { + super(builder); + } + + protected void internalProcessCe( + JsonNode node, + ArrayNode arrayNode, + WorkflowContext workflow, + TaskContext taskContext, + CompletableFuture future) { + arrayNode.add(node); + future.complete(node); + } + } + + public static class OrListenExecutor extends ListenExecutor { + + private final Optional until; + private final EventRegistrationBuilderCollection untilRegBuilders; + + public OrListenExecutor(ListenExecutorBuilder builder) { + super(builder); + this.until = Optional.ofNullable(builder.until); + this.untilRegBuilders = builder.untilRegistrations; + } + + @Override + protected CompletableFuture buildFuture( + EventRegistrationBuilderCollection regCollection, + Collection registrations, + BiConsumer> consumer) { + CompletableFuture combinedFuture = + super.buildFuture(regCollection, registrations, consumer); + if (untilRegBuilders != null) { + Collection untilRegistrations = new ArrayList<>(); + CompletableFuture untilFuture = + combine(untilRegBuilders, untilRegistrations, (ce, f) -> f.complete(null)); + untilFuture.thenAccept( + v -> { + combinedFuture.complete(null); + untilRegistrations.forEach(reg -> eventConsumer.unregister(reg)); + }); + } + return combinedFuture; + } + + protected void internalProcessCe( + JsonNode node, + ArrayNode arrayNode, + WorkflowContext workflow, + TaskContext taskContext, + CompletableFuture future) { + arrayNode.add(node); + if ((until.isEmpty() + || until + .filter(u -> u.apply(workflow, taskContext, arrayNode).asBoolean()) + .isPresent()) + && untilRegBuilders == null) { + future.complete(node); + } + } + } + + protected abstract void internalProcessCe( + JsonNode node, + ArrayNode arrayNode, + WorkflowContext workflow, + TaskContext taskContext, + CompletableFuture future); + + @Override + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + ArrayNode output = JsonUtils.mapper().createArrayNode(); + Collection registrations = new ArrayList<>(); + workflow.instance().status(WorkflowStatus.WAITING); + return buildFuture( + regBuilders, + registrations, + (BiConsumer>) + ((ce, future) -> + processCe(converter.apply(ce), output, workflow, taskContext, future))) + .thenApply( + v -> { + workflow.instance().status(WorkflowStatus.RUNNING); + registrations.forEach(reg -> eventConsumer.unregister(reg)); + return output; + }); + } + + protected CompletableFuture buildFuture( + EventRegistrationBuilderCollection regCollection, + Collection registrations, + BiConsumer> consumer) { + return combine(regCollection, registrations, consumer); + } + + protected final CompletableFuture combine( + EventRegistrationBuilderCollection regCollection, + Collection registrations, + BiConsumer> consumer) { + CompletableFuture[] futures = + regCollection.registrations().stream() + .map(reg -> toCompletable(reg, registrations, consumer)) + .toArray(size -> new CompletableFuture[size]); + return regCollection.isAnd() + ? 
CompletableFuture.allOf(futures) + : CompletableFuture.anyOf(futures); + } + + private CompletableFuture toCompletable( + EventRegistrationBuilder regBuilder, + Collection registrations, + BiConsumer> ceConsumer) { + final CompletableFuture future = new CompletableFuture<>(); + registrations.add( + eventConsumer.register(regBuilder, ce -> ceConsumer.accept((CloudEvent) ce, future))); + return future; + } + + private void processCe( + JsonNode node, + ArrayNode arrayNode, + WorkflowContext workflow, + TaskContext taskContext, + CompletableFuture future) { + loop.ifPresentOrElse( + t -> { + SubscriptionIterator forEach = task.getForeach(); + String item = forEach.getItem(); + if (item != null) { + taskContext.variables().put(item, node); + } + String at = forEach.getAt(); + if (at != null) { + taskContext.variables().put(at, arrayNode.size()); + } + TaskExecutorHelper.processTaskList(t, workflow, Optional.of(taskContext), node) + .thenAccept(n -> internalProcessCe(n, arrayNode, workflow, taskContext, future)); + }, + () -> internalProcessCe(node, arrayNode, workflow, taskContext, future)); + } + + protected ListenExecutor(ListenExecutorBuilder builder) { + super(builder); + this.eventConsumer = builder.application.eventConsumer(); + this.regBuilders = builder.registrations; + this.loop = Optional.ofNullable(builder.loop); + this.converter = builder.converter; + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RaiseExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RaiseExecutor.java index 1ddc315f..6dd43c2b 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RaiseExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RaiseExecutor.java @@ -15,90 +15,116 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.Error; import io.serverlessworkflow.api.types.ErrorInstance; import io.serverlessworkflow.api.types.ErrorType; import io.serverlessworkflow.api.types.RaiseTask; import io.serverlessworkflow.api.types.RaiseTaskError; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.StringFilter; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowError; import io.serverlessworkflow.impl.WorkflowException; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder; import io.serverlessworkflow.impl.expressions.ExpressionFactory; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.util.Map; import java.util.Optional; +import java.util.concurrent.CompletableFuture; import java.util.function.BiFunction; -public class RaiseExecutor extends AbstractTaskExecutor { +public class RaiseExecutor extends RegularTaskExecutor { - private final BiFunction, WorkflowError> errorBuilder; + private final BiFunction errorBuilder; - private final StringFilter typeFilter; - private final Optional instanceFilter; - private final StringFilter titleFilter; - private final StringFilter detailFilter; + public static class RaiseExecutorBuilder extends RegularTaskExecutorBuilder { - protected RaiseExecutor(RaiseTask task, WorkflowDefinition definition) { - 
super(task, definition); - RaiseTaskError raiseError = task.getRaise().getError(); - Error error = - raiseError.getRaiseErrorDefinition() != null - ? raiseError.getRaiseErrorDefinition() - : findError(definition, raiseError.getRaiseErrorReference()); - this.typeFilter = getTypeFunction(definition.expressionFactory(), error.getType()); - this.instanceFilter = getInstanceFunction(definition.expressionFactory(), error.getInstance()); - this.titleFilter = - WorkflowUtils.buildStringFilter(definition.expressionFactory(), error.getTitle()); - this.detailFilter = - WorkflowUtils.buildStringFilter(definition.expressionFactory(), error.getDetail()); - this.errorBuilder = (w, t) -> buildError(error, w, t); - } + private final BiFunction errorBuilder; + private final StringFilter typeFilter; + private final Optional instanceFilter; + private final StringFilter titleFilter; + private final StringFilter detailFilter; - private static Error findError(WorkflowDefinition definition, String raiseErrorReference) { - Map errorsMap = - definition.workflow().getUse().getErrors().getAdditionalProperties(); - Error error = errorsMap.get(raiseErrorReference); - if (error == null) { - throw new IllegalArgumentException("Error " + error + "is not defined in " + errorsMap); + protected RaiseExecutorBuilder( + WorkflowPosition position, + RaiseTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + RaiseTaskError raiseError = task.getRaise().getError(); + Error error = + raiseError.getRaiseErrorDefinition() != null + ? raiseError.getRaiseErrorDefinition() + : findError(raiseError.getRaiseErrorReference()); + this.typeFilter = getTypeFunction(application.expressionFactory(), error.getType()); + this.instanceFilter = + getInstanceFunction(application.expressionFactory(), error.getInstance()); + this.titleFilter = + WorkflowUtils.buildStringFilter(application.expressionFactory(), error.getTitle()); + this.detailFilter = + WorkflowUtils.buildStringFilter(application.expressionFactory(), error.getDetail()); + this.errorBuilder = (w, t) -> buildError(error, w, t); } - return error; - } - private WorkflowError buildError( - Error error, WorkflowContext context, TaskContext taskContext) { - return WorkflowError.error(typeFilter.apply(context, taskContext), error.getStatus()) - .instance( - instanceFilter - .map(f -> f.apply(context, taskContext)) - .orElseGet(() -> taskContext.position().jsonPointer())) - .title(titleFilter.apply(context, taskContext)) - .details(detailFilter.apply(context, taskContext)) - .build(); - } + private WorkflowError buildError( + Error error, WorkflowContext context, TaskContext taskContext) { + return WorkflowError.error(typeFilter.apply(context, taskContext), error.getStatus()) + .instance( + instanceFilter + .map(f -> f.apply(context, taskContext)) + .orElseGet(() -> taskContext.position().jsonPointer())) + .title(titleFilter.apply(context, taskContext)) + .details(detailFilter.apply(context, taskContext)) + .build(); + } + + private Optional getInstanceFunction( + ExpressionFactory expressionFactory, ErrorInstance errorInstance) { + return errorInstance != null + ? 
Optional.of( + WorkflowUtils.buildStringFilter( + expressionFactory, + errorInstance.getExpressionErrorInstance(), + errorInstance.getLiteralErrorInstance())) + : Optional.empty(); + } - private Optional getInstanceFunction( - ExpressionFactory expressionFactory, ErrorInstance errorInstance) { - return errorInstance != null - ? Optional.of( - WorkflowUtils.buildStringFilter( - expressionFactory, - errorInstance.getExpressionErrorInstance(), - errorInstance.getLiteralErrorInstance())) - : Optional.empty(); + private StringFilter getTypeFunction(ExpressionFactory expressionFactory, ErrorType type) { + return WorkflowUtils.buildStringFilter( + expressionFactory, + type.getExpressionErrorType(), + type.getLiteralErrorType().get().toString()); + } + + private Error findError(String raiseErrorReference) { + Map errorsMap = workflow.getUse().getErrors().getAdditionalProperties(); + Error error = errorsMap.get(raiseErrorReference); + if (error == null) { + throw new IllegalArgumentException("Error " + error + "is not defined in " + errorsMap); + } + return error; + } + + @Override + public TaskExecutor buildInstance() { + return new RaiseExecutor(this); + } } - private StringFilter getTypeFunction(ExpressionFactory expressionFactory, ErrorType type) { - return WorkflowUtils.buildStringFilter( - expressionFactory, - type.getExpressionErrorType(), - type.getLiteralErrorType().get().toString()); + protected RaiseExecutor(RaiseExecutorBuilder builder) { + super(builder); + this.errorBuilder = builder.errorBuilder; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { throw new WorkflowException(errorBuilder.apply(workflow, taskContext)); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RegularTaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RegularTaskExecutor.java new file mode 100644 index 00000000..24c1e841 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/RegularTaskExecutor.java @@ -0,0 +1,67 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl.executors; + +import com.fasterxml.jackson.databind.JsonNode; +import io.serverlessworkflow.api.types.TaskBase; +import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowContext; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +public abstract class RegularTaskExecutor extends AbstractTaskExecutor { + + protected final TransitionInfo transition; + + protected RegularTaskExecutor(RegularTaskExecutorBuilder builder) { + super(builder); + this.transition = TransitionInfo.build(builder.transition); + } + + public abstract static class RegularTaskExecutorBuilder + extends AbstractTaskExecutorBuilder { + + private TransitionInfoBuilder transition; + + protected RegularTaskExecutorBuilder( + WorkflowPosition position, + T task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + } + + public void connect(Map> connections) { + this.transition = next(task.getThen(), connections); + } + } + + protected CompletableFuture execute( + WorkflowContext workflow, TaskContext taskContext) { + CompletableFuture future = + internalExecute(workflow, taskContext) + .thenApply(node -> taskContext.rawOutput(node).transition(transition)); + return future; + } + + protected abstract CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext task); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SetExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SetExecutor.java index 0f0d999e..c5600891 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SetExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SetExecutor.java @@ -15,33 +15,56 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.SetTask; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.expressions.ExpressionUtils; import io.serverlessworkflow.impl.json.JsonUtils; -import io.serverlessworkflow.impl.json.MergeUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.util.Map; +import java.util.concurrent.CompletableFuture; -public class SetExecutor extends AbstractTaskExecutor { +public class SetExecutor extends RegularTaskExecutor { - private Map toBeSet; + private final Map toBeSet; - protected SetExecutor(SetTask task, WorkflowDefinition definition) { - super(task, definition); - this.toBeSet = - ExpressionUtils.buildExpressionMap( - task.getSet().getAdditionalProperties(), definition.expressionFactory()); + public static class SetExecutorBuilder extends RegularTaskExecutorBuilder { + + private final Map toBeSet; + + protected SetExecutorBuilder( + WorkflowPosition position, + SetTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, 
resourceLoader); + this.toBeSet = + ExpressionUtils.buildExpressionMap( + task.getSet().getAdditionalProperties(), application.expressionFactory()); + } + + @Override + public TaskExecutor buildInstance() { + return new SetExecutor(this); + } + } + + private SetExecutor(SetExecutorBuilder builder) { + super(builder); + this.toBeSet = builder.toBeSet; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - taskContext.rawOutput( - MergeUtils.merge( - JsonUtils.fromValue( - ExpressionUtils.evaluateExpressionMap( - toBeSet, workflow, taskContext, taskContext.input())), - taskContext.input())); + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + return CompletableFuture.completedFuture( + JsonUtils.fromValue( + ExpressionUtils.evaluateExpressionMap( + toBeSet, workflow, taskContext, taskContext.input()))); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SwitchExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SwitchExecutor.java index dee0cee7..70b127c4 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SwitchExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/SwitchExecutor.java @@ -19,43 +19,83 @@ import io.serverlessworkflow.api.types.SwitchCase; import io.serverlessworkflow.api.types.SwitchItem; import io.serverlessworkflow.api.types.SwitchTask; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; -import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class SwitchExecutor extends AbstractTaskExecutor { - private Map workflowFilters = new ConcurrentHashMap<>(); - private FlowDirective defaultDirective; - - protected SwitchExecutor(SwitchTask task, WorkflowDefinition definition) { - super(task, definition); - for (SwitchItem item : task.getSwitch()) { - SwitchCase switchCase = item.getSwitchCase(); - if (switchCase.getWhen() != null) { - workflowFilters.put( - switchCase, - WorkflowUtils.buildWorkflowFilter( - definition.expressionFactory(), switchCase.getWhen())); - } else { - defaultDirective = switchCase.getThen(); + private final Map workflowFilters; + private final TransitionInfo defaultTask; + + public static class SwitchExecutorBuilder extends AbstractTaskExecutorBuilder { + private final Map workflowFilters = new HashMap<>(); + private Map switchFilters = new HashMap<>(); + private FlowDirective defaultDirective; + private TransitionInfoBuilder defaultTask; + + public SwitchExecutorBuilder( + WorkflowPosition position, + SwitchTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + for (SwitchItem item : task.getSwitch()) { + SwitchCase switchCase = item.getSwitchCase(); + if (switchCase.getWhen() != null) { + workflowFilters.put( + switchCase, + WorkflowUtils.buildWorkflowFilter( + 
application.expressionFactory(), switchCase.getWhen())); + } else { + defaultDirective = switchCase.getThen(); + } } } + + @Override + public void connect(Map> connections) { + this.switchFilters = + this.workflowFilters.entrySet().stream() + .collect( + Collectors.toMap(Entry::getValue, e -> next(e.getKey().getThen(), connections))); + this.defaultTask = next(defaultDirective, connections); + } + + @Override + protected TaskExecutor buildInstance() { + return new SwitchExecutor(this); + } + } + + private SwitchExecutor(SwitchExecutorBuilder builder) { + super(builder); + this.defaultTask = TransitionInfo.build(builder.defaultTask); + this.workflowFilters = + builder.switchFilters.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, e -> TransitionInfo.build(e.getValue()))); } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - for (Entry entry : workflowFilters.entrySet()) { - if (entry.getValue().apply(workflow, taskContext, taskContext.input()).asBoolean()) { - taskContext.flowDirective(entry.getKey().getThen()); - return; + protected CompletableFuture execute( + WorkflowContext workflow, TaskContext taskContext) { + CompletableFuture future = CompletableFuture.completedFuture(taskContext); + for (Entry entry : workflowFilters.entrySet()) { + if (entry.getKey().apply(workflow, taskContext, taskContext.input()).asBoolean()) { + return future.thenApply(t -> t.transition(entry.getValue())); } } - taskContext.flowDirective(defaultDirective); + return future.thenApply(t -> t.transition(defaultTask)); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutor.java index b4b66a9a..b77398c3 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutor.java @@ -19,9 +19,11 @@ import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.impl.TaskContext; import io.serverlessworkflow.impl.WorkflowContext; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; @FunctionalInterface public interface TaskExecutor { - TaskContext apply( - WorkflowContext workflowContext, TaskContext parentContext, JsonNode input); + CompletableFuture apply( + WorkflowContext workflowContext, Optional parentContext, JsonNode input); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorBuilder.java new file mode 100644 index 00000000..2cbb8c16 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorBuilder.java @@ -0,0 +1,26 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl.executors; + +import io.serverlessworkflow.api.types.TaskBase; +import java.util.Map; + +public interface TaskExecutorBuilder { + + void connect(Map> connections); + + TaskExecutor build(); +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorFactory.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorFactory.java index 8c399cf6..b1be3429 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorFactory.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorFactory.java @@ -17,8 +17,16 @@ import io.serverlessworkflow.api.types.Task; import io.serverlessworkflow.api.types.TaskBase; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.api.types.Workflow; +import io.serverlessworkflow.impl.WorkflowApplication; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.resources.ResourceLoader; public interface TaskExecutorFactory { - TaskExecutor getTaskExecutor(Task task, WorkflowDefinition definition); + TaskExecutorBuilder getTaskExecutor( + WorkflowPosition position, + Task task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorHelper.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorHelper.java index e16cb085..3fa77f5f 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorHelper.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TaskExecutorHelper.java @@ -16,50 +16,37 @@ package io.serverlessworkflow.impl.executors; import com.fasterxml.jackson.databind.JsonNode; -import io.serverlessworkflow.api.types.FlowDirective; -import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.api.types.TaskItem; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.WorkflowStatus; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; -import java.util.ListIterator; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class TaskExecutorHelper { private TaskExecutorHelper() {} - public static void processTaskList( - List tasks, WorkflowContext context, TaskContext parentTask) { - parentTask.position().addProperty("do"); - TaskContext currentContext = parentTask; - if (!tasks.isEmpty()) { - ListIterator iter = tasks.listIterator(); - TaskItem nextTask = iter.next(); - while (nextTask != null && isActive(context)) { - TaskItem task = nextTask; - parentTask.position().addIndex(iter.previousIndex()); - currentContext = executeTask(context, parentTask, task, currentContext.output()); - FlowDirective flowDirective = currentContext.flowDirective(); - if (flowDirective.getFlowDirectiveEnum() != null) { - switch (flowDirective.getFlowDirectiveEnum()) { - case CONTINUE: - nextTask = iter.hasNext() ? 
iter.next() : null; - break; - case END: - context.instance().status(WorkflowStatus.COMPLETED); - break; - case EXIT: - nextTask = null; - break; - } - } else { - nextTask = findTaskByName(iter, flowDirective.getString()); - } - parentTask.position().back(); - } - } - parentTask.position().back(); - parentTask.rawOutput(currentContext.output()); + public static CompletableFuture processTaskList( + TaskExecutor taskExecutor, + WorkflowContext context, + Optional parentTask, + JsonNode input) { + return taskExecutor + .apply(context, parentTask, input) + .thenApply( + t -> { + parentTask.ifPresent(p -> p.rawOutput(t.output())); + return t.output(); + }); } public static boolean isActive(WorkflowContext context) { @@ -67,45 +54,58 @@ public static boolean isActive(WorkflowContext context) { } public static boolean isActive(WorkflowStatus status) { - return status == WorkflowStatus.RUNNING; + return status == WorkflowStatus.RUNNING || status == WorkflowStatus.WAITING; } - public static TaskContext executeTask( - WorkflowContext context, TaskContext parentTask, TaskItem task, JsonNode input) { - parentTask.position().addProperty(task.getName()); - TaskContext result = - context - .definition() - .taskExecutors() - .computeIfAbsent( - parentTask.position().jsonPointer(), - k -> - context - .definition() - .taskFactory() - .getTaskExecutor(task.getTask(), context.definition())) - .apply(context, parentTask, input); - parentTask.position().back(); - return result; + public static TaskExecutor createExecutorList( + WorkflowPosition position, + List taskItems, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + Map> executors = + createExecutorBuilderList(position, taskItems, workflow, application, resourceLoader, "do"); + executors.values().forEach(t -> t.connect(executors)); + Iterator> iter = executors.values().iterator(); + TaskExecutor first = iter.next().build(); + while (iter.hasNext()) { + iter.next().build(); + } + return first; } - private static TaskItem findTaskByName(ListIterator iter, String taskName) { - int currentIndex = iter.nextIndex(); - while (iter.hasPrevious()) { - TaskItem item = iter.previous(); - if (item.getName().equals(taskName)) { - return item; - } - } - while (iter.nextIndex() < currentIndex) { - iter.next(); - } - while (iter.hasNext()) { - TaskItem item = iter.next(); - if (item.getName().equals(taskName)) { - return item; - } + public static Map> createBranchList( + WorkflowPosition position, + List taskItems, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + return createExecutorBuilderList( + position, taskItems, workflow, application, resourceLoader, "branch") + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().build())); + } + + private static Map> createExecutorBuilderList( + WorkflowPosition position, + List taskItems, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader, + String containerName) { + TaskExecutorFactory taskFactory = application.taskFactory(); + Map> executors = new LinkedHashMap<>(); + position.addProperty(containerName); + int index = 0; + for (TaskItem item : taskItems) { + position.addIndex(index++).addProperty(item.getName()); + TaskExecutorBuilder taskExecutorBuilder = + taskFactory.getTaskExecutor( + position.copy(), item.getTask(), workflow, application, resourceLoader); + executors.put(item.getName(), taskExecutorBuilder); + position.back().back(); } - throw new 
IllegalArgumentException("Cannot find task with name " + taskName); + return executors; } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfo.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfo.java new file mode 100644 index 00000000..f330ac74 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfo.java @@ -0,0 +1,27 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.executors; + +public record TransitionInfo(TaskExecutor next, boolean isEndNode) { + private static final TransitionInfo END = new TransitionInfo(null, true); + private static final TransitionInfo EXIT = new TransitionInfo(null, false); + + static TransitionInfo build(TransitionInfoBuilder builder) { + if (builder == null || builder == TransitionInfoBuilder.exit()) return EXIT; + if (builder == TransitionInfoBuilder.end()) return END; + return new TransitionInfo(builder.next().build(), false); + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfoBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfoBuilder.java new file mode 100644 index 00000000..7f62eaf7 --- /dev/null +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TransitionInfoBuilder.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.serverlessworkflow.impl.executors; + +public record TransitionInfoBuilder(TaskExecutorBuilder next, boolean isEndNode) { + + private static final TransitionInfoBuilder END = new TransitionInfoBuilder(null, true); + private static final TransitionInfoBuilder EXIT = new TransitionInfoBuilder(null, false); + + static TransitionInfoBuilder of(TaskExecutorBuilder next) { + return next == null ? 
EXIT : new TransitionInfoBuilder(next, false); + } + + static TransitionInfoBuilder end() { + return END; + } + + static TransitionInfoBuilder exit() { + return EXIT; + } +} diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TryExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TryExecutor.java index eed2801b..a4442bf2 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TryExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/TryExecutor.java @@ -15,41 +15,99 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.CatchErrors; import io.serverlessworkflow.api.types.ErrorFilter; +import io.serverlessworkflow.api.types.TaskItem; import io.serverlessworkflow.api.types.TryTask; import io.serverlessworkflow.api.types.TryTaskCatch; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowError; import io.serverlessworkflow.impl.WorkflowException; import io.serverlessworkflow.impl.WorkflowFilter; +import io.serverlessworkflow.impl.WorkflowPosition; import io.serverlessworkflow.impl.WorkflowUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; +import java.util.List; import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; import java.util.function.Predicate; -public class TryExecutor extends AbstractTaskExecutor { +public class TryExecutor extends RegularTaskExecutor { private final Optional whenFilter; private final Optional exceptFilter; private final Optional> errorFilter; + private final TaskExecutor taskExecutor; + private final Optional> catchTaskExecutor; - protected TryExecutor(TryTask task, WorkflowDefinition definition) { - super(task, definition); - TryTaskCatch catchInfo = task.getCatch(); - this.errorFilter = buildErrorFilter(catchInfo.getErrors()); - this.whenFilter = - WorkflowUtils.optionalFilter(definition.expressionFactory(), catchInfo.getWhen()); - this.exceptFilter = - WorkflowUtils.optionalFilter(definition.expressionFactory(), catchInfo.getExceptWhen()); + public static class TryExecutorBuilder extends RegularTaskExecutorBuilder { + + private final Optional whenFilter; + private final Optional exceptFilter; + private final Optional> errorFilter; + private final TaskExecutor taskExecutor; + private final Optional> catchTaskExecutor; + + protected TryExecutorBuilder( + WorkflowPosition position, + TryTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + TryTaskCatch catchInfo = task.getCatch(); + this.errorFilter = buildErrorFilter(catchInfo.getErrors()); + this.whenFilter = + WorkflowUtils.optionalFilter(application.expressionFactory(), catchInfo.getWhen()); + this.exceptFilter = + WorkflowUtils.optionalFilter(application.expressionFactory(), catchInfo.getExceptWhen()); + this.taskExecutor = + TaskExecutorHelper.createExecutorList( + position, task.getTry(), workflow, application, resourceLoader); + List catchTask = task.getCatch().getDo(); + this.catchTaskExecutor = + catchTask != null && !catchTask.isEmpty() + ? 
Optional.of( + TaskExecutorHelper.createExecutorList( + position, task.getCatch().getDo(), workflow, application, resourceLoader)) + : Optional.empty(); + } + + @Override + public TaskExecutor buildInstance() { + return new TryExecutor(this); + } + } + + protected TryExecutor(TryExecutorBuilder builder) { + super(builder); + this.errorFilter = builder.errorFilter; + this.whenFilter = builder.whenFilter; + this.exceptFilter = builder.exceptFilter; + this.taskExecutor = builder.taskExecutor; + this.catchTaskExecutor = builder.catchTaskExecutor; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - try { - TaskExecutorHelper.processTaskList(task.getTry(), workflow, taskContext); - } catch (WorkflowException exception) { + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + return TaskExecutorHelper.processTaskList( + taskExecutor, workflow, Optional.of(taskContext), taskContext.input()) + .exceptionallyCompose(e -> handleException(e, workflow, taskContext)); + } + + private CompletableFuture handleException( + Throwable e, WorkflowContext workflow, TaskContext taskContext) { + if (e instanceof CompletionException) { + return handleException(e.getCause(), workflow, taskContext); + } + if (e instanceof WorkflowException) { + WorkflowException exception = (WorkflowException) e; if (errorFilter.map(f -> f.test(exception.getWorflowError())).orElse(true) && whenFilter .map(w -> w.apply(workflow, taskContext, taskContext.input()).asBoolean()) @@ -57,11 +115,17 @@ protected void internalExecute(WorkflowContext workflow, TaskContext ta && exceptFilter .map(w -> !w.apply(workflow, taskContext, taskContext.input()).asBoolean()) .orElse(true)) { - if (task.getCatch().getDo() != null) { - TaskExecutorHelper.processTaskList(task.getCatch().getDo(), workflow, taskContext); + if (catchTaskExecutor.isPresent()) { + return TaskExecutorHelper.processTaskList( + catchTaskExecutor.get(), workflow, Optional.of(taskContext), taskContext.input()); } + } + return CompletableFuture.completedFuture(taskContext.rawOutput()); + } else { + if (e instanceof RuntimeException) { + throw (RuntimeException) e; } else { - throw exception; + throw new RuntimeException(e); } } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/WaitExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/WaitExecutor.java index a1fb31c5..42e648aa 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/WaitExecutor.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/WaitExecutor.java @@ -15,44 +15,71 @@ */ package io.serverlessworkflow.impl.executors; +import com.fasterxml.jackson.databind.JsonNode; import io.serverlessworkflow.api.types.DurationInline; import io.serverlessworkflow.api.types.WaitTask; +import io.serverlessworkflow.api.types.Workflow; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; +import io.serverlessworkflow.impl.WorkflowPosition; +import io.serverlessworkflow.impl.WorkflowStatus; +import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder; +import io.serverlessworkflow.impl.resources.ResourceLoader; import java.time.Duration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import java.util.concurrent.CompletableFuture; +import 
java.util.concurrent.TimeUnit; -public class WaitExecutor extends AbstractTaskExecutor { +public class WaitExecutor extends RegularTaskExecutor { - private static Logger logger = LoggerFactory.getLogger(WaitExecutor.class); private final Duration millisToWait; - protected WaitExecutor(WaitTask task, WorkflowDefinition definition) { - super(task, definition); - this.millisToWait = - task.getWait().getDurationInline() != null - ? toLong(task.getWait().getDurationInline()) - : Duration.parse(task.getWait().getDurationExpression()); + public static class WaitExecutorBuilder extends RegularTaskExecutorBuilder { + private final Duration millisToWait; + + protected WaitExecutorBuilder( + WorkflowPosition position, + WaitTask task, + Workflow workflow, + WorkflowApplication application, + ResourceLoader resourceLoader) { + super(position, task, workflow, application, resourceLoader); + this.millisToWait = + task.getWait().getDurationInline() != null + ? toLong(task.getWait().getDurationInline()) + : Duration.parse(task.getWait().getDurationExpression()); + } + + private Duration toLong(DurationInline durationInline) { + Duration duration = Duration.ofMillis(durationInline.getMilliseconds()); + duration = duration.plus(Duration.ofSeconds(durationInline.getSeconds())); + duration = duration.plus(Duration.ofMinutes(durationInline.getMinutes())); + duration = duration.plus(Duration.ofHours(durationInline.getHours())); + duration = duration.plus(Duration.ofDays(durationInline.getDays())); + return duration; + } + + @Override + public TaskExecutor buildInstance() { + return new WaitExecutor(this); + } } - private Duration toLong(DurationInline durationInline) { - Duration duration = Duration.ofMillis(durationInline.getMilliseconds()); - duration.plus(Duration.ofSeconds(durationInline.getSeconds())); - duration.plus(Duration.ofMinutes(durationInline.getMinutes())); - duration.plus(Duration.ofHours(durationInline.getHours())); - duration.plus(Duration.ofDays(durationInline.getDays())); - return duration; + protected WaitExecutor(WaitExecutorBuilder builder) { + super(builder); + this.millisToWait = builder.millisToWait; } @Override - protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) { - try { - Thread.sleep(millisToWait.toMillis()); - } catch (InterruptedException e) { - logger.warn("Waiting thread was interrupted", e); - Thread.currentThread().interrupt(); - } + protected CompletableFuture internalExecute( + WorkflowContext workflow, TaskContext taskContext) { + workflow.instance().status(WorkflowStatus.WAITING); + return new CompletableFuture() + .completeOnTimeout(taskContext.output(), millisToWait.toMillis(), TimeUnit.MILLISECONDS) + .thenApply( + node -> { + workflow.instance().status(WorkflowStatus.RUNNING); + return node; + }); } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/Expression.java b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/Expression.java index 42566c77..122fc6d8 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/Expression.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/Expression.java @@ -20,5 +20,5 @@ import io.serverlessworkflow.impl.WorkflowContext; public interface Expression { - JsonNode eval(WorkflowContext workflowContext, TaskContext context, JsonNode node); + JsonNode eval(WorkflowContext workflowContext, TaskContext context, JsonNode node); } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/ExpressionUtils.java 
b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/ExpressionUtils.java index 7f776322..c91ef3a2 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/ExpressionUtils.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/ExpressionUtils.java @@ -34,7 +34,7 @@ public static Map buildExpressionMap( } public static Map evaluateExpressionMap( - Map origMap, WorkflowContext workflow, TaskContext task, JsonNode n) { + Map origMap, WorkflowContext workflow, TaskContext task, JsonNode n) { return new ProxyMap( origMap, o -> @@ -50,7 +50,7 @@ public static Object buildExpressionObject(Object obj, ExpressionFactory factory } public static Object evaluateExpressionObject( - Object obj, WorkflowContext workflow, TaskContext task, JsonNode node) { + Object obj, WorkflowContext workflow, TaskContext task, JsonNode node) { return obj instanceof Map ? ExpressionUtils.evaluateExpressionMap((Map) obj, workflow, task, node) : obj; diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/JQExpression.java b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/JQExpression.java index 0207d3b5..6041515a 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/JQExpression.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/JQExpression.java @@ -41,7 +41,7 @@ public JQExpression(Supplier scope, String expr, Version version) } @Override - public JsonNode eval(WorkflowContext workflow, TaskContext task, JsonNode node) { + public JsonNode eval(WorkflowContext workflow, TaskContext task, JsonNode node) { JsonNodeOutput output = new JsonNodeOutput(); try { internalExpr.apply(createScope(workflow, task), node, output); @@ -75,17 +75,23 @@ public JsonNode getResult() { } } - private Scope createScope(WorkflowContext workflow, TaskContext task) { + private Scope createScope(WorkflowContext workflow, TaskContext task) { Scope childScope = Scope.newChildScope(scope.get()); - childScope.setValue("input", task.input()); - childScope.setValue("output", task.output()); - childScope.setValue("context", workflow.context()); - childScope.setValue( - "runtime", - () -> JsonUtils.fromValue(workflow.definition().runtimeDescriptorFactory().get())); - childScope.setValue("workflow", () -> JsonUtils.fromValue(WorkflowDescriptor.of(workflow))); - childScope.setValue("task", () -> JsonUtils.fromValue(TaskDescriptor.of(task))); - task.variables().forEach((k, v) -> childScope.setValue(k, JsonUtils.fromValue(v))); + if (task != null) { + childScope.setValue("input", task.input()); + childScope.setValue("output", task.output()); + childScope.setValue("task", () -> JsonUtils.fromValue(TaskDescriptor.of(task))); + task.variables().forEach((k, v) -> childScope.setValue(k, JsonUtils.fromValue(v))); + } + if (workflow != null) { + childScope.setValue("context", workflow.context()); + childScope.setValue( + "runtime", + () -> + JsonUtils.fromValue( + workflow.definition().application().runtimeDescriptorFactory().get())); + childScope.setValue("workflow", () -> JsonUtils.fromValue(WorkflowDescriptor.of(workflow))); + } return childScope; } } diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/TaskDescriptor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/TaskDescriptor.java index a78bffa7..f1e04cba 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/TaskDescriptor.java +++ 
b/impl/core/src/main/java/io/serverlessworkflow/impl/expressions/TaskDescriptor.java @@ -19,17 +19,17 @@ import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.impl.TaskContext; -public record TaskDescriptor( +public record TaskDescriptor( String name, String reference, - T definition, + TaskBase definition, JsonNode rawInput, JsonNode rawOutput, DateTimeDescriptor startedAt) { - public static TaskDescriptor of(TaskContext context) { - return new TaskDescriptor( - context.position().last().toString(), + public static TaskDescriptor of(TaskContext context) { + return new TaskDescriptor( + context.taskName(), context.position().jsonPointer(), context.task(), context.rawInput(), diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/json/JsonUtils.java b/impl/core/src/main/java/io/serverlessworkflow/impl/json/JsonUtils.java index 0726c2be..37d5c668 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/json/JsonUtils.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/json/JsonUtils.java @@ -34,6 +34,9 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -87,6 +90,12 @@ public Supplier supplier() { }; } + public static OffsetDateTime toOffsetDateTime(JsonNode node) { + return node.isTextual() + ? OffsetDateTime.parse(node.asText()) + : OffsetDateTime.ofInstant(Instant.ofEpochMilli(node.asLong()), ZoneOffset.UTC); + } + /* * Implementation note: * Although we can use directly ObjectMapper.convertValue for implementing fromValue and toJavaValue methods, diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/resources/DynamicResource.java b/impl/core/src/main/java/io/serverlessworkflow/impl/resources/DynamicResource.java index ee9432c0..accac01e 100644 --- a/impl/core/src/main/java/io/serverlessworkflow/impl/resources/DynamicResource.java +++ b/impl/core/src/main/java/io/serverlessworkflow/impl/resources/DynamicResource.java @@ -22,5 +22,5 @@ import java.util.Optional; public interface DynamicResource { - InputStream open(WorkflowContext workflow, Optional> task, JsonNode input); + InputStream open(WorkflowContext workflow, Optional task, JsonNode input); } diff --git a/impl/core/src/test/java/io/serverlessworkflow/impl/EventDefinitionTest.java b/impl/core/src/test/java/io/serverlessworkflow/impl/EventDefinitionTest.java new file mode 100644 index 00000000..981b149d --- /dev/null +++ b/impl/core/src/test/java/io/serverlessworkflow/impl/EventDefinitionTest.java @@ -0,0 +1,135 @@ +/* + * Copyright 2020-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.serverlessworkflow.impl; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.serverlessworkflow.api.WorkflowReader; +import io.serverlessworkflow.impl.json.JsonUtils; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +public class EventDefinitionTest { + + private static WorkflowApplication appl; + + @BeforeAll + static void init() { + appl = WorkflowApplication.builder().build(); + } + + @ParameterizedTest + @MethodSource("eventListenerParameters") + void testEventListened(String listen, String emit, JsonNode expectedResult, Object emitInput) + throws IOException { + WorkflowDefinition listenDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath(listen)); + WorkflowDefinition emitDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath(emit)); + WorkflowInstance waitingInstance = listenDefinition.instance(Map.of()); + CompletableFuture future = waitingInstance.start(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.WAITING); + emitDefinition.instance(emitInput).start().join(); + assertThat(future).isCompleted(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.COMPLETED); + assertThat(waitingInstance.outputAsJsonNode()).isEqualTo(expectedResult); + } + + @ParameterizedTest + @MethodSource("eventsListenerParameters") + void testEventsListened(String listen, String emit1, String emit2, JsonNode expectedResult) + throws IOException { + WorkflowDefinition listenDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath(listen)); + WorkflowDefinition emitDoctorDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath(emit1)); + WorkflowDefinition emitOutDefinition = + appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath(emit2)); + WorkflowInstance waitingInstance = listenDefinition.instance(Map.of()); + CompletableFuture future = waitingInstance.start(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.WAITING); + emitDoctorDefinition.instance(Map.of("temperature", 35)).start().join(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.WAITING); + emitDoctorDefinition.instance(Map.of("temperature", 39)).start().join(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.WAITING); + emitOutDefinition.instance(Map.of()).start().join(); + assertThat(future).isCompleted(); + assertThat(waitingInstance.status()).isEqualTo(WorkflowStatus.COMPLETED); + assertThat(waitingInstance.outputAsJsonNode()).isEqualTo(expectedResult); + } + + private static Stream eventListenerParameters() { + return Stream.of( + Arguments.of("listen-to-any.yaml", "emit.yaml", array(cruellaDeVil()), Map.of()), + Arguments.of( + "listen-to-any-filter.yaml", "emit-doctor.yaml", doctor(), Map.of("temperature", 39))); + } + + private static Stream eventsListenerParameters() { + return Stream.of( + Arguments.of( + "listen-to-all.yaml", + "emit-doctor.yaml", + "emit.yaml", + array(temperature(), cruellaDeVil())), + Arguments.of( + 
"listen-to-any-until-consumed.yaml", + "emit-doctor.yaml", + "emit-out.yaml", + array(temperature()))); + } + + private static JsonNode cruellaDeVil() { + ObjectMapper mapper = JsonUtils.mapper(); + ObjectNode node = mapper.createObjectNode(); + node.set( + "client", mapper.createObjectNode().put("firstName", "Cruella").put("lastName", "de Vil")); + node.set( + "items", + mapper + .createArrayNode() + .add(mapper.createObjectNode().put("breed", "dalmatian").put("quantity", 101))); + return node; + } + + private static JsonNode doctor() { + ObjectNode node = temperature(); + node.put("isSick", true); + return array(node); + } + + private static ObjectNode temperature() { + ObjectNode node = JsonUtils.mapper().createObjectNode(); + node.put("temperature", 39); + return node; + } + + private static JsonNode array(JsonNode... jsonNodes) { + ArrayNode arrayNode = JsonUtils.mapper().createArrayNode(); + for (JsonNode node : jsonNodes) arrayNode.add(node); + return arrayNode; + } +} diff --git a/impl/core/src/test/java/io/serverlessworkflow/impl/WorkflowDefinitionTest.java b/impl/core/src/test/java/io/serverlessworkflow/impl/WorkflowDefinitionTest.java index e2a1dbf2..4ea87283 100644 --- a/impl/core/src/test/java/io/serverlessworkflow/impl/WorkflowDefinitionTest.java +++ b/impl/core/src/test/java/io/serverlessworkflow/impl/WorkflowDefinitionTest.java @@ -16,30 +16,28 @@ package io.serverlessworkflow.impl; import static io.serverlessworkflow.api.WorkflowReader.readWorkflowFromClasspath; +import static io.serverlessworkflow.api.WorkflowReader.validation; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowableOfType; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import io.serverlessworkflow.impl.json.JsonUtils; import java.io.IOException; import java.time.Instant; import java.util.Arrays; import java.util.Map; +import java.util.concurrent.CompletionException; import java.util.function.Consumer; import java.util.stream.Stream; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class WorkflowDefinitionTest { private static WorkflowApplication appl; - private static Logger logger = LoggerFactory.getLogger(WorkflowDefinitionTest.class); private static Instant before; @BeforeAll @@ -52,7 +50,7 @@ static void init() { @MethodSource("provideParameters") void testWorkflowExecution(String fileName, Consumer assertions) throws IOException { - assertions.accept(appl.workflowDefinition(readWorkflowFromClasspath(fileName))); + assertions.accept(appl.workflowDefinition(readWorkflowFromClasspath(fileName, validation()))); } private static Stream provideParameters() { @@ -60,50 +58,25 @@ private static Stream provideParameters() { args( "switch-then-string.yaml", Map.of("orderType", "electronic"), - o -> - assertThat(o.output()) - .isEqualTo( - Map.of( - "orderType", "electronic", "validate", true, "status", "fulfilled"))), + o -> assertThat(o).isEqualTo(Map.of("validate", true, "status", "fulfilled"))), args( "switch-then-string.yaml", Map.of("orderType", "physical"), o -> - assertThat(o.output()) - .isEqualTo( - Map.of( - "orderType", - "physical", - "inventory", - "clear", - "items", - 1, - "address", - "Elmer St"))), + 
assertThat(o) + .isEqualTo(Map.of("inventory", "clear", "items", 1, "address", "Elmer St"))), args( "switch-then-string.yaml", Map.of("orderType", "unknown"), - o -> - assertThat(o.output()) - .isEqualTo( - Map.of( - "orderType", - "unknown", - "log", - "warn", - "message", - "something's wrong"))), + o -> assertThat(o).isEqualTo(Map.of("log", "warn", "message", "something's wrong"))), args( "for-sum.yaml", Map.of("input", Arrays.asList(1, 2, 3)), - o -> assertThat(o.output()).isEqualTo(6)), + o -> assertThat(o).isEqualTo(6)), args( "for-collect.yaml", Map.of("input", Arrays.asList(1, 2, 3)), - o -> - assertThat(o.output()) - .isEqualTo( - Map.of("input", Arrays.asList(1, 2, 3), "output", Arrays.asList(2, 4, 6)))), + o -> assertThat(o).isEqualTo(Map.of("output", Arrays.asList(2, 4, 6)))), args( "simple-expression.yaml", Map.of("input", Arrays.asList(1, 2, 3)), @@ -119,16 +92,25 @@ private static Stream provideParameters() { args( "fork.yaml", Map.of(), - o -> - assertThat(((ObjectNode) o.outputAsJsonNode()).get("patientId").asText()) - .isIn("John", "Smith")), - args("fork-no-compete.yaml", Map.of(), WorkflowDefinitionTest::checkNotCompeteOuput)); + o -> assertThat(((Map) o).get("patientId")).isIn("John", "Smith")), + argsJson("fork-no-compete.yaml", Map.of(), WorkflowDefinitionTest::checkNotCompeteOuput)); } private static Arguments args( - String fileName, Map input, Consumer instance) { + String fileName, Map input, Consumer instance) { + return Arguments.of( + fileName, + (Consumer) + d -> + instance.accept( + d.instance(input).start().thenApply(JsonUtils::toJavaValue).join())); + } + + private static Arguments argsJson( + String fileName, Map input, Consumer instance) { return Arguments.of( - fileName, (Consumer) d -> instance.accept(d.execute(input))); + fileName, + (Consumer) d -> instance.accept(d.instance(input).start().join())); } private static Arguments args( @@ -136,12 +118,21 @@ private static Arguments args( return Arguments.of( fileName, (Consumer) - d -> consumer.accept(catchThrowableOfType(clazz, () -> d.execute(Map.of())))); + d -> + checkWorkflowException( + catchThrowableOfType( + CompletionException.class, () -> d.instance(Map.of()).start().join()), + consumer, + clazz)); + } + + private static void checkWorkflowException( + CompletionException ex, Consumer consumer, Class clazz) { + assertThat(ex.getCause()).isInstanceOf(clazz); + consumer.accept(clazz.cast(ex.getCause())); } - private static void checkNotCompeteOuput(WorkflowInstance instance) { - JsonNode out = instance.outputAsJsonNode(); - logger.debug("Output is {}", out); + private static void checkNotCompeteOuput(JsonNode out) { assertThat(out).isInstanceOf(ArrayNode.class); assertThat(out).hasSize(2); ArrayNode array = (ArrayNode) out; @@ -167,8 +158,8 @@ private static void checkWorkflowException(WorkflowException ex) { assertThat(ex.getWorflowError().instance()).isEqualTo("do/0/notImplemented"); } - private static void checkSpecialKeywords(WorkflowInstance obj) { - Map result = (Map) obj.output(); + private static void checkSpecialKeywords(Object obj) { + Map result = (Map) obj; assertThat(Instant.ofEpochMilli((long) result.get("startedAt"))) .isAfterOrEqualTo(before) .isBeforeOrEqualTo(Instant.now()); diff --git a/impl/core/src/test/resources/emit-doctor.yaml b/impl/core/src/test/resources/emit-doctor.yaml new file mode 100644 index 00000000..b940b9cd --- /dev/null +++ b/impl/core/src/test/resources/emit-doctor.yaml @@ -0,0 +1,14 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: emit-doctor + 
version: '0.1.0' +do: + - emitEvent: + emit: + event: + with: + source: https://hospital.com + type: com.fake-hospital.vitals.measurements.temperature + data: + temperature: ${.temperature} \ No newline at end of file diff --git a/impl/core/src/test/resources/emit-out.yaml b/impl/core/src/test/resources/emit-out.yaml new file mode 100644 index 00000000..41582f34 --- /dev/null +++ b/impl/core/src/test/resources/emit-out.yaml @@ -0,0 +1,12 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: emit-out + version: '0.1.0' +do: + - emitEvent: + emit: + event: + with: + source: https://hospital.com + type: com.fake-hospital.patient.checked-out \ No newline at end of file diff --git a/impl/core/src/test/resources/emit.yaml b/impl/core/src/test/resources/emit.yaml new file mode 100644 index 00000000..d4d6d559 --- /dev/null +++ b/impl/core/src/test/resources/emit.yaml @@ -0,0 +1,19 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: emit + version: '0.1.0' +do: + - emitEvent: + emit: + event: + with: + source: https://petstore.com + type: com.petstore.order.placed.v1 + data: + client: + firstName: Cruella + lastName: de Vil + items: + - breed: dalmatian + quantity: 101 \ No newline at end of file diff --git a/impl/core/src/test/resources/for-collect.yaml b/impl/core/src/test/resources/for-collect.yaml index 7bcc48c2..53dd8231 100644 --- a/impl/core/src/test/resources/for-collect.yaml +++ b/impl/core/src/test/resources/for-collect.yaml @@ -13,5 +13,5 @@ do: from: '{input: .input, output: []}' do: - sumIndex: - output: - as: .output+=[$number+$index+1] \ No newline at end of file + set: + output: ${.output+[$number+$index+1]} \ No newline at end of file diff --git a/impl/core/src/test/resources/for-sum.yaml b/impl/core/src/test/resources/for-sum.yaml index e0fe106b..6d89d9ff 100644 --- a/impl/core/src/test/resources/for-sum.yaml +++ b/impl/core/src/test/resources/for-sum.yaml @@ -4,16 +4,13 @@ document: name: for-sum-example version: '0.1.0' do: - - initCounter: - set: - counter: 0 - sumAll: for: each: number in: .input do: - accumulate: - output: - as: .counter+=$number - output: - as: .counter \ No newline at end of file + set: + counter: ${.counter+$number} + output: + as: .counter diff --git a/impl/core/src/test/resources/listen-to-all.yaml b/impl/core/src/test/resources/listen-to-all.yaml new file mode 100644 index 00000000..0d55f185 --- /dev/null +++ b/impl/core/src/test/resources/listen-to-all.yaml @@ -0,0 +1,15 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: listen-to-all + version: '0.1.0' +do: + - callDoctor: + listen: + to: + all: + - with: + type: com.fake-hospital.vitals.measurements.temperature + data: ${ .temperature > 38 } + - with: + type: com.petstore.order.placed.v1 \ No newline at end of file diff --git a/impl/core/src/test/resources/listen-to-any-filter.yaml b/impl/core/src/test/resources/listen-to-any-filter.yaml new file mode 100644 index 00000000..49185870 --- /dev/null +++ b/impl/core/src/test/resources/listen-to-any-filter.yaml @@ -0,0 +1,25 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: listen-to-any-filter + version: '0.1.0' +do: + - callDoctor: + listen: + to: + any: + - with: + type: com.fake-hospital.vitals.measurements.temperature + data: ${ .temperature > 38 } + - with: + type: com.fake-hospital.vitals.measurements.bpm + data: ${ .bpm < 60 or .bpm > 100 } + until: ( . 
| length ) > 0 + foreach: + item: event + do: + - isSick: + set: + temperature: ${$event.temperature} + isSick: true + \ No newline at end of file diff --git a/impl/core/src/test/resources/listen-to-any-until-consumed.yaml b/impl/core/src/test/resources/listen-to-any-until-consumed.yaml new file mode 100644 index 00000000..62f04d2d --- /dev/null +++ b/impl/core/src/test/resources/listen-to-any-until-consumed.yaml @@ -0,0 +1,20 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: listen-to-any-until-consumed + version: '0.1.0' +do: + - callDoctor: + listen: + to: + any: + - with: + type: com.fake-hospital.vitals.measurements.temperature + data: ${ .temperature > 38 } + - with: + type: com.fake-hospital.vitals.measurements.bpm + data: ${ .bpm < 60 or .bpm > 100 } + until: + one: + with: + type: com.fake-hospital.patient.checked-out \ No newline at end of file diff --git a/impl/core/src/test/resources/listen-to-any.yaml b/impl/core/src/test/resources/listen-to-any.yaml new file mode 100644 index 00000000..b4a9fcb9 --- /dev/null +++ b/impl/core/src/test/resources/listen-to-any.yaml @@ -0,0 +1,10 @@ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: listen-to-any + version: '0.1.0' +do: + - callDoctor: + listen: + to: + any: [] \ No newline at end of file diff --git a/impl/core/src/test/resources/switch-then-string.yaml b/impl/core/src/test/resources/switch-then-string.yaml index a35ebd45..4093a6fa 100644 --- a/impl/core/src/test/resources/switch-then-string.yaml +++ b/impl/core/src/test/resources/switch-then-string.yaml @@ -15,31 +15,17 @@ do: - default: then: handleUnknownOrderType - processElectronicOrder: - do: - - validatePayment: - set: - validate: true - - fulfillOrder: - set: - status: fulfilled - then: exit + set: + validate: true + status: fulfilled + then: exit - processPhysicalOrder: - do: - - checkInventory: - set: - inventory: clear - - packItems: - set: - items: 1 - - scheduleShipping: - set: - address: Elmer St + set: + inventory: clear + items: 1 + address: Elmer St then: exit - handleUnknownOrderType: - do: - - logWarning: - set: - log: warn - - notifyAdmin: - set: - message: something's wrong + set: + log: warn + message: something's wrong diff --git a/impl/http/pom.xml b/impl/http/pom.xml index b67f2cd5..fdf2a168 100644 --- a/impl/http/pom.xml +++ b/impl/http/pom.xml @@ -1,43 +1,45 @@ - - 4.0.0 - - io.serverlessworkflow - serverlessworkflow-impl - 7.0.0-alpha5.1 - - serverlessworkflow-impl-http - - - org.glassfish.jersey.core - jersey-client - - - org.glassfish.jersey.media - jersey-media-json-jackson - - - io.serverlessworkflow - serverlessworkflow-impl-core - - - org.junit.jupiter - junit-jupiter-api - test - - - org.junit.jupiter - junit-jupiter-engine - test - - - org.junit.jupiter - junit-jupiter-params - test - - - org.assertj - assertj-core - test - - + + 4.0.0 + + io.serverlessworkflow + serverlessworkflow-impl + 8.0.0-SNAPSHOT + + serverlessworkflow-impl-http + Serverless Workflow :: Impl :: HTTP + + + org.glassfish.jersey.core + jersey-client + + + org.glassfish.jersey.media + jersey-media-json-jackson + + + io.serverlessworkflow + serverlessworkflow-impl-core + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.assertj + assertj-core + test + + \ No newline at end of file diff --git a/impl/http/src/main/java/io/serverlessworkflow/impl/executors/HttpExecutor.java 
b/impl/http/src/main/java/io/serverlessworkflow/impl/executors/HttpExecutor.java index 13e61d35..3c078309 100644 --- a/impl/http/src/main/java/io/serverlessworkflow/impl/executors/HttpExecutor.java +++ b/impl/http/src/main/java/io/serverlessworkflow/impl/executors/HttpExecutor.java @@ -24,14 +24,15 @@ import io.serverlessworkflow.api.types.TaskBase; import io.serverlessworkflow.api.types.UriTemplate; import io.serverlessworkflow.impl.TaskContext; +import io.serverlessworkflow.impl.WorkflowApplication; import io.serverlessworkflow.impl.WorkflowContext; -import io.serverlessworkflow.impl.WorkflowDefinition; import io.serverlessworkflow.impl.WorkflowError; import io.serverlessworkflow.impl.WorkflowException; import io.serverlessworkflow.impl.expressions.Expression; import io.serverlessworkflow.impl.expressions.ExpressionFactory; import io.serverlessworkflow.impl.expressions.ExpressionUtils; import io.serverlessworkflow.impl.json.JsonUtils; +import io.serverlessworkflow.impl.resources.ResourceLoader; import jakarta.ws.rs.HttpMethod; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.client.Client; @@ -41,6 +42,7 @@ import jakarta.ws.rs.client.WebTarget; import java.util.Map; import java.util.Map.Entry; +import java.util.concurrent.CompletableFuture; public class HttpExecutor implements CallableTask { @@ -53,33 +55,34 @@ public class HttpExecutor implements CallableTask { @FunctionalInterface private interface TargetSupplier { - WebTarget apply(WorkflowContext workflow, TaskContext task, JsonNode node); + WebTarget apply(WorkflowContext workflow, TaskContext task, JsonNode node); } @FunctionalInterface private interface RequestSupplier { - JsonNode apply(Builder request, WorkflowContext workflow, TaskContext task, JsonNode node); + JsonNode apply(Builder request, WorkflowContext workflow, TaskContext task, JsonNode node); } @Override - public void init(CallHTTP task, WorkflowDefinition definition) { + public void init(CallHTTP task, WorkflowApplication application, ResourceLoader resourceLoader) { HTTPArguments httpArgs = task.getWith(); - this.targetSupplier = getTargetSupplier(httpArgs.getEndpoint(), definition.expressionFactory()); + this.targetSupplier = + getTargetSupplier(httpArgs.getEndpoint(), application.expressionFactory()); this.headersMap = httpArgs.getHeaders() != null ? ExpressionUtils.buildExpressionMap( - httpArgs.getHeaders().getAdditionalProperties(), definition.expressionFactory()) + httpArgs.getHeaders().getAdditionalProperties(), application.expressionFactory()) : Map.of(); this.queryMap = httpArgs.getQuery() != null ? 
ExpressionUtils.buildExpressionMap( - httpArgs.getQuery().getAdditionalProperties(), definition.expressionFactory()) + httpArgs.getQuery().getAdditionalProperties(), application.expressionFactory()) : Map.of(); switch (httpArgs.getMethod().toUpperCase()) { case HttpMethod.POST: Object body = ExpressionUtils.buildExpressionObject( - httpArgs.getBody(), definition.expressionFactory()); + httpArgs.getBody(), application.expressionFactory()); this.requestFunction = (request, workflow, context, node) -> request.post( @@ -94,8 +97,8 @@ public void init(CallHTTP task, WorkflowDefinition definition) { } @Override - public JsonNode apply( - WorkflowContext workflow, TaskContext taskContext, JsonNode input) { + public CompletableFuture apply( + WorkflowContext workflow, TaskContext taskContext, JsonNode input) { WebTarget target = targetSupplier.apply(workflow, taskContext, input); for (Entry entry : ExpressionUtils.evaluateExpressionMap(queryMap, workflow, taskContext, input).entrySet()) { @@ -104,13 +107,18 @@ public JsonNode apply( Builder request = target.request(); ExpressionUtils.evaluateExpressionMap(headersMap, workflow, taskContext, input) .forEach(request::header); - try { - return requestFunction.apply(request, workflow, taskContext, input); - } catch (WebApplicationException exception) { - throw new WorkflowException( - WorkflowError.communication(exception.getResponse().getStatus(), taskContext, exception) - .build()); - } + return CompletableFuture.supplyAsync( + () -> { + try { + return requestFunction.apply(request, workflow, taskContext, input); + } catch (WebApplicationException exception) { + throw new WorkflowException( + WorkflowError.communication( + exception.getResponse().getStatus(), taskContext, exception) + .build()); + } + }, + workflow.definition().application().executorService()); } @Override @@ -158,7 +166,7 @@ public ExpressionURISupplier(Expression expr) { } @Override - public WebTarget apply(WorkflowContext workflow, TaskContext task, JsonNode node) { + public WebTarget apply(WorkflowContext workflow, TaskContext task, JsonNode node) { return client.target(expr.eval(workflow, task, node).asText()); } } diff --git a/impl/http/src/test/java/io/serverlessworkflow/impl/HTTPWorkflowDefinitionTest.java b/impl/http/src/test/java/io/serverlessworkflow/impl/HTTPWorkflowDefinitionTest.java index f3d77bdd..7492be53 100644 --- a/impl/http/src/test/java/io/serverlessworkflow/impl/HTTPWorkflowDefinitionTest.java +++ b/impl/http/src/test/java/io/serverlessworkflow/impl/HTTPWorkflowDefinitionTest.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowableOfType; +import io.serverlessworkflow.impl.json.JsonUtils; import java.io.IOException; import java.util.Map; import java.util.stream.Stream; @@ -42,7 +43,12 @@ static void init() { @MethodSource("provideParameters") void testWorkflowExecution(String fileName, Object input, Condition condition) throws IOException { - assertThat(appl.workflowDefinition(readWorkflowFromClasspath(fileName)).execute(input).output()) + assertThat( + appl.workflowDefinition(readWorkflowFromClasspath(fileName)) + .instance(input) + .start() + .thenApply(JsonUtils::toJavaValue) + .join()) .is(condition); } @@ -56,17 +62,29 @@ void testWrongSchema(String fileName) { IllegalArgumentException exception = catchThrowableOfType( IllegalArgumentException.class, - () -> appl.workflowDefinition(readWorkflowFromClasspath(fileName)).execute(Map.of())); + () -> 
appl.workflowDefinition(readWorkflowFromClasspath(fileName)).instance(Map.of())); assertThat(exception) .isNotNull() .hasMessageContaining("There are JsonSchema validation errors"); } + private static boolean httpCondition(Object obj) { + Map map = (Map) obj; + return map.containsKey("photoUrls") || map.containsKey("petId"); + } + private static Stream provideParameters() { Map petInput = Map.of("petId", 10); + Map starTrekInput = Map.of("uid", "MOMA0000092393"); Condition petCondition = + new Condition<>(HTTPWorkflowDefinitionTest::httpCondition, "callHttpCondition"); + Condition starTrekCondition = new Condition<>( - o -> ((Map) o).containsKey("photoUrls"), "callHttpCondition"); + o -> + ((Map) ((Map) o).get("movie")) + .get("title") + .equals("Star Trek"), + "StartTrek"); return Stream.of( Arguments.of("callGetHttp.yaml", petInput, petCondition), Arguments.of( @@ -75,16 +93,9 @@ private static Stream provideParameters() { new Condition<>( o -> ((Map) o).containsKey("petId"), "notFoundCondition")), Arguments.of("call-http-endpoint-interpolation.yaml", petInput, petCondition), + Arguments.of("call-http-query-parameters.yaml", starTrekInput, starTrekCondition), Arguments.of( - "call-http-query-parameters.yaml", - Map.of("searchQuery", "R2-D2"), - new Condition<>( - o -> ((Map) o).get("count").equals(1), "R2D2Condition")), - Arguments.of( - "call-http-query-parameters-external-schema.yaml", - Map.of("searchQuery", "Luke Skywalker"), - new Condition<>( - o -> ((Map) o).get("count").equals(1), "TheRealJediCondition")), + "call-http-query-parameters-external-schema.yaml", starTrekInput, starTrekCondition), Arguments.of( "callPostHttp.yaml", Map.of("name", "Javierito", "status", "available"), diff --git a/impl/http/src/test/resources/call-http-endpoint-interpolation.yaml b/impl/http/src/test/resources/call-http-endpoint-interpolation.yaml index 5c1239f0..43ba4988 100644 --- a/impl/http/src/test/resources/call-http-endpoint-interpolation.yaml +++ b/impl/http/src/test/resources/call-http-endpoint-interpolation.yaml @@ -4,10 +4,17 @@ document: name: call-http-shorthand-endpoint version: '0.1.0' do: - - getPet: - call: http - with: - headers: - content-type: application/json - method: get - endpoint: ${ "https://petstore.swagger.io/v2/pet/\(.petId)" } \ No newline at end of file + - tryGetPet: + try: + - getPet: + call: http + with: + headers: + content-type: application/json + method: get + endpoint: ${ "https://petstore.swagger.io/v2/pet/\(.petId)" } + catch: + errors: + with: + type: https://serverlessworkflow.io/spec/1.0.0/errors/communication + status: 404 \ No newline at end of file diff --git a/impl/http/src/test/resources/call-http-query-parameters-external-schema.yaml b/impl/http/src/test/resources/call-http-query-parameters-external-schema.yaml index 9488592e..467b3632 100644 --- a/impl/http/src/test/resources/call-http-query-parameters-external-schema.yaml +++ b/impl/http/src/test/resources/call-http-query-parameters-external-schema.yaml @@ -8,11 +8,11 @@ input: resource: endpoint: schema/searchquery.yaml do: - - searchStarWarsCharacters: + - searchStarTrekMovies: call: http with: method: get - endpoint: https://swapi.dev/api/people/ + endpoint: https://stapi.co/api/v1/rest/movie query: - search: ${.searchQuery} + uid: ${.uid} diff --git a/impl/http/src/test/resources/call-http-query-parameters.yaml b/impl/http/src/test/resources/call-http-query-parameters.yaml index d209bf07..b207d092 100644 --- a/impl/http/src/test/resources/call-http-query-parameters.yaml +++ 
b/impl/http/src/test/resources/call-http-query-parameters.yaml @@ -8,16 +8,16 @@ input: document: type: object required: - - searchQuery + - uid properties: - searchQuery: + uid: type: string do: - - searchStarWarsCharacters: + - searchStarTrekMovies: call: http with: method: get - endpoint: https://swapi.dev/api/people/ + endpoint: https://stapi.co/api/v1/rest/movie query: - search: ${.searchQuery} + uid: ${.uid} diff --git a/impl/http/src/test/resources/schema/searchquery.yaml b/impl/http/src/test/resources/schema/searchquery.yaml index f6dde131..26b8e8d2 100644 --- a/impl/http/src/test/resources/schema/searchquery.yaml +++ b/impl/http/src/test/resources/schema/searchquery.yaml @@ -1,6 +1,6 @@ type: object required: - - searchQuery + - uid properties: - searchQuery: + uid: type: string \ No newline at end of file diff --git a/impl/pom.xml b/impl/pom.xml index 6a380017..65fa9095 100644 --- a/impl/pom.xml +++ b/impl/pom.xml @@ -1,42 +1,66 @@ - - 4.0.0 - - io.serverlessworkflow - serverlessworkflow-parent - 7.0.0-alpha5.1 - - serverlessworkflow-impl - pom - - 3.1.9 - - - - + + 4.0.0 + io.serverlessworkflow - serverlessworkflow-impl-core - ${project.version} - - - io.serverlessworkflow - serverlessworkflow-impl-http - ${project.version} - - - org.glassfish.jersey.core - jersey-client - ${version.org.glassfish.jersey} - - - org.glassfish.jersey.media - jersey-media-json-jackson - ${version.org.glassfish.jersey} - - - - - http - core - bom - + serverlessworkflow-parent + 8.0.0-SNAPSHOT + + serverlessworkflow-impl + Serverless Workflow :: Impl + pom + + 3.1.10 + 4.0.1 + 1.2.0 + 5.2.3 + + + + + io.serverlessworkflow + serverlessworkflow-impl-core + ${project.version} + + + io.serverlessworkflow + serverlessworkflow-impl-http + ${project.version} + + + org.glassfish.jersey.core + jersey-client + ${version.org.glassfish.jersey} + + + org.glassfish.jersey.media + jersey-media-json-jackson + ${version.org.glassfish.jersey} + + + io.cloudevents + cloudevents-api + ${version.io.cloudevents} + + + io.cloudevents + cloudevents-json-jackson + ${version.io.cloudevents} + + + net.thisptr + jackson-jq + ${version.net.thisptr} + + + com.github.f4b6a3 + ulid-creator + ${version.com.github.f4b6a3} + + + + + http + core + \ No newline at end of file diff --git a/pom.xml b/pom.xml index 501bf66d..e054f843 100644 --- a/pom.xml +++ b/pom.xml @@ -1,40 +1,41 @@ - - 4.0.0 + + 4.0.0 - io.serverlessworkflow - serverlessworkflow-parent - 7.0.0-alpha5.1 - pom + io.serverlessworkflow + serverlessworkflow-parent + 8.0.0-SNAPSHOT + pom - Serverless Workflow :: Parent - https://serverlessworkflow.io/sdk-java/ - Java SDK for Serverless Workflow Specification - 2020 - - - serverless-workflow - Serverless Workflow Specification Authors - CNCF - - - - CNCF - https://www.cncf.io// - - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - scm:git:git@github.com:serverlessworkflow/sdk-java.git - scm:git:git@github.com:serverlessworkflow/sdk-java.git - https://github.com/serverlessworkflow/sdk-java - 7.0.0-alpha5.1 - + Serverless Workflow :: Parent + https://serverlessworkflow.io/sdk-java/ + Java SDK for Serverless Workflow Specification + 2020 + + + serverless-workflow + Serverless Workflow Specification Authors + CNCF + + + + CNCF + https://www.cncf.io// + + + + The Apache Software License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + scm:git:git@github.com:serverlessworkflow/sdk-java.git + scm:git:git@github.com:serverlessworkflow/sdk-java.git 
+ https://github.com/serverlessworkflow/sdk-java + HEAD + api @@ -42,47 +43,48 @@ impl - - 17 - ${java.version} - ${java.version} - UTF-8 - 3.9.7 + + 17 + ${java.version} + ${java.version} + UTF-8 + 3.9.7 - - 3.2.1 - 3.6.0 - 3.13.0 - 3.1.3 - 3.5.0 - 3.5.2 - 2.25 - 3.2.7 - 3.4.2 - ${java.version} - 1.2.2 - 3.11.1 - 3.1.1 - 3.3.1 - 3.5.2 + + 3.2.1 + 3.6.0 + 3.13.0 + 3.1.3 + 3.5.0 + 3.5.2 + 2.25 + 3.2.7 + 3.4.2 + ${java.version} + 1.2.2 + 3.11.2 + 3.1.1 + 3.3.1 + 3.5.2 + 1.6.13 - - 1.5.12 - 2.18.2 - 1.5.4 - 3.1.0 - 1.5.2 - 3.26.3 - 5.11.3 - 5.14.2 - 2.0.16 - 8.0.1.Final - 5.0.0 + + 1.5.16 + 2.18.2 + 1.5.5 + 3.1.1 + 1.5.2 + 3.27.3 + 5.11.4 + 5.15.2 + 2.0.16 + 8.0.2.Final + 5.0.0 - - true - + true + - - java - true - + + java + true + - - - - org.slf4j - slf4j-api - ${version.org.slf4j} - - - com.fasterxml.jackson.core - jackson-core - ${version.com.fasterxml.jackson} - - - com.fasterxml.jackson.core - jackson-databind - ${version.com.fasterxml.jackson} - - - com.networknt - json-schema-validator - ${version.com.networknt} - - - org.hibernate.validator - hibernate-validator - ${version.org.hibernate.validator} - - - org.glassfish.expressly - expressly - ${version.org.glassfish.expressly} - + + + + org.slf4j + slf4j-api + ${version.org.slf4j} + + + com.fasterxml.jackson.core + jackson-core + ${version.com.fasterxml.jackson} + + + com.fasterxml.jackson.core + jackson-databind + ${version.com.fasterxml.jackson} + + + com.networknt + json-schema-validator + ${version.com.networknt} + + + org.hibernate.validator + hibernate-validator + ${version.org.hibernate.validator} + + + org.glassfish.expressly + expressly + ${version.org.glassfish.expressly} + - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - ${version.com.fasterxml.jackson} - - - org.jsonschema2pojo - jsonschema2pojo-core - ${version.jsonschema2pojo-maven-plugin} - - - jakarta.validation - jakarta.validation-api - ${version.jakarta.validation} - - - - - org.junit.jupiter - junit-jupiter-api - ${version.org.junit.jupiter} - test - - - org.junit.jupiter - junit-jupiter-engine - ${version.org.junit.jupiter} - test - - - org.junit.jupiter - junit-jupiter-params - ${version.org.junit.jupiter} - test - - - org.mockito - mockito-core - ${version.org.mockito} - test - - - ch.qos.logback - logback-classic - ${version.ch.qos.logback} - test - - - org.assertj - assertj-core - ${version.org.assertj} - test - - - + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${version.com.fasterxml.jackson} + + + org.jsonschema2pojo + jsonschema2pojo-core + ${version.jsonschema2pojo-maven-plugin} + + + jakarta.validation + jakarta.validation-api + ${version.jakarta.validation} + - - - - - org.codehaus.mojo - buildnumber-maven-plugin - ${version.buildnumber.plugin} - - - get-scm-revision - initialize - - create - - - false - false - UNKNOWN - true - - - - - - maven-compiler-plugin - ${version.compiler.plugin} - - true - true - ${maven.compiler.source} - ${maven.compiler.target} - ${maven.compiler.source} - ${maven.compiler.target} - true - - -Xlint:unchecked - - - - + + + org.junit.jupiter + junit-jupiter-api + ${version.org.junit.jupiter} + test + + + org.junit.jupiter + junit-jupiter-engine + ${version.org.junit.jupiter} + test + + + org.junit.jupiter + junit-jupiter-params + ${version.org.junit.jupiter} + test + + + org.mockito + mockito-core + ${version.org.mockito} + test + + + ch.qos.logback + logback-classic + ${version.ch.qos.logback} + test + + + org.assertj + assertj-core + ${version.org.assertj} + test + + + + + + + + + 
org.codehaus.mojo + buildnumber-maven-plugin + ${version.buildnumber.plugin} + + + get-scm-revision + initialize + + create + + + false + false + UNKNOWN + true + + + + + + maven-compiler-plugin + ${version.compiler.plugin} + + true + true + ${maven.compiler.source} + ${maven.compiler.target} + ${maven.compiler.source} + ${maven.compiler.target} + true + + -Xlint:unchecked + + + + org.apache.maven.plugins maven-checkstyle-plugin - - + + - - + + @@ -293,228 +295,239 @@ - + - - - - org.apache.maven.plugins - maven-gpg-plugin - ${version.gpg.plugin} - - - maven-deploy-plugin - ${version.deploy.plugin} - - 10 - - - - org.apache.maven.plugins - maven-enforcer-plugin - ${version.enforcer.plugin} - - - enforce-versions - - enforce - - - - - ${version.maven} - - - ${version.jdk} - - - - - - - - org.apache.maven.plugins - maven-source-plugin - ${version.source.plugin} - - - attach-sources - - jar-no-fork - - - - - - true - - - true - - - true - - - - ${project.url} - ${java.version} - ${java.vendor} - ${os.name} - ${os.arch} - ${os.version} - ${project.scm.url} - ${project.scm.connection} - ${buildNumber} - - - - - - org.apache.maven.plugins - maven-release-plugin - ${version.release.plugin} - - clean install - true - @{project.version} - false - true - false - - - - org.jsonschema2pojo - jsonschema2pojo-maven-plugin - ${version.jsonschema2pojo-maven-plugin} - - - org.apache.maven.plugins - maven-surefire-plugin - ${version.surefire.plugin} - - -Xmx1024m -XX:+IgnoreUnrecognizedVMOptions -XX:MaxPermSize=256m - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${version.failsafe.plugin} - - -Xmx1024m -XX:+IgnoreUnrecognizedVMOptions -XX:MaxPermSize=256m - - - - org.apache.maven.plugins - maven-checkstyle-plugin - ${version.checkstyle.plugin} - - - com.spotify.fmt - fmt-maven-plugin - ${version.fmt-maven-plugin} - - - org.apache.maven.plugins - maven-jar-plugin - ${version.jar.plugin} - - - true - - - true - - - true - - - - ${project.url} - ${java.version} - ${java.vendor} - ${os.name} - ${os.arch} - ${os.version} - ${project.scm.url} - ${project.scm.connection} - ${buildNumber} - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - ${version.javadoc.plugin} - - false - - - - - + + + + org.sonatype.plugins + nexus-staging-maven-plugin + ${version.nexus.plugin} + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-gpg-plugin + ${version.gpg.plugin} + + + maven-deploy-plugin + ${version.deploy.plugin} + + 10 + + + + org.apache.maven.plugins + maven-enforcer-plugin + ${version.enforcer.plugin} + + + enforce-versions + + enforce + + + + + ${version.maven} + + + ${version.jdk} + + + + + + + + org.apache.maven.plugins + maven-source-plugin + ${version.source.plugin} + + + attach-sources + + jar-no-fork + + + + + + true + + + true + + + true + + + + ${project.url} + ${java.version} + ${java.vendor} + ${os.name} + ${os.arch} + ${os.version} + ${project.scm.url} + ${project.scm.connection} + ${buildNumber} + + + + + + org.apache.maven.plugins + maven-release-plugin + ${version.release.plugin} + + clean install + true + @{project.version} + false + true + false + + + + org.jsonschema2pojo + jsonschema2pojo-maven-plugin + ${version.jsonschema2pojo-maven-plugin} + + + org.apache.maven.plugins + maven-surefire-plugin + ${version.surefire.plugin} + + -Xmx1024m -XX:+IgnoreUnrecognizedVMOptions -XX:MaxPermSize=256m + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${version.failsafe.plugin} + + -Xmx1024m -XX:+IgnoreUnrecognizedVMOptions 
-XX:MaxPermSize=256m + + + + org.apache.maven.plugins + maven-checkstyle-plugin + ${version.checkstyle.plugin} + + + com.spotify.fmt + fmt-maven-plugin + ${version.fmt-maven-plugin} + + + org.apache.maven.plugins + maven-jar-plugin + ${version.jar.plugin} + + + true + + + true + + + true + + + + ${project.url} + ${java.version} + ${java.vendor} + ${os.name} + ${os.arch} + ${os.version} + ${project.scm.url} + ${project.scm.connection} + ${buildNumber} + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${version.javadoc.plugin} + + false + + + + + - - - ossrh-snapshots - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - + + + ossrh-snapshots + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + - - - central - Central Repository - https://repo.maven.apache.org/maven2 - default - - false - - - + + + central + Central Repository + https://repo.maven.apache.org/maven2 + default + + false + + + - - - release - - - - org.apache.maven.plugins - maven-gpg-plugin - - - --pinentry-mode - loopback - - - - - sign-artifacts - verify - - sign - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - package - - jar - - - - - - - - + + + release + + + + org.apache.maven.plugins + maven-gpg-plugin + + + --pinentry-mode + loopback + + + + + sign-artifacts + verify + + sign + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + package + + jar + + + + + + + +
