diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a2c5933..63ee3c3 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @tsurdilo @ricardozanini +* @ricardozanini diff --git a/.github/OWNERS b/.github/OWNERS index 9ed057f..066fb2b 100644 --- a/.github/OWNERS +++ b/.github/OWNERS @@ -1,6 +1,4 @@ reviewers: - - tsurdilo - ricardozanini approvers: - - tsurdilo - ricardozanini diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 8226f60..9e9416c 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -1,10 +1,10 @@ -# Copyright 2020 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ # limitations under the License. name: Go SDK PR Checks + on: pull_request: paths-ignore: @@ -22,22 +23,30 @@ on: - "Makefile" branches: - main + + +permissions: + contents: read + env: - GO_VERSION: 1.19 + GO_VERSION: 1.22 + jobs: basic_checks: name: Basic Checks runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 - - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v3 + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} id: go - - name: Cache dependencies - uses: actions/cache@v3 + + - name: Cache Go Modules + uses: actions/cache@v4 with: path: | ~/.cache/go-build @@ -45,38 +54,73 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - name: Cache tools - uses: actions/cache@v3 + + - name: Cache Tools + uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} restore-keys: | ${{ runner.os }}-go-tools- + - name: Check Headers run: | make addheaders changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are missing the headers: \n$changed_files\n Did you run 'make lint' before sending the PR" && exit 1) + if [[ -n "$changed_files" ]]; then + echo "❌ Some files are missing headers:\n$changed_files" + exit 1 + fi + - name: Check Formatting run: | make fmt changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are not formatted properly: \n$changed_files\n Did you run 'make test' before sending the PR?" 
&& exit 1) - - name: Check lint - uses: golangci/golangci-lint-action@v3 + if [[ -n "$changed_files" ]]; then + echo "❌ Some files are not formatted correctly:\n$changed_files" + exit 1 + fi + + - name: Run Linter + uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues with: version: latest - - name: Install cover - run: go get -modfile=tools.mod golang.org/x/tools/cmd/cover - - name: Validate codcov yaml file - run: curl -vvv --data-binary @codecov.yml https://codecov.io/validate + + - name: Install Cover Tool + run: go install golang.org/x/tools/cmd/cover@latest + - name: Run Unit Tests + run: go test ./... -coverprofile=test_coverage.out -covermode=atomic + + - name: Upload Coverage Report + uses: actions/upload-artifact@v4 + with: + name: Test Coverage Report + path: test_coverage.out + + integration_tests: + name: Integration Tests + runs-on: ubuntu-latest + needs: basic_checks + steps: + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + id: go + + - name: Run Integration Tests run: | - go test ./... -coverprofile test_coverage.out -covermode=atomic - - name: Upload results to codecov - uses: codecov/codecov-action@v1 + chmod +x ./hack/integration-test.sh + ./hack/integration-test.sh + continue-on-error: true + + - name: Upload JUnit Report + if: always() + uses: actions/upload-artifact@v4 with: - file: ./test_coverage.out - flags: sdk-go - name: sdk-go - fail_ci_if_error: true + name: Integration Test JUnit Report + path: ./integration-test-junit.xml diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml new file mode 100644 index 0000000..826fe00 --- /dev/null +++ b/.github/workflows/stale.yaml @@ -0,0 +1,36 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Mark stale issues and pull requests +on: + schedule: + - cron: "0 0 * * *" +permissions: + issues: write + pull-requests: write +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' + stale-pr-message: 'This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' 
+ stale-issue-label: 'Stale Issue' + exempt-issue-labels: 'Status: Blocked, Status: In progress, Status: On hold, Status: Awaiting response' + stale-pr-label: 'Stale PR' + exempt-pr-labels: 'Status: Blocked, Status: In progress, Status: On hold, Status: Awaiting response' + days-before-stale: 45 + days-before-close: 20 \ No newline at end of file diff --git a/.gitignore b/.gitignore index 036f9f5..914d9c4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,6 @@ bin .idea *.out -.vscode \ No newline at end of file +.vscode + +integration-test-junit.xml diff --git a/.lift.toml b/.lift.toml new file mode 100644 index 0000000..f9516c9 --- /dev/null +++ b/.lift.toml @@ -0,0 +1,3 @@ +ignoreFiles = """ +model/zz_generated.deepcopy.go +""" \ No newline at end of file diff --git a/MAINTAINERS.md b/MAINTAINERS.md index f618c14..54af970 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,3 +1,5 @@ # Serverless Workflow Go SDK Maintainers -* [Ricardo Zanini](https://github.com/ricardozanini) \ No newline at end of file +* [Ricardo Zanini](https://github.com/ricardozanini) +* [Filippe Spolti](https://github.com/spolti) +* \ No newline at end of file diff --git a/Makefile b/Makefile index 826475e..34bfc91 100644 --- a/Makefile +++ b/Makefile @@ -1,19 +1,37 @@ addheaders: - @command -v addlicense > /dev/null || go install -modfile=tools.mod -v github.com/google/addlicense + @command -v addlicense > /dev/null || (echo "πŸš€ Installing addlicense..."; go install -modfile=tools.mod -v github.com/google/addlicense) @addlicense -c "The Serverless Workflow Specification Authors" -l apache . fmt: @go vet ./... @go fmt ./... +goimports: + @command -v goimports > /dev/null || (echo "πŸš€ Installing goimports..."; go install golang.org/x/tools/cmd/goimports@latest) + @goimports -w . + lint: - @command -v golangci-lint > /dev/null || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin" - make addheaders - make fmt - ./hack/go-lint.sh + @echo "πŸš€ Installing/updating golangci-lint…" + GO111MODULE=on go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest + + @echo "πŸš€ Running lint…" + @make addheaders + @make goimports + @make fmt + @$(GOPATH)/bin/golangci-lint run ./... ${params} + @echo "βœ… Linting completed!" .PHONY: test coverage="false" + test: - make lint + @echo "πŸ§ͺ Running tests..." @go test ./... + @echo "βœ… Tests completed!" + +.PHONY: integration-test + +integration-test: + @echo "πŸ”„ Running integration tests..." + @./hack/integration-test.sh + @echo "βœ… Integration tests completed!" \ No newline at end of file diff --git a/OWNERS b/OWNERS new file mode 100644 index 0000000..bc971b8 --- /dev/null +++ b/OWNERS @@ -0,0 +1,8 @@ +# List of usernames who may use /lgtm +reviewers: +- ricardozanini +- spolti + +# List of usernames who may use /approve +approvers: +- ricardozanini \ No newline at end of file diff --git a/README.md b/README.md index 9c8ef9c..36f11c8 100644 --- a/README.md +++ b/README.md @@ -1,63 +1,214 @@ # Go SDK for Serverless Workflow -Here you will find all the [specification types](https://github.com/serverlessworkflow/specification/blob/main/schema/workflow.json) defined by our Json Schemas, in Go. +The Go SDK for Serverless Workflow provides strongly-typed structures for the [Serverless Workflow specification](https://github.com/serverlessworkflow/specification/blob/v1.0.0/schema/workflow.yaml). It simplifies parsing, validating, and interacting with workflows in Go. 
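Parsing a definition with the renamed `v3` module looks roughly like the following minimal sketch; it relies only on the `parser.FromYAMLSource` call and the `model.Workflow` fields that appear later in this diff, and the file name is illustrative:

```go
package main

import (
	"fmt"
	"os"

	"github.com/serverlessworkflow/sdk-go/v3/parser"
)

func main() {
	// Read a workflow definition authored in YAML.
	data, err := os.ReadFile("./myworkflow.yaml")
	if err != nil {
		panic(err)
	}

	// FromYAMLSource returns the strongly-typed *model.Workflow.
	workflow, err := parser.FromYAMLSource(data)
	if err != nil {
		panic(err)
	}

	fmt.Printf("parsed workflow: %s\n", workflow.Document.Name)
}
```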
Starting from version `v3.1.0`, the SDK also includes a partial reference implementation, allowing users to execute workflows directly within their Go applications. -Current status of features implemented in the SDK is listed in the table below: +--- -| Feature | Status | -|-------------------------------------------- | ------------------ | -| Parse workflow JSON and YAML definitions | :heavy_check_mark: | -| Programmatically build workflow definitions | :no_entry_sign: | -| Validate workflow definitions (Schema) | :heavy_check_mark: | -| Validate workflow definitions (Integrity) | :heavy_check_mark: | -| Generate workflow diagram (SVG) | :no_entry_sign: | +## Table of Contents + +- [Status](#status) +- [Releases](#releases) +- [Getting Started](#getting-started) + - [Installation](#installation) + - [Basic Usage](#basic-usage) + - [Parsing Workflow Files](#parsing-workflow-files) + - [Programmatic Workflow Creation](#programmatic-workflow-creation) +- [Reference Implementation](#reference-implementation) + - [Example: Running a Workflow](#example-running-a-workflow) +- [Slack Community](#slack-community) +- [Contributing](#contributing) + - [Code Style](#code-style) + - [EditorConfig](#editorconfig) + - [Known Issues](#known-issues) + +--- ## Status -| Latest Releases | Conformance to spec version | -| :---: | :---: | -| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | -| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | -| [v2.1.x](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.0) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +This table indicates the current state of implementation of various SDK features: -## How to use +| Feature | Status | +|-------------------------------------------- |---------------------| +| Parse workflow JSON and YAML definitions | :heavy_check_mark: | +| Programmatically build workflow definitions | :heavy_check_mark: | +| Validate workflow definitions (Schema) | :heavy_check_mark: | +| Specification Implementation | :heavy_check_mark:* | +| Validate workflow definitions (Integrity) | :no_entry_sign: | +| Generate workflow diagram (SVG) | :no_entry_sign: | -Run the following command in the root of your Go's project: +> **Note**: *Implementation is partial; contributions are encouraged. -```shell script -$ go get github.com/serverlessworkflow/sdk-go/v2 -``` +--- -Your `go.mod` file should be updated to add a dependency from the Serverless Workflow specification. 
+## Releases -To use the generated types, import the package in your go file like this: +| Latest Releases | Conformance to Spec Version | +|:--------------------------------------------------------------------------:|:---------------------------------------------------------------------------------:| +| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | +| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.5.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.5.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v3.1.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.1.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0) | -```go -import "github.com/serverlessworkflow/sdk-go/v2/model" -``` +--- + +## Reference Implementation + +The SDK provides a partial reference runner to execute your workflows: + +### Example: Running a Workflow -Then just reference the package in your Go file like `myfunction := model.Function{}`. +Below is a simple YAML workflow that sets a message and then prints it: -### Parsing Serverless Workflow files +```yaml +document: + dsl: "1.0.0" + namespace: "examples" + name: "simple-workflow" + version: "1.0.0" +do: + - set: + message: "Hello from the Serverless Workflow SDK in Go!" +``` + +You can execute this workflow using the following Go program: -Serverless Workflow Specification supports YAML and JSON files for Workflow definitions. -To transform such files into a Go data structure, use: +Example of executing a workflow defined in YAML: ```go -package sw +package main import ( - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/parser" + "fmt" + "os" + "path/filepath" + + "github.com/serverlessworkflow/sdk-go/v3/impl" + "github.com/serverlessworkflow/sdk-go/v3/parser" ) -func ParseWorkflow(filePath string) (*model.Workflow, error) { - workflow, err := parser.FromFile(filePath) +func RunWorkflow(workflowFilePath string, input map[string]interface{}) (interface{}, error) { + data, err := os.ReadFile(filepath.Clean(workflowFilePath)) + if err != nil { + return nil, err + } + workflow, err := parser.FromYAMLSource(data) + if err != nil { + return nil, err + } + + runner := impl.NewDefaultRunner(workflow) + output, err := runner.Run(input) if err != nil { return nil, err - } - return workflow, nil -} + } + return output, nil +} + +func main() { + output, err := RunWorkflow("./myworkflow.yaml", map[string]interface{}{"shouldCall": true}) + if err != nil { + panic(err) + } + fmt.Printf("Workflow completed with output: %v\n", output) +} +``` + +### Implementation Roadmap + +The table below lists the current state of this implementation. This table is a roadmap for the project based on the [DSL Reference doc](https://github.com/serverlessworkflow/specification/blob/v1.0.0/dsl-reference.md). 
+ +| Feature | State | +| ----------- | --------------- | +| Workflow Document | βœ… | +| Workflow Use | 🟑 | +| Workflow Schedule | ❌ | +| Task Call | ❌ | +| Task Do | βœ… | +| Task Emit | ❌ | +| Task For | βœ… | +| Task Fork | βœ… | +| Task Listen | ❌ | +| Task Raise | βœ… | +| Task Run | ❌ | +| Task Set | βœ… | +| Task Switch | βœ… | +| Task Try | ❌ | +| Task Wait | ❌ | +| Lifecycle Events | 🟑 | +| External Resource | ❌ | +| Authentication | ❌ | +| Catalog | ❌ | +| Extension | ❌ | +| Error | βœ… | +| Event Consumption Strategies | ❌ | +| Retry | ❌ | +| Input | βœ… | +| Output | βœ… | +| Export | βœ… | +| Timeout | ❌ | +| Duration | ❌ | +| Endpoint | βœ… | +| HTTP Response | ❌ | +| HTTP Request | ❌ | +| URI Template | βœ… | +| Container Lifetime | ❌ | +| Process Result | ❌ | +| AsyncAPI Server | ❌ | +| AsyncAPI Outbound Message | ❌ | +| AsyncAPI Subscription | ❌ | +| Workflow Definition Reference | βœ… | +| Subscription Iterator | ❌ | + +We love contributions! Our aim is to have a complete implementation to serve as a reference or to become a project on its own to favor the CNCF Ecosystem. + +If you are willing to help, please [file a sub-task](https://github.com/serverlessworkflow/sdk-go/issues/221) in this EPIC describing what you are planning to work on first. + +--- + +## Slack Community + +Join our community on the CNCF Slack to collaborate, ask questions, and contribute: + +[CNCF Slack Invite](https://communityinviter.com/apps/cloud-native/cncf) + +Find us in the `#serverless-workflow-sdk` channel. + +--- + +## Contributing + +Your contributions are very welcome! + +### Code Style + +- Format imports with `goimports`. +- Run static analysis using: + +```shell +make lint ``` -The `Workflow` structure then can be used in your application. +Automatically fix lint issues: + +```shell +make lint params=--fix +``` + +### EditorConfig + +A sample `.editorconfig` for IntelliJ or GoLand users can be found [here](contrib/intellij.editorconfig). + +### Known Issues + +- **MacOS Issue**: If you encounter `goimports: can't extract issues from gofmt diff output`, resolve it with: + +```shell +brew install diffutils +``` + +--- + +Contributions are greatly appreciated! Check [this EPIC](https://github.com/serverlessworkflow/sdk-go/issues/221) and contribute to completing more features. + +Happy coding! diff --git a/builder/builder.go b/builder/builder.go new file mode 100644 index 0000000..45ccc2e --- /dev/null +++ b/builder/builder.go @@ -0,0 +1,67 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "encoding/json" + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" + + "sigs.k8s.io/yaml" +) + +// New initializes a new WorkflowBuilder instance. +func New() *model.WorkflowBuilder { + return model.NewWorkflowBuilder() +} + +// Yaml generates YAML output from the WorkflowBuilder using custom MarshalYAML implementations. 
+func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { + workflow, err := Object(builder) + if err != nil { + return nil, fmt.Errorf("failed to build workflow object: %w", err) + } + return yaml.Marshal(workflow) +} + +// Json generates JSON output from the WorkflowBuilder. +func Json(builder *model.WorkflowBuilder) ([]byte, error) { + workflow, err := Object(builder) + if err != nil { + return nil, fmt.Errorf("failed to build workflow object: %w", err) + } + return json.MarshalIndent(workflow, "", " ") +} + +// Object builds and validates the Workflow object from the builder. +func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { + workflow := builder.Build() + + // Validate the workflow object + if err := model.GetValidator().Struct(workflow); err != nil { + return nil, fmt.Errorf("workflow validation failed: %w", err) + } + + return workflow, nil +} + +// Validate validates any given object using the Workflow model validator. +func Validate(object interface{}) error { + if err := model.GetValidator().Struct(object); err != nil { + return fmt.Errorf("validation failed: %w", err) + } + return nil +} diff --git a/builder/builder_test.go b/builder/builder_test.go new file mode 100644 index 0000000..6bf459c --- /dev/null +++ b/builder/builder_test.go @@ -0,0 +1,177 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "errors" + "testing" + + validator "github.com/go-playground/validator/v10" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/test" + + "github.com/stretchr/testify/assert" +) + +func TestBuilder_Yaml(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + // Generate YAML from the builder + yamlData, err := Yaml(builder) + assert.NoError(t, err) + + // Define the expected YAML structure + expectedYAML := `document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: +- task1: + call: http + if: ${condition} + with: + method: GET + endpoint: http://example.com +` + + // Use assertYAMLEq to compare YAML structures + test.AssertYAMLEq(t, expectedYAML, string(yamlData)) +} + +func TestBuilder_Json(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). 
+ AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + jsonData, err := Json(builder) + assert.NoError(t, err) + + expectedJSON := `{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "if": "${condition}", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +}` + assert.JSONEq(t, expectedJSON, string(jsonData)) +} + +func TestBuilder_Object(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + workflow, err := Object(builder) + assert.NoError(t, err) + assert.NotNil(t, workflow) + + assert.Equal(t, "1.0.0", workflow.Document.DSL) + assert.Equal(t, "examples", workflow.Document.Namespace) + assert.Equal(t, "example-workflow", workflow.Document.Name) + assert.Equal(t, "1.0.0", workflow.Document.Version) + assert.Len(t, *workflow.Do, 1) + assert.Equal(t, "http", (*workflow.Do)[0].Task.(*model.CallHTTP).Call) +} + +func TestBuilder_Validate(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{ + DSL: "1.0.0", + Namespace: "examples", + Name: "example-workflow", + Version: "1.0.0", + }, + Do: &model.TaskList{ + &model.TaskItem{ + Key: "task1", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }, + }, + }, + } + + err := Validate(workflow) + assert.NoError(t, err) + + // Test validation failure + workflow.Do = &model.TaskList{ + &model.TaskItem{ + Key: "task2", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", // Missing Endpoint + }, + }, + }, + } + err = Validate(workflow) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + t.Logf("Validation errors: %v", validationErrors) + assert.Contains(t, validationErrors.Error(), "Do[0].Task.With.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") + } +} diff --git a/code-of-conduct.md b/code-of-conduct.md index ddd14b6..97a8526 100644 --- a/code-of-conduct.md +++ b/code-of-conduct.md @@ -1,58 +1,11 @@ -## CNCF Community Code of Conduct v1.0 +# Code of Conduct -Other languages available: -- [Chinese/δΈ­ζ–‡](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/zh.md) -- [German/Deutsch](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/de.md) -- [Spanish/EspaΓ±ol](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/es.md) -- [French/FranΓ§ais](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/fr.md) -- [Italian/Italiano](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/it.md) -- [Japanese/ζ—₯本θͺž](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/jp.md) -- [Korean/ν•œκ΅­μ–΄](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ko.md) -- 
[Ukrainian/Π£ΠΊΡ€Π°Ρ—Π½ΡΡŒΠΊΠ°](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/uk.md) -- [Russian/Русский](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ru.md) -- [Portuguese/PortuguΓͺs](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pt.md) -- [Arabic/Ψ§Ω„ΨΉΨ±Ψ¨ΩŠΨ©](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ar.md) -- [Polish/Polski](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pl.md) +We follow the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/main/code-of-conduct.md). -### Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering -an open and welcoming community, we pledge to respect all people who contribute -through reporting issues, posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for -everyone, regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, body size, race, ethnicity, age, -religion, or nationality. - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing others' private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct. - -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are not -aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers -commit themselves to fairly and consistently applying these principles to every aspect -of managing this project. Project maintainers who do not follow or enforce the Code of -Conduct may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior in Kubernetes may be reported by contacting the [Kubernetes Code of Conduct Committee](https://git.k8s.io/community/committee-code-of-conduct) via conduct@kubernetes.io. For other projects, please contact a CNCF project maintainer or our mediator, Mishi Choudhary via mishi@linux.com. - -This Code of Conduct is adapted from the Contributor Covenant -(), version 1.2.0, available at - - -### CNCF Events Code of Conduct - -CNCF events are governed by the Linux Foundation [Code of Conduct](https://events.linuxfoundation.org/code-of-conduct/) available on the event page. -This is designed to be compatible with the above policy and also includes more details on responding to incidents. \ No newline at end of file + +Please contact the [CNCF Code of Conduct Committee](mailto:conduct@cncf.io) +in order to report violations of the Code of Conduct. 
diff --git a/contrib/intellij.editorconfig b/contrib/intellij.editorconfig new file mode 100644 index 0000000..5b5a1ca --- /dev/null +++ b/contrib/intellij.editorconfig @@ -0,0 +1,30 @@ +root = true + +[{*.go,*.go2}] +indent_style = tab +ij_continuation_indent_size = 4 +ij_go_GROUP_CURRENT_PROJECT_IMPORTS = true +ij_go_add_leading_space_to_comments = true +ij_go_add_parentheses_for_single_import = false +ij_go_call_parameters_new_line_after_left_paren = true +ij_go_call_parameters_right_paren_on_new_line = true +ij_go_call_parameters_wrap = off +ij_go_fill_paragraph_width = 80 +ij_go_group_stdlib_imports = true +ij_go_import_sorting = goimports +ij_go_keep_indents_on_empty_lines = false +ij_go_local_group_mode = project +ij_go_move_all_imports_in_one_declaration = true +ij_go_move_all_stdlib_imports_in_one_group = false +ij_go_remove_redundant_import_aliases = true +ij_go_run_go_fmt_on_reformat = true +ij_go_use_back_quotes_for_imports = false +ij_go_wrap_comp_lit = off +ij_go_wrap_comp_lit_newline_after_lbrace = true +ij_go_wrap_comp_lit_newline_before_rbrace = true +ij_go_wrap_func_params = off +ij_go_wrap_func_params_newline_after_lparen = true +ij_go_wrap_func_params_newline_before_rparen = true +ij_go_wrap_func_result = off +ij_go_wrap_func_result_newline_after_lparen = true +ij_go_wrap_func_result_newline_before_rparen = true \ No newline at end of file diff --git a/go.mod b/go.mod index 88d4311..646715d 100644 --- a/go.mod +++ b/go.mod @@ -1,28 +1,35 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v3 -go 1.19 +go 1.23.0 + +toolchain go1.24.0 require ( - github.com/go-playground/validator/v10 v10.11.0 - github.com/stretchr/testify v1.7.0 - k8s.io/apimachinery v0.25.0 - sigs.k8s.io/yaml v1.3.0 + github.com/go-playground/validator/v10 v10.25.0 + github.com/google/uuid v1.6.0 + github.com/itchyny/gojq v0.12.17 + github.com/stretchr/testify v1.10.0 + github.com/tidwall/gjson v1.18.0 + github.com/xeipuuv/gojsonschema v1.2.0 + sigs.k8s.io/yaml v1.4.0 ) require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-logr/logr v1.2.3 // indirect - github.com/go-playground/locales v0.14.0 // indirect - github.com/go-playground/universal-translator v0.18.0 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/google/gofuzz v1.1.0 // indirect - github.com/leodido/go-urn v1.2.1 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d // indirect - golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect - golang.org/x/text v0.3.7 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/gabriel-vasile/mimetype v1.4.8 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/itchyny/timefmt-go v0.1.6 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + golang.org/x/crypto v0.36.0 // indirect + golang.org/x/net v0.38.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.23.0 // 
indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/klog/v2 v2.70.1 // indirect - sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect ) diff --git a/go.sum b/go.sum index 830ed38..489a35c 100644 --- a/go.sum +++ b/go.sum @@ -1,102 +1,59 @@ -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.11.0 h1:0W+xRM511GY47Yy3bZUbJVitCNg2BOGlCyvTqsp/xIw= -github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= -github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= +github.com/gabriel-vasile/mimetype v1.4.8/go.mod 
h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8= +github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= +github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= +github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= +github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= -github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3 h1:0es+/5331RGQPcXlMfP+WrnIIS6dNnNRe0WB02W0F4M= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d 
h1:3qF+Z8Hkrw9sOhrFHti9TlB1Hkac1x+DNRkv0XQiFjo= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/apimachinery v0.25.0 h1:MlP0r6+3XbkUG2itd6vp3oxbtdQLQI94fD5gCS+gnoU= -k8s.io/apimachinery v0.25.0/go.mod 
h1:qMx9eAk0sZQGsXGu86fab8tZdffHbwUfsvzqKn4mfB0= -k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= -k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= -sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= -sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/model/common.go b/hack/boilerplate.txt similarity index 61% rename from model/common.go rename to hack/boilerplate.txt index 34c5ac7..3b2e6c3 100644 --- a/model/common.go +++ b/hack/boilerplate.txt @@ -1,4 +1,4 @@ -// Copyright 2021 The Serverless Workflow Specification Authors +// Copyright 2023 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -10,15 +10,4 @@ // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and -// limitations under the License. - -package model - -// Common schema for Serverless Workflow specification -type Common struct { - // Metadata information - Metadata Metadata `json:"metadata,omitempty"` -} - -// Metadata information -type Metadata map[string]interface{} +// limitations under the License. \ No newline at end of file diff --git a/hack/go-lint.sh b/hack/go-lint.sh index a9c0251..110ad60 100755 --- a/hack/go-lint.sh +++ b/hack/go-lint.sh @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -golangci-lint run ./... --timeout 2m0s +golangci-lint run -E goimports -E errorlint -E gosec ${1} ./... --timeout 2m0s diff --git a/hack/integration-test.sh b/hack/integration-test.sh new file mode 100755 index 0000000..52f6889 --- /dev/null +++ b/hack/integration-test.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Script to fetch workflow examples, parse, and validate them using the Go parser. + +# Variables +SPEC_REPO="https://github.com/serverlessworkflow/specification" +EXAMPLES_DIR="examples" +PARSER_BINARY="./parser/cmd/main.go" +JUNIT_FILE="./integration-test-junit.xml" + +# Create a temporary directory +TEMP_DIR=$(mktemp -d) + +# Ensure temporary directory was created +if [ ! -d "$TEMP_DIR" ]; then + echo "❌ Failed to create a temporary directory." 
+ exit 1 +fi + +# shellcheck disable=SC2317 +# Clean up the temporary directory on script exit +cleanup() { + echo "🧹 Cleaning up temporary directory..." + rm -rf "$TEMP_DIR" +} +trap cleanup EXIT + +# Fetch the examples directory +echo "πŸ“₯ Fetching workflow examples from ${SPEC_REPO}/${EXAMPLES_DIR}..." +if ! git clone --depth=1 --filter=blob:none --sparse "$SPEC_REPO" "$TEMP_DIR" &> /dev/null; then + echo "❌ Failed to clone specification repository." + exit 1 +fi + +cd "$TEMP_DIR" || exit +if ! git sparse-checkout set "$EXAMPLES_DIR" &> /dev/null; then + echo "❌ Failed to checkout examples directory." + exit 1 +fi + +cd - || exit + +# Prepare JUnit XML output +echo '' > "$JUNIT_FILE" +echo '' >> "$JUNIT_FILE" + +# Initialize test summary +total_tests=0 +failed_tests=0 + +# Walk through files and validate +echo "βš™οΈ Running parser on fetched examples..." +while IFS= read -r file; do + filename=$(basename "$file") + echo "πŸ” Validating: $filename" + + # Run the parser for the file + if go run "$PARSER_BINARY" "$file" > "$TEMP_DIR/validation.log" 2>&1; then + echo "βœ… Validation succeeded for $filename" + echo " " >> "$JUNIT_FILE" + else + echo "❌ Validation failed for $filename" + failure_message=$(cat "$TEMP_DIR/validation.log" | sed 's/&/&/g; s//>/g') + echo " " >> "$JUNIT_FILE" + echo " " >> "$JUNIT_FILE" + echo " " >> "$JUNIT_FILE" + ((failed_tests++)) + fi + + ((total_tests++)) +done < <(find "$TEMP_DIR/$EXAMPLES_DIR" -type f \( -name "*.yaml" -o -name "*.yml" -o -name "*.json" \)) + +# Finalize JUnit XML output +echo '' >> "$JUNIT_FILE" + +# Display test summary +if [ $failed_tests -ne 0 ]; then + echo "❌ Validation failed for $failed_tests out of $total_tests workflows." + exit 1 +else + echo "βœ… All $total_tests workflows validated successfully." +fi + +exit 0 diff --git a/impl/ctx/context.go b/impl/ctx/context.go new file mode 100644 index 0000000..ff1d260 --- /dev/null +++ b/impl/ctx/context.go @@ -0,0 +1,443 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package ctx + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sync" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + + "github.com/google/uuid" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var ErrWorkflowContextNotFound = errors.New("workflow context not found") + +var _ WorkflowContext = &workflowContext{} + +type ctxKey string + +const ( + runnerCtxKey ctxKey = "wfRunnerContext" + + varsContext = "$context" + varsInput = "$input" + varsOutput = "$output" + varsWorkflow = "$workflow" + varsRuntime = "$runtime" + varsTask = "$task" + + // TODO: script during the release to update this value programmatically + runtimeVersion = "v3.1.0" + runtimeName = "CNCF Serverless Workflow Specification Go SDK" +) + +type WorkflowContext interface { + SetStartedAt(t time.Time) + SetStatus(status StatusPhase) + SetRawInput(input interface{}) + SetInstanceCtx(value interface{}) + GetInstanceCtx() interface{} + SetInput(input interface{}) + GetInput() interface{} + SetOutput(output interface{}) + GetOutput() interface{} + GetOutputAsMap() map[string]interface{} + GetVars() map[string]interface{} + SetTaskStatus(task string, status StatusPhase) + SetTaskRawInput(input interface{}) + SetTaskRawOutput(output interface{}) + SetTaskDef(task model.Task) error + SetTaskStartedAt(startedAt time.Time) + SetTaskName(name string) + SetTaskReference(ref string) + GetTaskReference() string + ClearTaskContext() + SetLocalExprVars(vars map[string]interface{}) + AddLocalExprVars(vars map[string]interface{}) + RemoveLocalExprVars(keys ...string) + Clone() WorkflowContext +} + +// workflowContext holds the necessary data for the workflow execution within the instance. +type workflowContext struct { + mu sync.Mutex + input interface{} // $input can hold any type + output interface{} // $output can hold any type + context map[string]interface{} // Holds `$context` as the key + workflowDescriptor map[string]interface{} // $workflow representation in the context + taskDescriptor map[string]interface{} // $task representation in the context + localExprVars map[string]interface{} // Local expression variables defined in a given task or private context. E.g. a For task $item. 
+ StatusPhase []StatusPhaseLog + TasksStatusPhase map[string][]StatusPhaseLog +} + +func NewWorkflowContext(workflow *model.Workflow) (WorkflowContext, error) { + workflowCtx := &workflowContext{} + workflowDef, err := workflow.AsMap() + if err != nil { + return nil, err + } + workflowCtx.taskDescriptor = map[string]interface{}{} + workflowCtx.workflowDescriptor = map[string]interface{}{ + varsWorkflow: map[string]interface{}{ + "id": uuid.NewString(), + "definition": workflowDef, + }, + } + workflowCtx.SetStatus(PendingStatus) + + return workflowCtx, nil +} + +// WithWorkflowContext adds the workflowContext to a parent context +func WithWorkflowContext(parent context.Context, wfCtx WorkflowContext) context.Context { + return context.WithValue(parent, runnerCtxKey, wfCtx) +} + +// GetWorkflowContext retrieves the workflowContext from a context +func GetWorkflowContext(ctx context.Context) (WorkflowContext, error) { + wfCtx, ok := ctx.Value(runnerCtxKey).(*workflowContext) + if !ok { + return nil, ErrWorkflowContextNotFound + } + return wfCtx, nil +} + +func (ctx *workflowContext) Clone() WorkflowContext { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + newInput := utils.DeepCloneValue(ctx.input) + newOutput := utils.DeepCloneValue(ctx.output) + + // deep clone each of the maps + newContextMap := utils.DeepClone(ctx.context) + newWorkflowDesc := utils.DeepClone(ctx.workflowDescriptor) + newTaskDesc := utils.DeepClone(ctx.taskDescriptor) + newLocalExprVars := utils.DeepClone(ctx.localExprVars) + + newStatusPhase := append([]StatusPhaseLog(nil), ctx.StatusPhase...) + + newTasksStatusPhase := make(map[string][]StatusPhaseLog, len(ctx.TasksStatusPhase)) + for taskName, logs := range ctx.TasksStatusPhase { + newTasksStatusPhase[taskName] = append([]StatusPhaseLog(nil), logs...) 
+ } + + return &workflowContext{ + input: newInput, + output: newOutput, + context: newContextMap, + workflowDescriptor: newWorkflowDesc, + taskDescriptor: newTaskDesc, + localExprVars: newLocalExprVars, + StatusPhase: newStatusPhase, + TasksStatusPhase: newTasksStatusPhase, + } +} + +func (ctx *workflowContext) SetStartedAt(t time.Time) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + wf, ok := ctx.workflowDescriptor[varsWorkflow].(map[string]interface{}) + if !ok { + wf = make(map[string]interface{}) + ctx.workflowDescriptor[varsWorkflow] = wf + } + + startedAt, ok := wf["startedAt"].(map[string]interface{}) + if !ok { + startedAt = make(map[string]interface{}) + wf["startedAt"] = startedAt + } + + startedAt["iso8601"] = t.UTC().Format(time.RFC3339) +} + +func (ctx *workflowContext) SetRawInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + // Ensure the outer "workflow" map + wf, ok := ctx.workflowDescriptor[varsWorkflow].(map[string]interface{}) + if !ok { + wf = make(map[string]interface{}) + ctx.workflowDescriptor[varsWorkflow] = wf + } + + // Store the input + wf["input"] = input +} + +func (ctx *workflowContext) AddLocalExprVars(vars map[string]interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.localExprVars == nil { + ctx.localExprVars = map[string]interface{}{} + } + for k, v := range vars { + ctx.localExprVars[k] = v + } +} + +func (ctx *workflowContext) RemoveLocalExprVars(keys ...string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if ctx.localExprVars == nil { + return + } + + for _, k := range keys { + delete(ctx.localExprVars, k) + } +} + +func (ctx *workflowContext) SetLocalExprVars(vars map[string]interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.localExprVars = vars +} + +func (ctx *workflowContext) GetVars() map[string]interface{} { + vars := make(map[string]interface{}) + vars[varsInput] = ctx.GetInput() + vars[varsOutput] = ctx.GetOutput() + vars[varsContext] = ctx.GetInstanceCtx() + vars[varsTask] = ctx.taskDescriptor[varsTask] + vars[varsWorkflow] = ctx.workflowDescriptor[varsWorkflow] + vars[varsRuntime] = map[string]interface{}{ + "name": runtimeName, + "version": runtimeVersion, + } + for varName, varValue := range ctx.localExprVars { + vars[varName] = varValue + } + return vars +} + +func (ctx *workflowContext) SetStatus(status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.StatusPhase == nil { + ctx.StatusPhase = []StatusPhaseLog{} + } + ctx.StatusPhase = append(ctx.StatusPhase, NewStatusPhaseLog(status)) +} + +// SetInstanceCtx safely sets the `$context` value +func (ctx *workflowContext) SetInstanceCtx(value interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + ctx.context = make(map[string]interface{}) + } + ctx.context[varsContext] = value +} + +// GetInstanceCtx safely retrieves the `$context` value +func (ctx *workflowContext) GetInstanceCtx() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + return nil + } + return ctx.context[varsContext] +} + +// SetInput safely sets the input +func (ctx *workflowContext) SetInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.input = input +} + +// GetInput safely retrieves the input +func (ctx *workflowContext) GetInput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.input +} + +// SetOutput safely sets the output +func (ctx *workflowContext) SetOutput(output interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.output = output +} + +// GetOutput safely 
retrieves the output +func (ctx *workflowContext) GetOutput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.output +} + +// GetInputAsMap safely retrieves the input as a map[string]interface{}. +// If input is not a map, it creates a map with an empty string key and the input as the value. +func (ctx *workflowContext) GetInputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if inputMap, ok := ctx.input.(map[string]interface{}); ok { + return inputMap + } + + // If input is not a map, create a map with an empty key and set input as the value + return map[string]interface{}{ + "": ctx.input, + } +} + +// GetOutputAsMap safely retrieves the output as a map[string]interface{}. +// If output is not a map, it creates a map with an empty string key and the output as the value. +func (ctx *workflowContext) GetOutputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if outputMap, ok := ctx.output.(map[string]interface{}); ok { + return outputMap + } + + // If output is not a map, create a map with an empty key and set output as the value + return map[string]interface{}{ + "": ctx.output, + } +} + +func (ctx *workflowContext) SetTaskStatus(task string, status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.TasksStatusPhase == nil { + ctx.TasksStatusPhase = map[string][]StatusPhaseLog{} + } + ctx.TasksStatusPhase[task] = append(ctx.TasksStatusPhase[task], NewStatusPhaseLog(status)) +} + +func (ctx *workflowContext) SetTaskRawInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["input"] = input +} + +func (ctx *workflowContext) SetTaskRawOutput(output interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["output"] = output +} + +func (ctx *workflowContext) SetTaskDef(task model.Task) error { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if task == nil { + return errors.New("SetTaskDef called with nil model.Task") + } + + defBytes, err := json.Marshal(task) + if err != nil { + return fmt.Errorf("failed to marshal task: %w", err) + } + + var defMap map[string]interface{} + if err := json.Unmarshal(defBytes, &defMap); err != nil { + return fmt.Errorf("failed to unmarshal task into map: %w", err) + } + + taskMap, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + taskMap = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = taskMap + } + + taskMap["definition"] = defMap + + return nil +} + +func (ctx *workflowContext) SetTaskStartedAt(startedAt time.Time) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["startedAt"] = startedAt.UTC().Format(time.RFC3339) +} + +func (ctx *workflowContext) SetTaskName(name string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["name"] = name +} + +func (ctx *workflowContext) SetTaskReference(ref string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := 
ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["reference"] = ref +} + +func (ctx *workflowContext) GetTaskReference() string { + ctx.mu.Lock() + defer ctx.mu.Unlock() + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + return "" + } + return task["reference"].(string) +} + +func (ctx *workflowContext) ClearTaskContext() { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.taskDescriptor[varsTask] = make(map[string]interface{}) +} diff --git a/impl/ctx/status_phase.go b/impl/ctx/status_phase.go new file mode 100644 index 0000000..ddcab9c --- /dev/null +++ b/impl/ctx/status_phase.go @@ -0,0 +1,52 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ctx + +import "time" + +type StatusPhase string + +const ( + // PendingStatus The workflow/task has been initiated and is pending execution. + PendingStatus StatusPhase = "pending" + // RunningStatus The workflow/task is currently in progress. + RunningStatus StatusPhase = "running" + // WaitingStatus The workflow/task execution is temporarily paused, awaiting either inbound event(s) or a specified time interval as defined by a wait task. + WaitingStatus StatusPhase = "waiting" + // SuspendedStatus The workflow/task execution has been manually paused by a user and will remain halted until explicitly resumed. + SuspendedStatus StatusPhase = "suspended" + // CancelledStatus The workflow/task execution has been terminated before completion. + CancelledStatus StatusPhase = "cancelled" + // FaultedStatus The workflow/task execution has encountered an error. + FaultedStatus StatusPhase = "faulted" + // CompletedStatus The workflow/task ran to completion. + CompletedStatus StatusPhase = "completed" +) + +func (s StatusPhase) String() string { + return string(s) +} + +type StatusPhaseLog struct { + Timestamp int64 `json:"timestamp"` + Status StatusPhase `json:"status"` +} + +func NewStatusPhaseLog(status StatusPhase) StatusPhaseLog { + return StatusPhaseLog{ + Status: status, + Timestamp: time.Now().UnixMilli(), + } +} diff --git a/impl/expr/expr.go b/impl/expr/expr.go new file mode 100644 index 0000000..77faffb --- /dev/null +++ b/impl/expr/expr.go @@ -0,0 +1,159 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
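As an illustration of the status log introduced in impl/ctx/status_phase.go, here is a minimal sketch that records a phase transition and inspects its JSON shape; only the exported ctx API added in this diff is assumed.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/impl/ctx"
)

func main() {
	// Record a transition; the log entry carries the phase and a Unix-millisecond timestamp.
	entry := ctx.NewStatusPhaseLog(ctx.RunningStatus)

	data, err := json.Marshal(entry)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))          // e.g. {"timestamp":1700000000000,"status":"running"}
	fmt.Println(entry.Status.String()) // "running"
}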
+ +package expr + +import ( + "context" + "errors" + "fmt" + + "github.com/itchyny/gojq" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func TraverseAndEvaluateWithVars(node interface{}, input interface{}, variables map[string]interface{}, nodeContext context.Context) (interface{}, error) { + if err := mergeContextInVars(nodeContext, variables); err != nil { + return nil, err + } + return traverseAndEvaluate(node, input, variables) +} + +// TraverseAndEvaluate recursively processes and evaluates all expressions in a JSON-like structure +func TraverseAndEvaluate(node interface{}, input interface{}, nodeContext context.Context) (interface{}, error) { + return TraverseAndEvaluateWithVars(node, input, map[string]interface{}{}, nodeContext) +} + +func traverseAndEvaluate(node interface{}, input interface{}, variables map[string]interface{}) (interface{}, error) { + switch v := node.(type) { + case map[string]interface{}: + // Traverse map + for key, value := range v { + evaluatedValue, err := traverseAndEvaluate(value, input, variables) + if err != nil { + return nil, err + } + v[key] = evaluatedValue + } + return v, nil + + case []interface{}: + // Traverse array + for i, value := range v { + evaluatedValue, err := traverseAndEvaluate(value, input, variables) + if err != nil { + return nil, err + } + v[i] = evaluatedValue + } + return v, nil + + case string: + // Check if the string is a runtime expression (e.g., ${ .some.path }) + if model.IsStrictExpr(v) { + return evaluateJQExpression(model.SanitizeExpr(v), input, variables) + } + return v, nil + + default: + // Return other types as-is + return v, nil + } +} + +// evaluateJQExpression evaluates a jq expression against a given JSON input +func evaluateJQExpression(expression string, input interface{}, variables map[string]interface{}) (interface{}, error) { + query, err := gojq.Parse(expression) + if err != nil { + return nil, fmt.Errorf("failed to parse jq expression: %s, error: %w", expression, err) + } + + // Get the variable names & values in a single pass: + names, values := getVariableNamesAndValues(variables) + + code, err := gojq.Compile(query, gojq.WithVariables(names)) + if err != nil { + return nil, fmt.Errorf("failed to compile jq expression: %s, error: %w", expression, err) + } + + iter := code.Run(input, values...) + result, ok := iter.Next() + if !ok { + return nil, errors.New("no result from jq evaluation") + } + + // If there's an error from the jq engine, report it + if errVal, isErr := result.(error); isErr { + return nil, fmt.Errorf("jq evaluation error: %w", errVal) + } + + return result, nil +} + +// getVariableNamesAndValues constructs two slices, where 'names[i]' matches 'values[i]'. 
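+// gojq expects the variable names (including their leading '$') at compile time and their values, in the same order, at run time, which is why both slices are built in one pass.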
+func getVariableNamesAndValues(vars map[string]interface{}) ([]string, []interface{}) { + names := make([]string, 0, len(vars)) + values := make([]interface{}, 0, len(vars)) + + for k, v := range vars { + names = append(names, k) + values = append(values, v) + } + return names, values +} + +func mergeContextInVars(nodeCtx context.Context, variables map[string]interface{}) error { + if variables == nil { + variables = make(map[string]interface{}) + } + wfCtx, err := ctx.GetWorkflowContext(nodeCtx) + if err != nil { + if errors.Is(err, ctx.ErrWorkflowContextNotFound) { + return nil + } + return err + } + // merge + for k, val := range wfCtx.GetVars() { + variables[k] = val + } + + return nil +} + +func TraverseAndEvaluateObj(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string, wfCtx context.Context) (output interface{}, err error) { + if runtimeExpr == nil { + return input, nil + } + output, err = TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input, wfCtx) + if err != nil { + return nil, model.NewErrExpression(err, taskName) + } + return output, nil +} + +func TraverseAndEvaluateBool(runtimeExpr string, input interface{}, wfCtx context.Context) (bool, error) { + if len(runtimeExpr) == 0 { + return false, nil + } + output, err := TraverseAndEvaluate(runtimeExpr, input, wfCtx) + if err != nil { + return false, nil + } + if result, ok := output.(bool); ok { + return result, nil + } + return false, nil +} diff --git a/impl/expr/expr_test.go b/impl/expr/expr_test.go new file mode 100644 index 0000000..f2af54a --- /dev/null +++ b/impl/expr/expr_test.go @@ -0,0 +1,263 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
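Before the tests, a hedged sketch of how the evaluator above is meant to be called from outside the package; the variable name $greeting and the sample data are purely illustrative, and jq variables keep their leading '$' in the map key, as the tests below also do.

package main

import (
	"context"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/impl/expr"
)

func main() {
	// A runtime expression referencing both the input (.name) and an extra variable ($greeting).
	node := map[string]interface{}{
		"message": "${ $greeting + \" \" + .name }",
	}
	input := map[string]interface{}{"name": "workflow"}
	vars := map[string]interface{}{"$greeting": "hello"}

	out, err := expr.TraverseAndEvaluateWithVars(node, input, vars, context.TODO())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // map[message:hello workflow]
}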
+ +package expr + +import ( + "context" + "fmt" + "testing" + + "github.com/itchyny/gojq" +) + +func TestTraverseAndEvaluate(t *testing.T) { + t.Run("Simple no-expression map", func(t *testing.T) { + node := map[string]interface{}{ + "key": "value", + "num": 123, + } + result, err := TraverseAndEvaluate(node, nil, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map") + } + if got["key"] != "value" || got["num"] != 123 { + t.Errorf("TraverseAndEvaluate() returned unexpected map data: %#v", got) + } + }) + + t.Run("Expression in map", func(t *testing.T) { + node := map[string]interface{}{ + "expr": "${ .foo }", + } + input := map[string]interface{}{ + "foo": "bar", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map") + } + if got["expr"] != "bar" { + t.Errorf("TraverseAndEvaluate() = %v, want %v", got["expr"], "bar") + } + }) + + t.Run("Expression in array", func(t *testing.T) { + node := []interface{}{ + "static", + "${ .foo }", + } + input := map[string]interface{}{ + "foo": "bar", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.([]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return an array") + } + if got[0] != "static" { + t.Errorf("TraverseAndEvaluate()[0] = %v, want 'static'", got[0]) + } + if got[1] != "bar" { + t.Errorf("TraverseAndEvaluate()[1] = %v, want 'bar'", got[1]) + } + }) + + t.Run("Nested structures", func(t *testing.T) { + node := map[string]interface{}{ + "level1": []interface{}{ + map[string]interface{}{ + "expr": "${ .foo }", + }, + }, + } + input := map[string]interface{}{ + "foo": "nestedValue", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() error: %v", err) + } + + resMap, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map at top-level") + } + + level1, ok := resMap["level1"].([]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return an array for resMap['level1']") + } + + level1Map, ok := level1[0].(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map for level1[0]") + } + + if level1Map["expr"] != "nestedValue" { + t.Errorf("TraverseAndEvaluate() = %v, want %v", level1Map["expr"], "nestedValue") + } + }) + + t.Run("Invalid JQ expression", func(t *testing.T) { + node := "${ .foo( }" + input := map[string]interface{}{ + "foo": "bar", + } + + _, err := TraverseAndEvaluate(node, input, context.TODO()) + if err == nil { + t.Errorf("TraverseAndEvaluate() expected error for invalid JQ, got nil") + } + }) +} + +func TestTraverseAndEvaluateWithVars(t *testing.T) { + t.Run("Variable usage in expression", func(t *testing.T) { + node := map[string]interface{}{ + "expr": "${ $myVar }", + } + variables := map[string]interface{}{ + "$myVar": "HelloVars", + } + input := map[string]interface{}{} + + result, err := TraverseAndEvaluateWithVars(node, input, variables, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluateWithVars() unexpected error: %v", err) + } + got, ok 
:= result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluateWithVars() did not return a map") + } + if got["expr"] != "HelloVars" { + t.Errorf("TraverseAndEvaluateWithVars() = %v, want %v", got["expr"], "HelloVars") + } + }) + + t.Run("Reference variable that isn't defined", func(t *testing.T) { + // This tries to use a variable that isn't passed in, + // so presumably it yields an error about an undefined variable. + node := "${ $notProvided }" + input := map[string]interface{}{ + "foo": "bar", + } + variables := map[string]interface{}{} // intentionally empty + + _, err := TraverseAndEvaluateWithVars(node, input, variables, context.TODO()) + if err == nil { + t.Errorf("TraverseAndEvaluateWithVars() expected error for undefined variable, got nil") + } else { + t.Logf("Got expected error: %v", err) + } + }) +} + +func TestEvaluateJQExpressionDirect(t *testing.T) { + // This tests the core evaluator directly for errors and success. + t.Run("Successful eval", func(t *testing.T) { + expression := ".foo" + input := map[string]interface{}{"foo": "bar"} + variables := map[string]interface{}{} + result, err := callEvaluateJQ(expression, input, variables) + if err != nil { + t.Fatalf("evaluateJQExpression() error = %v, want nil", err) + } + if result != "bar" { + t.Errorf("evaluateJQExpression() = %v, want 'bar'", result) + } + }) + + t.Run("Parse error", func(t *testing.T) { + expression := ".foo(" + input := map[string]interface{}{"foo": "bar"} + variables := map[string]interface{}{} + _, err := callEvaluateJQ(expression, input, variables) + if err == nil { + t.Errorf("evaluateJQExpression() expected parse error, got nil") + } + }) + + t.Run("Runtime error in evaluation (undefined variable)", func(t *testing.T) { + expression := "$undefinedVar" + input := map[string]interface{}{ + "foo": []interface{}{1, 2}, + } + variables := map[string]interface{}{} + _, err := callEvaluateJQ(expression, input, variables) + if err == nil { + t.Errorf("callEvaluateJQ() expected runtime error, got nil") + } else { + t.Logf("Got expected error: %v", err) + } + }) +} + +// Helper to call the unexported evaluateJQExpression via a wrapper in tests. +// Alternatively, you could move `evaluateJQExpression` into a separate file that +// is also in package `expr`, then test it directly if needed. +func callEvaluateJQ(expression string, input interface{}, variables map[string]interface{}) (interface{}, error) { + // Replicate the logic from evaluateJQExpression for direct testing + query, err := gojq.Parse(expression) + if err != nil { + return nil, fmt.Errorf("failed to parse: %w", err) + } + code, err := gojq.Compile(query, gojq.WithVariables(exprGetVariableNames(variables))) + if err != nil { + return nil, fmt.Errorf("failed to compile: %w", err) + } + iter := code.Run(input, exprGetVariableValues(variables)...) 
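+ // gojq yields results through an iterator; evaluation failures come back as error values from Next() rather than from Run itself.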
+ result, ok := iter.Next() + if !ok { + return nil, fmt.Errorf("no result from jq evaluation") + } + if e, isErr := result.(error); isErr { + return nil, fmt.Errorf("runtime error: %w", e) + } + return result, nil +} + +// Local copies of the variable-gathering logic from your code: +func exprGetVariableNames(variables map[string]interface{}) []string { + names := make([]string, 0, len(variables)) + for name := range variables { + names = append(names, name) + } + return names +} + +func exprGetVariableValues(variables map[string]interface{}) []interface{} { + vals := make([]interface{}, 0, len(variables)) + for _, val := range variables { + vals = append(vals, val) + } + return vals +} diff --git a/impl/json_pointer.go b/impl/json_pointer.go new file mode 100644 index 0000000..dedaaf3 --- /dev/null +++ b/impl/json_pointer.go @@ -0,0 +1,78 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "encoding/json" + "fmt" + "reflect" + "strings" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func findJsonPointer(data interface{}, target string, path string) (string, bool) { + switch node := data.(type) { + case map[string]interface{}: + for key, value := range node { + newPath := fmt.Sprintf("%s/%s", path, key) + if key == target { + return newPath, true + } + if result, found := findJsonPointer(value, target, newPath); found { + return result, true + } + } + case []interface{}: + for i, item := range node { + newPath := fmt.Sprintf("%s/%d", path, i) + if result, found := findJsonPointer(item, target, newPath); found { + return result, true + } + } + } + return "", false +} + +// GenerateJSONPointer Function to generate JSON Pointer from a Workflow reference +func GenerateJSONPointer(workflow *model.Workflow, targetNode interface{}) (string, error) { + // Convert struct to JSON + jsonData, err := json.Marshal(workflow) + if err != nil { + return "", fmt.Errorf("error marshalling to JSON: %w", err) + } + + // Convert JSON to a generic map for traversal + var jsonMap map[string]interface{} + if err := json.Unmarshal(jsonData, &jsonMap); err != nil { + return "", fmt.Errorf("error unmarshalling JSON: %w", err) + } + + transformedNode := "" + switch node := targetNode.(type) { + case string: + transformedNode = node + default: + transformedNode = strings.ToLower(reflect.TypeOf(targetNode).Name()) + } + + // Search for the target node + jsonPointer, found := findJsonPointer(jsonMap, transformedNode, "") + if !found { + return "", fmt.Errorf("node '%s' not found", targetNode) + } + + return jsonPointer, nil +} diff --git a/impl/json_pointer_test.go b/impl/json_pointer_test.go new file mode 100644 index 0000000..aeec1e4 --- /dev/null +++ b/impl/json_pointer_test.go @@ -0,0 +1,140 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +// TestGenerateJSONPointer_SimpleTask tests a simple workflow task. +func TestGenerateJSONPointer_SimpleTask(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "simple-workflow"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 10}}}, + &model.TaskItem{Key: "task2", Task: &model.SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "task2") + assert.NoError(t, err) + assert.Equal(t, "/do/1/task2", jsonPointer) +} + +// TestGenerateJSONPointer_SimpleTask tests a simple workflow task. +func TestGenerateJSONPointer_Document(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "simple-workflow"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 10}}}, + &model.TaskItem{Key: "task2", Task: &model.SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, workflow.Document) + assert.NoError(t, err) + assert.Equal(t, "/document", jsonPointer) +} + +func TestGenerateJSONPointer_ForkTask(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "fork-example"}, + Do: &model.TaskList{ + &model.TaskItem{ + Key: "raiseAlarm", + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: true, + Branches: &model.TaskList{ + &model.TaskItem{Key: "callNurse", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/nurses")}}}, + &model.TaskItem{Key: "callDoctor", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/doctor")}}}, + }, + }, + }, + }, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "callDoctor") + assert.NoError(t, err) + assert.Equal(t, "/do/0/raiseAlarm/fork/branches/1/callDoctor", jsonPointer) +} + +// TestGenerateJSONPointer_DeepNestedTask tests multiple nested task levels. 
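+// Nested fork branches are expected to produce pointers of the form /do/<i>/<key>/fork/branches/<j>/<key>/..., as asserted below.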
+func TestGenerateJSONPointer_DeepNestedTask(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "deep-nested"}, + Do: &model.TaskList{ + &model.TaskItem{ + Key: "step1", + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: false, + Branches: &model.TaskList{ + &model.TaskItem{ + Key: "branchA", + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Branches: &model.TaskList{ + &model.TaskItem{ + Key: "deepTask", + Task: &model.SetTask{Set: map[string]interface{}{"result": "done"}}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "deepTask") + assert.NoError(t, err) + assert.Equal(t, "/do/0/step1/fork/branches/0/branchA/fork/branches/0/deepTask", jsonPointer) +} + +// TestGenerateJSONPointer_NonExistentTask checks for a task that doesn't exist. +func TestGenerateJSONPointer_NonExistentTask(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "nonexistent-test"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "taskA", Task: &model.SetTask{Set: map[string]interface{}{"value": 5}}}, + }, + } + + _, err := GenerateJSONPointer(workflow, "taskX") + assert.Error(t, err) +} + +// TestGenerateJSONPointer_MixedTaskTypes verifies a workflow with different task types. +func TestGenerateJSONPointer_MixedTaskTypes(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{Name: "mixed-tasks"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "compute", Task: &model.SetTask{Set: map[string]interface{}{"result": 42}}}, + &model.TaskItem{Key: "notify", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "post", Endpoint: model.NewEndpoint("https://api.notify.com")}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "notify") + assert.NoError(t, err) + assert.Equal(t, "/do/1/notify", jsonPointer) +} diff --git a/impl/runner.go b/impl/runner.go new file mode 100644 index 0000000..33d852a --- /dev/null +++ b/impl/runner.go @@ -0,0 +1,226 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "fmt" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var _ WorkflowRunner = &workflowRunnerImpl{} +var _ TaskSupport = &workflowRunnerImpl{} + +// WorkflowRunner is the public API to run Workflows +type WorkflowRunner interface { + GetWorkflowDef() *model.Workflow + Run(input interface{}) (output interface{}, err error) + GetWorkflowCtx() ctx.WorkflowContext +} + +func NewDefaultRunner(workflow *model.Workflow) (WorkflowRunner, error) { + wfContext, err := ctx.NewWorkflowContext(workflow) + if err != nil { + return nil, err + } + // TODO: based on the workflow definition, the context might change. 
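+ // Attach the workflow context to a Go context so nested components (e.g. expression evaluation) can retrieve it via ctx.GetWorkflowContext.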
+ objCtx := ctx.WithWorkflowContext(context.Background(), wfContext) + return &workflowRunnerImpl{ + Workflow: workflow, + Context: objCtx, + RunnerCtx: wfContext, + }, nil +} + +type workflowRunnerImpl struct { + Workflow *model.Workflow + Context context.Context + RunnerCtx ctx.WorkflowContext +} + +func (wr *workflowRunnerImpl) CloneWithContext(newCtx context.Context) TaskSupport { + clonedWfCtx := wr.RunnerCtx.Clone() + + ctxWithWf := ctx.WithWorkflowContext(newCtx, clonedWfCtx) + + return &workflowRunnerImpl{ + Workflow: wr.Workflow, + Context: ctxWithWf, + RunnerCtx: clonedWfCtx, + } +} + +func (wr *workflowRunnerImpl) RemoveLocalExprVars(keys ...string) { + wr.RunnerCtx.RemoveLocalExprVars(keys...) +} + +func (wr *workflowRunnerImpl) AddLocalExprVars(vars map[string]interface{}) { + wr.RunnerCtx.AddLocalExprVars(vars) +} + +func (wr *workflowRunnerImpl) SetLocalExprVars(vars map[string]interface{}) { + wr.RunnerCtx.SetLocalExprVars(vars) +} + +func (wr *workflowRunnerImpl) SetTaskReferenceFromName(taskName string) error { + ref, err := GenerateJSONPointer(wr.Workflow, taskName) + if err != nil { + return err + } + wr.RunnerCtx.SetTaskReference(ref) + return nil +} + +func (wr *workflowRunnerImpl) GetTaskReference() string { + return wr.RunnerCtx.GetTaskReference() +} + +func (wr *workflowRunnerImpl) SetTaskRawInput(input interface{}) { + wr.RunnerCtx.SetTaskRawInput(input) +} + +func (wr *workflowRunnerImpl) SetTaskRawOutput(output interface{}) { + wr.RunnerCtx.SetTaskRawOutput(output) +} + +func (wr *workflowRunnerImpl) SetTaskDef(task model.Task) error { + return wr.RunnerCtx.SetTaskDef(task) +} + +func (wr *workflowRunnerImpl) SetTaskStartedAt(startedAt time.Time) { + wr.RunnerCtx.SetTaskStartedAt(startedAt) +} + +func (wr *workflowRunnerImpl) SetTaskName(name string) { + wr.RunnerCtx.SetTaskName(name) +} + +func (wr *workflowRunnerImpl) GetContext() context.Context { + return wr.Context +} + +func (wr *workflowRunnerImpl) GetWorkflowCtx() ctx.WorkflowContext { + return wr.RunnerCtx +} + +func (wr *workflowRunnerImpl) SetTaskStatus(task string, status ctx.StatusPhase) { + wr.RunnerCtx.SetTaskStatus(task, status) +} + +func (wr *workflowRunnerImpl) GetWorkflowDef() *model.Workflow { + return wr.Workflow +} + +func (wr *workflowRunnerImpl) SetWorkflowInstanceCtx(value interface{}) { + wr.RunnerCtx.SetInstanceCtx(value) +} + +// Run executes the workflow synchronously. +func (wr *workflowRunnerImpl) Run(input interface{}) (output interface{}, err error) { + defer func() { + if err != nil { + wr.RunnerCtx.SetStatus(ctx.FaultedStatus) + err = wr.wrapWorkflowError(err) + } + }() + + wr.RunnerCtx.SetRawInput(input) + + // Process input + if input, err = wr.processInput(input); err != nil { + return nil, err + } + + wr.RunnerCtx.SetInput(input) + // Run tasks sequentially + wr.RunnerCtx.SetStatus(ctx.RunningStatus) + doRunner, err := NewDoTaskRunner(wr.Workflow.Do) + if err != nil { + return nil, err + } + wr.RunnerCtx.SetStartedAt(time.Now()) + output, err = doRunner.Run(wr.RunnerCtx.GetInput(), wr) + if err != nil { + return nil, err + } + + wr.RunnerCtx.ClearTaskContext() + + // Process output + if output, err = wr.processOutput(output); err != nil { + return nil, err + } + + wr.RunnerCtx.SetOutput(output) + wr.RunnerCtx.SetStatus(ctx.CompletedStatus) + return output, nil +} + +// wrapWorkflowError ensures workflow errors have a proper instance reference. 
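+// Known model errors keep their type and gain an instance reference; anything else is wrapped as a runtime error pointing at the failing task, or at '/' when no task was running.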
+func (wr *workflowRunnerImpl) wrapWorkflowError(err error) error { + taskReference := wr.RunnerCtx.GetTaskReference() + if len(taskReference) == 0 { + taskReference = "/" + } + if knownErr := model.AsError(err); knownErr != nil { + return knownErr.WithInstanceRef(wr.Workflow, taskReference) + } + return model.NewErrRuntime(fmt.Errorf("workflow '%s', task '%s': %w", wr.Workflow.Document.Name, taskReference, err), taskReference) +} + +// processInput validates and transforms input if needed. +func (wr *workflowRunnerImpl) processInput(input interface{}) (output interface{}, err error) { + if wr.Workflow.Input != nil { + if wr.Workflow.Input.Schema != nil { + if err = utils.ValidateSchema(input, wr.Workflow.Input.Schema, "/"); err != nil { + return nil, err + } + } + + if wr.Workflow.Input.From != nil { + output, err = expr.TraverseAndEvaluateObj(wr.Workflow.Input.From, input, "/", wr.Context) + if err != nil { + return nil, err + } + return output, nil + } + } + return input, nil +} + +// processOutput applies output transformations. +func (wr *workflowRunnerImpl) processOutput(output interface{}) (interface{}, error) { + if wr.Workflow.Output != nil { + if wr.Workflow.Output.As != nil { + var err error + output, err = expr.TraverseAndEvaluateObj(wr.Workflow.Output.As, output, "/", wr.Context) + if err != nil { + return nil, err + } + } + if wr.Workflow.Output.Schema != nil { + if err := utils.ValidateSchema(output, wr.Workflow.Output.Schema, "/"); err != nil { + return nil, err + } + } + } + return output, nil +} diff --git a/impl/runner_test.go b/impl/runner_test.go new file mode 100644 index 0000000..5acdb6b --- /dev/null +++ b/impl/runner_test.go @@ -0,0 +1,469 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
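The tests below exercise the runner end to end; as a stand-alone sketch, running a workflow from a YAML definition looks roughly like the following, where workflow.yaml is a placeholder path and the input map is arbitrary.

package main

import (
	"fmt"
	"os"

	"github.com/serverlessworkflow/sdk-go/v3/impl"
	"github.com/serverlessworkflow/sdk-go/v3/parser"
)

func main() {
	// "workflow.yaml" is a placeholder; any valid Serverless Workflow definition will do.
	yamlBytes, err := os.ReadFile("workflow.yaml")
	if err != nil {
		panic(err)
	}

	workflow, err := parser.FromYAMLSource(yamlBytes)
	if err != nil {
		panic(err)
	}

	runner, err := impl.NewDefaultRunner(workflow)
	if err != nil {
		panic(err)
	}

	output, err := runner.Run(map[string]interface{}{"key": "value"})
	if err != nil {
		panic(err)
	}
	fmt.Println(output)
}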
+ +package impl + +import ( + "context" + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/parser" + "github.com/stretchr/testify/assert" +) + +type taskSupportOpts func(*workflowRunnerImpl) + +// newTaskSupport returns an instance of TaskSupport for test purposes +func newTaskSupport(opts ...taskSupportOpts) TaskSupport { + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + if err != nil { + panic(fmt.Errorf("failed to create workflow context within the test environment: %v", err)) + } + + ts := &workflowRunnerImpl{ + Workflow: nil, + Context: context.TODO(), + RunnerCtx: wfCtx, + } + + // Apply each functional option to ts + for _, opt := range opts { + opt(ts) + } + + return ts +} + +//nolint:unused +func withWorkflow(wf *model.Workflow) taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.Workflow = wf + } +} + +//nolint:unused +func withContext(ctx context.Context) taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.Context = ctx + } +} + +func withRunnerCtx(workflowContext ctx.WorkflowContext) taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.RunnerCtx = workflowContext + } +} + +// runWorkflowTest is a reusable test function for workflows +func runWorkflowTest(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}) { + // Run the workflow + output, err := runWorkflow(t, workflowPath, input, expectedOutput) + assert.NoError(t, err) + + assertWorkflowRun(t, expectedOutput, output) +} + +func runWorkflowWithErr(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}, assertErr func(error)) { + output, err := runWorkflow(t, workflowPath, input, expectedOutput) + assert.Error(t, err) + assertErr(err) + assertWorkflowRun(t, expectedOutput, output) +} + +func runWorkflow(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}) (output interface{}, err error) { + // Read the workflow YAML from the testdata directory + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + + // Parse the YAML workflow + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + + // Initialize the workflow runner + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) + + // Run the workflow + output, err = runner.Run(input) + return output, err +} + +func assertWorkflowRun(t *testing.T, expectedOutput map[string]interface{}, output interface{}) { + if expectedOutput == nil { + assert.Nil(t, output, "Expected nil Workflow run output") + } else { + assert.Equal(t, expectedOutput, output, "Workflow output mismatch") + } +} + +// TestWorkflowRunner_Run_YAML validates multiple workflows +func TestWorkflowRunner_Run_YAML(t *testing.T) { + // Workflow 1: Chained Set Tasks + t.Run("Chained Set Tasks", func(t *testing.T) { + workflowPath := "./testdata/chained_set_tasks.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "tripled": float64(60), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + // Workflow 2: Concatenating Strings + t.Run("Concatenating Strings", func(t *testing.T) { + workflowPath := "./testdata/concatenating_strings.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "fullName": "John Doe", + } + runWorkflowTest(t, 
workflowPath, input, expectedOutput) + }) + + // Workflow 3: Conditional Logic + t.Run("Conditional Logic", func(t *testing.T) { + workflowPath := "./testdata/conditional_logic.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Conditional Logic", func(t *testing.T) { + workflowPath := "./testdata/sequential_set_colors.yaml" + // Define the input and expected output + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "resultColors": []interface{}{"red", "green", "blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + t.Run("input From", func(t *testing.T) { + workflowPath := "./testdata/sequential_set_colors_output_as.yaml" + // Define the input and expected output + expectedOutput := map[string]interface{}{ + "result": []interface{}{"red", "green", "blue"}, + } + runWorkflowTest(t, workflowPath, nil, expectedOutput) + }) + t.Run("input From", func(t *testing.T) { + workflowPath := "./testdata/conditional_logic_input_from.yaml" + // Define the input and expected output + input := map[string]interface{}{ + "localWeather": map[string]interface{}{ + "temperature": 34, + }, + } + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_WithSchemaValidation(t *testing.T) { + // Workflow 1: Workflow input Schema Validation + t.Run("Workflow input Schema Validation - Valid input", func(t *testing.T) { + workflowPath := "./testdata/workflow_input_schema.yaml" + input := map[string]interface{}{ + "key": "value", + } + expectedOutput := map[string]interface{}{ + "outputKey": "value", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Workflow input Schema Validation - Invalid input", func(t *testing.T) { + workflowPath := "./testdata/workflow_input_schema.yaml" + input := map[string]interface{}{ + "wrongKey": "value", + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid input") + assert.Contains(t, err.Error(), "JSON schema validation failed") + }) + + // Workflow 2: Task input Schema Validation + t.Run("Task input Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_input_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": 42, + } + expectedOutput := map[string]interface{}{ + "taskOutputKey": 84, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Task input Schema Validation - Invalid input", func(t *testing.T) { + workflowPath := "./testdata/task_input_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": "invalidValue", + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid task input") + assert.Contains(t, err.Error(), "JSON 
schema validation failed") + }) + + // Workflow 3: Task output Schema Validation + t.Run("Task output Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": "value", + } + expectedOutput := map[string]interface{}{ + "finalOutputKey": "resultValue", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Task output Schema Validation - Invalid output", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema_with_dynamic_value.yaml" + input := map[string]interface{}{ + "taskInputKey": 123, // Invalid value (not a string) + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid task output") + assert.Contains(t, err.Error(), "JSON schema validation failed") + }) + + t.Run("Task output Schema Validation - Valid output", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema_with_dynamic_value.yaml" + input := map[string]interface{}{ + "taskInputKey": "validValue", // Valid value + } + expectedOutput := map[string]interface{}{ + "finalOutputKey": "validValue", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + // Workflow 4: Task Export Schema Validation + t.Run("Task Export Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_export_schema.yaml" + input := map[string]interface{}{ + "key": "value", + } + expectedOutput := map[string]interface{}{ + "exportedKey": "value", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_ControlFlow(t *testing.T) { + t.Run("Set Tasks with Then Directive", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_with_then.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "result": float64(90), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Set Tasks with Termination", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_with_termination.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "finalValue": float64(20), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Set Tasks with Invalid Then Reference", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_invalid_then.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "partialResult": float64(15), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_RaiseTasks(t *testing.T) { + // TODO: add $workflow context to the expr processing + t.Run("Raise Inline Error", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_inline.yaml", nil, nil, func(err error) { + assert.Equal(t, model.ErrorTypeValidation, model.AsError(err).Type.String()) + assert.Equal(t, "Invalid input provided to workflow raise-inline", model.AsError(err).Detail.String()) + }) + }) + + t.Run("Raise Referenced Error", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_reusable.yaml", nil, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthentication, model.AsError(err).Type.String()) + }) + }) + + 
t.Run("Raise Error with Dynamic Detail", func(t *testing.T) { + input := map[string]interface{}{ + "reason": "User token expired", + } + runWorkflowWithErr(t, "./testdata/raise_error_with_input.yaml", input, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthentication, model.AsError(err).Type.String()) + assert.Equal(t, "User authentication failed: User token expired", model.AsError(err).Detail.String()) + }) + }) + + t.Run("Raise Undefined Error Reference", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_undefined_reference.yaml", nil, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeValidation, model.AsError(err).Type.String()) + }) + }) +} + +func TestWorkflowRunner_Run_YAML_RaiseTasks_ControlFlow(t *testing.T) { + t.Run("Raise Error with Conditional Logic", func(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "age": 16, + }, + } + runWorkflowWithErr(t, "./testdata/raise_conditional.yaml", input, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthorization, model.AsError(err).Type.String()) + assert.Equal(t, "User is under the required age", model.AsError(err).Detail.String()) + }) + }) +} + +func TestForTaskRunner_Run(t *testing.T) { + t.Run("Simple For with Colors", func(t *testing.T) { + workflowPath := "./testdata/for_colors.yaml" + input := map[string]interface{}{ + "colors": []string{"red", "green", "blue"}, + } + expectedOutput := map[string]interface{}{ + "processed": map[string]interface{}{ + "colors": []interface{}{"red", "green", "blue"}, + "indexes": []interface{}{0, 1, 2}, + }, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("SUM Numbers", func(t *testing.T) { + workflowPath := "./testdata/for_sum_numbers.yaml" + input := map[string]interface{}{ + "numbers": []int32{2, 3, 4}, + } + expectedOutput := map[string]interface{}{ + "result": interface{}(9), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("For Nested Loops", func(t *testing.T) { + workflowPath := "./testdata/for_nested_loops.yaml" + input := map[string]interface{}{ + "fruits": []interface{}{"apple", "banana"}, + "colors": []interface{}{"red", "green"}, + } + expectedOutput := map[string]interface{}{ + "matrix": []interface{}{ + []interface{}{"apple", "red"}, + []interface{}{"apple", "green"}, + []interface{}{"banana", "red"}, + []interface{}{"banana", "green"}, + }, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + +} + +func TestSwitchTaskRunner_Run(t *testing.T) { + t.Run("Color is red", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "red", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"red"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Color is green", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "green", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"green"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Color is blue", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "blue", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestSwitchTaskRunner_DefaultCase(t *testing.T) { + t.Run("Color is unknown, should match 
default", func(t *testing.T) { + workflowPath := "./testdata/switch_with_default.yaml" + input := map[string]interface{}{ + "color": "yellow", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"default"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestForkSimple_NoCompete(t *testing.T) { + t.Run("Create a color array", func(t *testing.T) { + workflowPath := "./testdata/fork_simple.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"red", "blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} diff --git a/impl/task_runner.go b/impl/task_runner.go new file mode 100644 index 0000000..f825f79 --- /dev/null +++ b/impl/task_runner.go @@ -0,0 +1,59 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var _ TaskRunner = &SetTaskRunner{} +var _ TaskRunner = &RaiseTaskRunner{} +var _ TaskRunner = &ForTaskRunner{} +var _ TaskRunner = &DoTaskRunner{} + +type TaskRunner interface { + Run(input interface{}, taskSupport TaskSupport) (interface{}, error) + GetTaskName() string +} + +type TaskSupport interface { + SetTaskStatus(task string, status ctx.StatusPhase) + GetWorkflowDef() *model.Workflow + // SetWorkflowInstanceCtx is the `$context` variable accessible in JQ expressions and set in `export.as` + SetWorkflowInstanceCtx(value interface{}) + // GetContext gets the sharable Workflow context. Accessible via ctx.GetWorkflowContext. + GetContext() context.Context + SetTaskRawInput(value interface{}) + SetTaskRawOutput(value interface{}) + SetTaskDef(task model.Task) error + SetTaskStartedAt(value time.Time) + SetTaskName(name string) + // SetTaskReferenceFromName based on the taskName and the model.Workflow definition, set the JSON Pointer reference to the context + SetTaskReferenceFromName(taskName string) error + GetTaskReference() string + // SetLocalExprVars overrides local variables in expression processing + SetLocalExprVars(vars map[string]interface{}) + // AddLocalExprVars adds to the local variables in expression processing. Won't override previous entries. + AddLocalExprVars(vars map[string]interface{}) + // RemoveLocalExprVars removes local variables added in AddLocalExprVars or SetLocalExprVars + RemoveLocalExprVars(keys ...string) + // CloneWithContext returns a full clone of this TaskSupport, but using + // the provided context.Context (so deadlines/cancellations propagate). 
+ CloneWithContext(ctx context.Context) TaskSupport +} diff --git a/impl/task_runner_call_http.go b/impl/task_runner_call_http.go new file mode 100644 index 0000000..3093506 --- /dev/null +++ b/impl/task_runner_call_http.go @@ -0,0 +1,44 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +type CallHTTPTaskRunner struct { + TaskName string +} + +func NewCallHttpRunner(taskName string, task *model.CallHTTP) (taskRunner *CallHTTPTaskRunner, err error) { + if task == nil { + err = model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) + } else { + taskRunner = new(CallHTTPTaskRunner) + taskRunner.TaskName = taskName + } + return +} + +func (f *CallHTTPTaskRunner) Run(input interface{}, taskSupport TaskSupport) (interface{}, error) { + return input, nil + +} + +func (f *CallHTTPTaskRunner) GetTaskName() string { + return f.TaskName +} diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go new file mode 100644 index 0000000..8b63bfc --- /dev/null +++ b/impl/task_runner_do.go @@ -0,0 +1,252 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +// NewTaskRunner creates a TaskRunner instance based on the task type. 
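+// Unsupported task types result in an error rather than being skipped silently.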
+func NewTaskRunner(taskName string, task model.Task, workflowDef *model.Workflow) (TaskRunner, error) { + switch t := task.(type) { + case *model.SetTask: + return NewSetTaskRunner(taskName, t) + case *model.RaiseTask: + return NewRaiseTaskRunner(taskName, t, workflowDef) + case *model.DoTask: + return NewDoTaskRunner(t.Do) + case *model.ForTask: + return NewForTaskRunner(taskName, t) + case *model.CallHTTP: + return NewCallHttpRunner(taskName, t) + case *model.ForkTask: + return NewForkTaskRunner(taskName, t, workflowDef) + default: + return nil, fmt.Errorf("unsupported task type '%T' for task '%s'", t, taskName) + } +} + +func NewDoTaskRunner(taskList *model.TaskList) (*DoTaskRunner, error) { + return &DoTaskRunner{ + TaskList: taskList, + }, nil +} + +type DoTaskRunner struct { + TaskList *model.TaskList +} + +func (d *DoTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { + if d.TaskList == nil { + return input, nil + } + return d.runTasks(input, taskSupport) +} + +func (d *DoTaskRunner) GetTaskName() string { + return "" +} + +// runTasks runs all defined tasks sequentially. +func (d *DoTaskRunner) runTasks(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { + output = input + if d.TaskList == nil { + return output, nil + } + + idx := 0 + currentTask := (*d.TaskList)[idx] + + for currentTask != nil { + if err = taskSupport.SetTaskDef(currentTask); err != nil { + return nil, err + } + if err = taskSupport.SetTaskReferenceFromName(currentTask.Key); err != nil { + return nil, err + } + + if shouldRun, err := d.shouldRunTask(input, taskSupport, currentTask); err != nil { + return output, err + } else if !shouldRun { + idx, currentTask = d.TaskList.Next(idx) + continue + } + + taskSupport.SetTaskStatus(currentTask.Key, ctx.PendingStatus) + + // Check if this task is a SwitchTask and handle it + if switchTask, ok := currentTask.Task.(*model.SwitchTask); ok { + flowDirective, err := d.evaluateSwitchTask(input, taskSupport, currentTask.Key, switchTask) + if err != nil { + taskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) + return output, err + } + taskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) + + // Process FlowDirective: update idx/currentTask accordingly + idx, currentTask = d.TaskList.KeyAndIndex(flowDirective.Value) + if currentTask == nil { + return nil, fmt.Errorf("flow directive target '%s' not found", flowDirective.Value) + } + continue + } + + runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, taskSupport.GetWorkflowDef()) + if err != nil { + return output, err + } + + taskSupport.SetTaskStatus(currentTask.Key, ctx.RunningStatus) + if output, err = d.runTask(input, taskSupport, runner, currentTask.Task.GetBase()); err != nil { + taskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) + return output, err + } + + taskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) + input = utils.DeepCloneValue(output) + idx, currentTask = d.TaskList.Next(idx) + } + + return output, nil +} + +func (d *DoTaskRunner) shouldRunTask(input interface{}, taskSupport TaskSupport, task *model.TaskItem) (bool, error) { + if task.GetBase().If != nil { + output, err := expr.TraverseAndEvaluateBool(task.GetBase().If.String(), input, taskSupport.GetContext()) + if err != nil { + return false, model.NewErrExpression(err, task.Key) + } + return output, nil + } + return true, nil +} + +func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskSupport TaskSupport, taskKey string, 
switchTask *model.SwitchTask) (*model.FlowDirective, error) { + var defaultThen *model.FlowDirective + for _, switchItem := range switchTask.Switch { + for _, switchCase := range switchItem { + if switchCase.When == nil { + defaultThen = switchCase.Then + continue + } + result, err := expr.TraverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, taskSupport.GetContext()) + if err != nil { + return nil, model.NewErrExpression(err, taskKey) + } + if result { + if switchCase.Then == nil { + return nil, model.NewErrExpression(fmt.Errorf("missing 'then' directive in matched switch case"), taskKey) + } + return switchCase.Then, nil + } + } + } + if defaultThen != nil { + return defaultThen, nil + } + return nil, model.NewErrExpression(fmt.Errorf("no matching switch case"), taskKey) +} + +// runTask executes an individual task. +func (d *DoTaskRunner) runTask(input interface{}, taskSupport TaskSupport, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { + taskName := runner.GetTaskName() + + taskSupport.SetTaskStartedAt(time.Now()) + taskSupport.SetTaskRawInput(input) + taskSupport.SetTaskName(taskName) + + if task.Input != nil { + if input, err = d.processTaskInput(task, input, taskSupport, taskName); err != nil { + return nil, err + } + } + + output, err = runner.Run(input, taskSupport) + if err != nil { + return nil, err + } + + taskSupport.SetTaskRawOutput(output) + + if output, err = d.processTaskOutput(task, output, taskSupport, taskName); err != nil { + return nil, err + } + + if err = d.processTaskExport(task, output, taskSupport, taskName); err != nil { + return nil, err + } + + return output, nil +} + +// processTaskInput processes task input validation and transformation. +func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interface{}, taskSupport TaskSupport, taskName string) (output interface{}, err error) { + if task.Input == nil { + return taskInput, nil + } + + if err = utils.ValidateSchema(taskInput, task.Input.Schema, taskName); err != nil { + return nil, err + } + + if output, err = expr.TraverseAndEvaluateObj(task.Input.From, taskInput, taskName, taskSupport.GetContext()); err != nil { + return nil, err + } + + return output, nil +} + +// processTaskOutput processes task output validation and transformation. 
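+// The 'as' expression is evaluated against the raw output first, and the transformed
+// result is then validated against the declared output schema, mirroring processTaskInput
+// but in the opposite order.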
+func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interface{}, taskSupport TaskSupport, taskName string) (output interface{}, err error) {
+	if task.Output == nil {
+		return taskOutput, nil
+	}
+
+	if output, err = expr.TraverseAndEvaluateObj(task.Output.As, taskOutput, taskName, taskSupport.GetContext()); err != nil {
+		return nil, err
+	}
+
+	if err = utils.ValidateSchema(output, task.Output.Schema, taskName); err != nil {
+		return nil, err
+	}
+
+	return output, nil
+}
+
+func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interface{}, taskSupport TaskSupport, taskName string) (err error) {
+	if task.Export == nil {
+		return nil
+	}
+
+	output, err := expr.TraverseAndEvaluateObj(task.Export.As, taskOutput, taskName, taskSupport.GetContext())
+	if err != nil {
+		return err
+	}
+
+	if err = utils.ValidateSchema(output, task.Export.Schema, taskName); err != nil {
+		return err
+	}
+
+	taskSupport.SetWorkflowInstanceCtx(output)
+
+	return nil
+}
diff --git a/impl/task_runner_for.go b/impl/task_runner_for.go
new file mode 100644
index 0000000..90461f9
--- /dev/null
+++ b/impl/task_runner_for.go
@@ -0,0 +1,134 @@
+// Copyright 2025 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
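+
+// The For runner below evaluates the task's 'in' expression against its input, exposes the
+// current element and index to the nested 'do' block as local expression variables
+// (defaulting to $item and $index when 'each'/'at' are not set), and re-checks an optional
+// 'while' expression after every iteration. A minimal sketch, where forTask, input and
+// taskSupport are assumed placeholders:
+//
+//	runner, err := NewForTaskRunner("loop", forTask)
+//	if err != nil {
+//		return nil, err
+//	}
+//	output, err := runner.Run(input, taskSupport)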
+ +package impl + +import ( + "fmt" + "reflect" + "strings" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +const ( + forTaskDefaultEach = "$item" + forTaskDefaultAt = "$index" +) + +func NewForTaskRunner(taskName string, task *model.ForTask) (*ForTaskRunner, error) { + if task == nil || task.Do == nil { + return nil, model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) + } + + doRunner, err := NewDoTaskRunner(task.Do) + if err != nil { + return nil, err + } + + return &ForTaskRunner{ + Task: task, + TaskName: taskName, + DoRunner: doRunner, + }, nil +} + +type ForTaskRunner struct { + Task *model.ForTask + TaskName string + DoRunner *DoTaskRunner +} + +func (f *ForTaskRunner) Run(input interface{}, taskSupport TaskSupport) (interface{}, error) { + defer func() { + // clear local variables + taskSupport.RemoveLocalExprVars(f.Task.For.Each, f.Task.For.At) + }() + f.sanitizeFor() + in, err := expr.TraverseAndEvaluate(f.Task.For.In, input, taskSupport.GetContext()) + if err != nil { + return nil, err + } + + forOutput := input + rv := reflect.ValueOf(in) + switch rv.Kind() { + case reflect.Slice, reflect.Array: + for i := 0; i < rv.Len(); i++ { + item := rv.Index(i).Interface() + + if forOutput, err = f.processForItem(i, item, taskSupport, forOutput); err != nil { + return nil, err + } + if f.Task.While != "" { + whileIsTrue, err := expr.TraverseAndEvaluateBool(f.Task.While, forOutput, taskSupport.GetContext()) + if err != nil { + return nil, err + } + if !whileIsTrue { + break + } + } + } + case reflect.Invalid: + return input, nil + default: + if forOutput, err = f.processForItem(0, in, taskSupport, forOutput); err != nil { + return nil, err + } + } + + return forOutput, nil +} + +func (f *ForTaskRunner) processForItem(idx int, item interface{}, taskSupport TaskSupport, forOutput interface{}) (interface{}, error) { + forVars := map[string]interface{}{ + f.Task.For.At: idx, + f.Task.For.Each: item, + } + // Instead of Set, we Add since other tasks in this very same context might be adding variables to the context + taskSupport.AddLocalExprVars(forVars) + // output from previous iterations are merged together + var err error + forOutput, err = f.DoRunner.Run(forOutput, taskSupport) + if err != nil { + return nil, err + } + + return forOutput, nil +} + +func (f *ForTaskRunner) sanitizeFor() { + f.Task.For.Each = strings.TrimSpace(f.Task.For.Each) + f.Task.For.At = strings.TrimSpace(f.Task.For.At) + + if f.Task.For.Each == "" { + f.Task.For.Each = forTaskDefaultEach + } + if f.Task.For.At == "" { + f.Task.For.At = forTaskDefaultAt + } + + if !strings.HasPrefix(f.Task.For.Each, "$") { + f.Task.For.Each = "$" + f.Task.For.Each + } + if !strings.HasPrefix(f.Task.For.At, "$") { + f.Task.For.At = "$" + f.Task.For.At + } +} + +func (f *ForTaskRunner) GetTaskName() string { + return f.TaskName +} diff --git a/impl/task_runner_fork.go b/impl/task_runner_fork.go new file mode 100644 index 0000000..9a68399 --- /dev/null +++ b/impl/task_runner_fork.go @@ -0,0 +1,120 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "fmt" + "sync" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewForkTaskRunner(taskName string, task *model.ForkTask, workflowDef *model.Workflow) (*ForkTaskRunner, error) { + if task == nil || task.Fork.Branches == nil { + return nil, model.NewErrValidation(fmt.Errorf("invalid Fork task %s", taskName), taskName) + } + + var runners []TaskRunner + for _, branchItem := range *task.Fork.Branches { + r, err := NewTaskRunner(branchItem.Key, branchItem.Task, workflowDef) + if err != nil { + return nil, err + } + runners = append(runners, r) + } + + return &ForkTaskRunner{ + Task: task, + TaskName: taskName, + BranchRunners: runners, + }, nil +} + +type ForkTaskRunner struct { + Task *model.ForkTask + TaskName string + BranchRunners []TaskRunner +} + +func (f ForkTaskRunner) GetTaskName() string { + return f.TaskName +} + +func (f ForkTaskRunner) Run(input interface{}, parentSupport TaskSupport) (interface{}, error) { + cancelCtx, cancel := context.WithCancel(parentSupport.GetContext()) + defer cancel() + + n := len(f.BranchRunners) + results := make([]interface{}, n) + errs := make(chan error, n) + done := make(chan struct{}) + resultCh := make(chan interface{}, 1) + + var ( + wg sync.WaitGroup + once sync.Once // <-- declare a Once + ) + + for i, runner := range f.BranchRunners { + wg.Add(1) + go func(i int, runner TaskRunner) { + defer wg.Done() + // **Isolate context** for each branch! + branchSupport := parentSupport.CloneWithContext(cancelCtx) + + select { + case <-cancelCtx.Done(): + return + default: + } + + out, err := runner.Run(input, branchSupport) + if err != nil { + errs <- err + return + } + results[i] = out + + if f.Task.Fork.Compete { + select { + case resultCh <- out: + once.Do(func() { + cancel() // **signal cancellation** to all other branches + close(done) // signal we have a winner + }) + default: + } + } + }(i, runner) + } + + if f.Task.Fork.Compete { + select { + case <-done: + return <-resultCh, nil + case err := <-errs: + return nil, err + } + } + + wg.Wait() + select { + case err := <-errs: + return nil, err + default: + } + return results, nil +} diff --git a/impl/task_runner_fork_test.go b/impl/task_runner_fork_test.go new file mode 100644 index 0000000..f38b817 --- /dev/null +++ b/impl/task_runner_fork_test.go @@ -0,0 +1,101 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
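+
+// The tests below pin down the Fork semantics implemented above: with compete=false every
+// branch runs to completion and the outputs are returned as a slice in branch order, while
+// with compete=true the first branch to finish supplies the task output and the shared
+// context is cancelled so the remaining branches can stop early.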
+ +package impl + +import ( + "context" + "testing" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +// dummyRunner simulates a TaskRunner that returns its name after an optional delay. +type dummyRunner struct { + name string + delay time.Duration +} + +func (d *dummyRunner) GetTaskName() string { + return d.name +} + +func (d *dummyRunner) Run(input interface{}, ts TaskSupport) (interface{}, error) { + select { + case <-ts.GetContext().Done(): + // canceled + return nil, ts.GetContext().Err() + case <-time.After(d.delay): + // complete after delay + return d.name, nil + } +} + +func TestForkTaskRunner_NonCompete(t *testing.T) { + // Prepare a TaskSupport with a background context + ts := newTaskSupport(withContext(context.Background())) + + // Two branches that complete immediately + branches := []TaskRunner{ + &dummyRunner{name: "r1", delay: 0}, + &dummyRunner{name: "r2", delay: 0}, + } + fork := ForkTaskRunner{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: false, + }, + }, + TaskName: "fork", + BranchRunners: branches, + } + + output, err := fork.Run("in", ts) + assert.NoError(t, err) + + results, ok := output.([]interface{}) + assert.True(t, ok, "expected output to be []interface{}") + assert.Equal(t, []interface{}{"r1", "r2"}, results) +} + +func TestForkTaskRunner_Compete(t *testing.T) { + // Prepare a TaskSupport with a background context + ts := newTaskSupport(withContext(context.Background())) + + // One fast branch and one slow branch + branches := []TaskRunner{ + &dummyRunner{name: "fast", delay: 10 * time.Millisecond}, + &dummyRunner{name: "slow", delay: 50 * time.Millisecond}, + } + fork := ForkTaskRunner{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: true, + }, + }, + TaskName: "fork", + BranchRunners: branches, + } + + start := time.Now() + output, err := fork.Run("in", ts) + elapsed := time.Since(start) + + assert.NoError(t, err) + assert.Equal(t, "fast", output) + // ensure compete returns before the slow branch would finish + assert.Less(t, elapsed, 50*time.Millisecond, "compete should cancel the slow branch") +} diff --git a/impl/task_runner_raise.go b/impl/task_runner_raise.go new file mode 100644 index 0000000..dddaf0c --- /dev/null +++ b/impl/task_runner_raise.go @@ -0,0 +1,105 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
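+
+// The Raise runner below resolves a referenced error definition from the workflow's
+// 'use.errors' map at construction time, evaluates the definition's 'title' and 'detail'
+// as runtime expressions against the task input, and returns the resulting model.Error as
+// the task error, with the current task reference recorded as the error instance.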
+ +package impl + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, workflowDef *model.Workflow) (*RaiseTaskRunner, error) { + if err := resolveErrorDefinition(task, workflowDef); err != nil { + return nil, err + } + + if task.Raise.Error.Definition == nil { + return nil, model.NewErrValidation(fmt.Errorf("no raise configuration provided for RaiseTask %s", taskName), taskName) + } + return &RaiseTaskRunner{ + Task: task, + TaskName: taskName, + }, nil +} + +// TODO: can e refactored to a definition resolver callable from the context +func resolveErrorDefinition(t *model.RaiseTask, workflowDef *model.Workflow) error { + if workflowDef != nil && t.Raise.Error.Ref != nil { + notFoundErr := model.NewErrValidation(fmt.Errorf("%v error definition not found in 'uses'", t.Raise.Error.Ref), "") + if workflowDef.Use != nil && workflowDef.Use.Errors != nil { + definition, ok := workflowDef.Use.Errors[*t.Raise.Error.Ref] + if !ok { + return notFoundErr + } + t.Raise.Error.Definition = definition + return nil + } + return notFoundErr + } + return nil +} + +type RaiseTaskRunner struct { + Task *model.RaiseTask + TaskName string +} + +var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ + model.ErrorTypeAuthentication: model.NewErrAuthentication, + model.ErrorTypeValidation: model.NewErrValidation, + model.ErrorTypeCommunication: model.NewErrCommunication, + model.ErrorTypeAuthorization: model.NewErrAuthorization, + model.ErrorTypeConfiguration: model.NewErrConfiguration, + model.ErrorTypeExpression: model.NewErrExpression, + model.ErrorTypeRuntime: model.NewErrRuntime, + model.ErrorTypeTimeout: model.NewErrTimeout, +} + +func (r *RaiseTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { + output = input + // TODO: make this an external func so we can call it after getting the reference? 
Or we can get the reference from the workflow definition + var detailResult interface{} + detailResult, err = expr.TraverseAndEvaluateObj(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) + if err != nil { + return nil, err + } + + var titleResult interface{} + titleResult, err = expr.TraverseAndEvaluateObj(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) + if err != nil { + return nil, err + } + + instance := taskSupport.GetTaskReference() + + var raiseErr *model.Error + if raiseErrF, ok := raiseErrFuncMapping[r.Task.Raise.Error.Definition.Type.String()]; ok { + raiseErr = raiseErrF(fmt.Errorf("%v", detailResult), instance) + } else { + raiseErr = r.Task.Raise.Error.Definition + raiseErr.Detail = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", detailResult)) + raiseErr.Instance = &model.JsonPointerOrRuntimeExpression{Value: instance} + } + + raiseErr.Title = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", titleResult)) + err = raiseErr + + return output, err +} + +func (r *RaiseTaskRunner) GetTaskName() string { + return r.TaskName +} diff --git a/impl/task_runner_raise_test.go b/impl/task_runner_raise_test.go new file mode 100644 index 0000000..3de0aae --- /dev/null +++ b/impl/task_runner_raise_test.go @@ -0,0 +1,177 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
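+
+// The tests below cover both ways of declaring the raised error: an inline definition,
+// whose title/detail may be runtime expressions evaluated against the task input, and a
+// named reference, which must resolve against 'use.errors' for the runner to be built at
+// all, plus JSON round-tripping of both forms.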
+ +package impl + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +func TestRaiseTaskRunner_WithDefinedError(t *testing.T) { + input := map[string]interface{}{} + + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeValidation), + Status: 400, + Title: model.NewStringOrRuntimeExpr("Validation Error"), + Detail: model.NewStringOrRuntimeExpr("Invalid input data"), + }, + }, + }, + } + + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + assert.NoError(t, err) + wfCtx.SetTaskReference("task_raise_defined") + + taskSupport := newTaskSupport(withRunnerCtx(wfCtx)) + runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, taskSupport.GetWorkflowDef()) + assert.NoError(t, err) + + output, err := runner.Run(input, taskSupport) + assert.Equal(t, output, input) + assert.Error(t, err) + + expectedErr := model.NewErrValidation(errors.New("Invalid input data"), "task_raise_defined") + + var modelErr *model.Error + if errors.As(err, &modelErr) { + assert.Equal(t, expectedErr.Type.String(), modelErr.Type.String()) + assert.Equal(t, expectedErr.Status, modelErr.Status) + assert.Equal(t, expectedErr.Title.String(), modelErr.Title.String()) + assert.Equal(t, "Invalid input data", modelErr.Detail.String()) + assert.Equal(t, expectedErr.Instance.String(), modelErr.Instance.String()) + } else { + t.Errorf("expected error of type *model.Error but got %T", err) + } +} + +func TestRaiseTaskRunner_WithReferencedError(t *testing.T) { + ref := "someErrorRef" + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Ref: &ref, + }, + }, + } + + runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, &model.Workflow{}) + assert.Error(t, err) + assert.Nil(t, runner) +} + +func TestRaiseTaskRunner_TimeoutErrorWithExpression(t *testing.T) { + input := map[string]interface{}{ + "timeoutMessage": "Request took too long", + } + + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeTimeout), + Status: 408, + Title: model.NewStringOrRuntimeExpr("Timeout Error"), + Detail: model.NewStringOrRuntimeExpr("${ .timeoutMessage }"), + }, + }, + }, + } + + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + assert.NoError(t, err) + wfCtx.SetTaskReference("task_raise_timeout_expr") + + taskSupport := newTaskSupport(withRunnerCtx(wfCtx)) + runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, taskSupport.GetWorkflowDef()) + assert.NoError(t, err) + + output, err := runner.Run(input, taskSupport) + assert.Equal(t, input, output) + assert.Error(t, err) + + expectedErr := model.NewErrTimeout(errors.New("Request took too long"), "task_raise_timeout_expr") + + var modelErr *model.Error + if errors.As(err, &modelErr) { + assert.Equal(t, expectedErr.Type.String(), modelErr.Type.String()) + assert.Equal(t, expectedErr.Status, modelErr.Status) + assert.Equal(t, expectedErr.Title.String(), modelErr.Title.String()) + assert.Equal(t, "Request took too long", modelErr.Detail.String()) + assert.Equal(t, expectedErr.Instance.String(), modelErr.Instance.String()) + } else { + t.Errorf("expected error of type *model.Error but got %T", err) + } +} + +func 
TestRaiseTaskRunner_Serialization(t *testing.T) { + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeRuntime), + Status: 500, + Title: model.NewStringOrRuntimeExpr("Runtime Error"), + Detail: model.NewStringOrRuntimeExpr("Unexpected failure"), + Instance: &model.JsonPointerOrRuntimeExpression{Value: "/task_runtime"}, + }, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + + var deserializedTask model.RaiseTask + err = json.Unmarshal(data, &deserializedTask) + assert.NoError(t, err) + + assert.Equal(t, raiseTask.Raise.Error.Definition.Type.String(), deserializedTask.Raise.Error.Definition.Type.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Status, deserializedTask.Raise.Error.Definition.Status) + assert.Equal(t, raiseTask.Raise.Error.Definition.Title.String(), deserializedTask.Raise.Error.Definition.Title.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Detail.String(), deserializedTask.Raise.Error.Definition.Detail.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Instance.String(), deserializedTask.Raise.Error.Definition.Instance.String()) +} + +func TestRaiseTaskRunner_ReferenceSerialization(t *testing.T) { + ref := "errorReference" + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Ref: &ref, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + + var deserializedTask model.RaiseTask + err = json.Unmarshal(data, &deserializedTask) + assert.NoError(t, err) + + assert.Equal(t, *raiseTask.Raise.Error.Ref, *deserializedTask.Raise.Error.Ref) + assert.Nil(t, deserializedTask.Raise.Error.Definition) +} diff --git a/impl/task_runner_set.go b/impl/task_runner_set.go new file mode 100644 index 0000000..f2aaaa9 --- /dev/null +++ b/impl/task_runner_set.go @@ -0,0 +1,58 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
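+
+// The Set runner below deep-clones the task's 'set' map, evaluates every value as a runtime
+// expression against the task input, and requires the evaluated result to be a
+// map[string]interface{}. A minimal sketch, where setTask and input are assumed placeholders
+// and newTaskSupport is the test helper used throughout this package:
+//
+//	runner, err := NewSetTaskRunner("task1", setTask)
+//	if err != nil {
+//		return nil, err
+//	}
+//	output, err := runner.Run(input, newTaskSupport())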
+ +package impl + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewSetTaskRunner(taskName string, task *model.SetTask) (*SetTaskRunner, error) { + if task == nil || task.Set == nil { + return nil, model.NewErrValidation(fmt.Errorf("no set configuration provided for SetTask %s", taskName), taskName) + } + return &SetTaskRunner{ + Task: task, + TaskName: taskName, + }, nil +} + +type SetTaskRunner struct { + Task *model.SetTask + TaskName string +} + +func (s *SetTaskRunner) GetTaskName() string { + return s.TaskName +} + +func (s *SetTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { + setObject := utils.DeepClone(s.Task.Set) + result, err := expr.TraverseAndEvaluateObj(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, taskSupport.GetContext()) + if err != nil { + return nil, err + } + + output, ok := result.(map[string]interface{}) + if !ok { + return nil, model.NewErrRuntime(fmt.Errorf("expected output to be a map[string]interface{}, but got a different type. Got: %v", result), s.TaskName) + } + + return output, nil +} diff --git a/impl/task_runner_set_test.go b/impl/task_runner_set_test.go new file mode 100644 index 0000000..c02d76d --- /dev/null +++ b/impl/task_runner_set_test.go @@ -0,0 +1,416 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "reflect" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +func TestSetTaskExecutor_Exec(t *testing.T) { + input := map[string]interface{}{ + "configuration": map[string]interface{}{ + "size": map[string]interface{}{ + "width": 6, + "height": 6, + }, + "fill": map[string]interface{}{ + "red": 69, + "green": 69, + "blue": 69, + }, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "shape": "circle", + "size": "${ .configuration.size }", + "fill": "${ .configuration.fill }", + }, + } + + executor, err := NewSetTaskRunner("task1", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "shape": "circle", + "size": map[string]interface{}{ + "width": 6, + "height": 6, + }, + "fill": map[string]interface{}{ + "red": 69, + "green": 69, + "blue": 69, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_StaticValues(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "completed", + "count": 10, + }, + } + + executor, err := NewSetTaskRunner("task_static", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "completed", + "count": 10, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_RuntimeExpressions(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "firstName": "John", + "lastName": "Doe", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "fullName": "${ \"\\(.user.firstName) \\(.user.lastName)\" }", + }, + } + + executor, err := NewSetTaskRunner("task_runtime_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "fullName": "John Doe", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_NestedStructures(t *testing.T) { + input := map[string]interface{}{ + "order": map[string]interface{}{ + "id": 12345, + "items": []interface{}{"item1", "item2"}, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "orderDetails": map[string]interface{}{ + "orderId": "${ .order.id }", + "itemCount": "${ .order.items | length }", + }, + }, + } + + executor, err := NewSetTaskRunner("task_nested_structures", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "orderDetails": map[string]interface{}{ + "orderId": 12345, + "itemCount": 2, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_StaticAndDynamicValues(t *testing.T) { + input := map[string]interface{}{ + "config": map[string]interface{}{ + "threshold": 100, + }, + "metrics": map[string]interface{}{ + "current": 75, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "active", + "remaining": "${ .config.threshold - 
.metrics.current }", + }, + } + + executor, err := NewSetTaskRunner("task_static_dynamic", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "active", + "remaining": 25, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_MissingInputData(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "value": "${ .missingField }", + }, + } + + executor, err := NewSetTaskRunner("task_missing_input", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + assert.Nil(t, output.(map[string]interface{})["value"]) +} + +func TestSetTaskExecutor_ExpressionsWithFunctions(t *testing.T) { + input := map[string]interface{}{ + "values": []interface{}{1, 2, 3, 4, 5}, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "sum": "${ .values | map(.) | add }", + }, + } + + executor, err := NewSetTaskRunner("task_expr_functions", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "sum": 15, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ConditionalExpressions(t *testing.T) { + input := map[string]interface{}{ + "temperature": 30, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "weather": "${ if .temperature > 25 then 'hot' else 'cold' end }", + }, + } + + executor, err := NewSetTaskRunner("task_conditional_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ArrayDynamicIndex(t *testing.T) { + input := map[string]interface{}{ + "items": []interface{}{"apple", "banana", "cherry"}, + "index": 1, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "selectedItem": "${ .items[.index] }", + }, + } + + executor, err := NewSetTaskRunner("task_array_indexing", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "selectedItem": "banana", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_NestedConditionalLogic(t *testing.T) { + input := map[string]interface{}{ + "age": 20, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "${ if .age < 18 then 'minor' else if .age < 65 then 'adult' else 'senior' end end }", + }, + } + + executor, err := NewSetTaskRunner("task_nested_condition", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "adult", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_DefaultValues(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: 
map[string]interface{}{ + "value": "${ .missingField // 'defaultValue' }", + }, + } + + executor, err := NewSetTaskRunner("task_default_values", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "value": "defaultValue", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ComplexNestedStructures(t *testing.T) { + input := map[string]interface{}{ + "config": map[string]interface{}{ + "dimensions": map[string]interface{}{ + "width": 10, + "height": 5, + }, + }, + "meta": map[string]interface{}{ + "color": "blue", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "shape": map[string]interface{}{ + "type": "rectangle", + "width": "${ .config.dimensions.width }", + "height": "${ .config.dimensions.height }", + "color": "${ .meta.color }", + "area": "${ .config.dimensions.width * .config.dimensions.height }", + }, + }, + } + + executor, err := NewSetTaskRunner("task_complex_nested", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "shape": map[string]interface{}{ + "type": "rectangle", + "width": 10, + "height": 5, + "color": "blue", + "area": 50, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_MultipleExpressions(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "name": "Alice", + "email": "alice@example.com", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "username": "${ .user.name }", + "contact": "${ .user.email }", + }, + } + + executor, err := NewSetTaskRunner("task_multiple_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input, newTaskSupport()) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "username": "Alice", + "contact": "alice@example.com", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} diff --git a/impl/testdata/chained_set_tasks.yaml b/impl/testdata/chained_set_tasks.yaml new file mode 100644 index 0000000..8ee9a9c --- /dev/null +++ b/impl/testdata/chained_set_tasks.yaml @@ -0,0 +1,29 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
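+
+# Three chained 'set' tasks: the Do runner feeds each task's output into the next task's
+# input, so task2 can read .baseValue and task3 can read .doubled.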
+ +document: + name: chained-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + baseValue: 10 + - task2: + set: + doubled: "${ .baseValue * 2 }" + - task3: + set: + tripled: "${ .doubled * 3 }" diff --git a/impl/testdata/concatenating_strings.yaml b/impl/testdata/concatenating_strings.yaml new file mode 100644 index 0000000..22cd1b2 --- /dev/null +++ b/impl/testdata/concatenating_strings.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: concatenating-strings + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + firstName: "John" + lastName: "" + - task2: + set: + firstName: "${ .firstName }" + lastName: "Doe" + - task3: + set: + fullName: "${ .firstName + ' ' + .lastName }" diff --git a/impl/testdata/conditional_logic.yaml b/impl/testdata/conditional_logic.yaml new file mode 100644 index 0000000..30135a5 --- /dev/null +++ b/impl/testdata/conditional_logic.yaml @@ -0,0 +1,26 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: conditional-logic + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + temperature: 30 + - task2: + set: + weather: "${ if .temperature > 25 then 'hot' else 'cold' end }" diff --git a/impl/testdata/conditional_logic_input_from.yaml b/impl/testdata/conditional_logic_input_from.yaml new file mode 100644 index 0000000..f64f3e8 --- /dev/null +++ b/impl/testdata/conditional_logic_input_from.yaml @@ -0,0 +1,25 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +document: + name: conditional-logic + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +input: + from: "${ .localWeather }" +do: + - task2: + set: + weather: "${ if .temperature > 25 then 'hot' else 'cold' end }" diff --git a/impl/testdata/for_colors.yaml b/impl/testdata/for_colors.yaml new file mode 100644 index 0000000..ac33620 --- /dev/null +++ b/impl/testdata/for_colors.yaml @@ -0,0 +1,28 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: default + name: for + version: '1.0.0' +do: + - loopColors: + for: + each: color + in: '${ .colors }' + do: + - markProcessed: + set: + processed: '${ { colors: (.processed.colors + [ $color ]), indexes: (.processed.indexes + [ $index ])} }' diff --git a/impl/testdata/for_nested_loops.yaml b/impl/testdata/for_nested_loops.yaml new file mode 100644 index 0000000..3bef556 --- /dev/null +++ b/impl/testdata/for_nested_loops.yaml @@ -0,0 +1,35 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: for-tests + name: nested-loops + version: '1.0.0' +do: + - outerLoop: + for: + in: ${ .fruits } + each: fruit + at: fruitIdx + do: + - innerLoop: + for: + in: ${ $input.colors } + each: color + at: colorIdx + do: + - combinePair: + set: + matrix: ${ .matrix + [[$fruit, $color]] } diff --git a/impl/testdata/for_sum_numbers.yaml b/impl/testdata/for_sum_numbers.yaml new file mode 100644 index 0000000..afc81e9 --- /dev/null +++ b/impl/testdata/for_sum_numbers.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
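+
+# This 'for' task declares neither 'each' nor 'at', so the runner's defaults apply and the
+# nested 'set' task reads the current element as $item.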
+ +document: + dsl: '1.0.0' + namespace: for-tests + name: sum-numbers + version: '1.0.0' +do: + - sumLoop: + for: + in: ${ .numbers } + do: + - addNumber: + set: + total: ${ .total + $item } + - finalize: + set: + result: ${ .total } diff --git a/impl/testdata/fork_simple.yaml b/impl/testdata/fork_simple.yaml new file mode 100644 index 0000000..044b1e2 --- /dev/null +++ b/impl/testdata/fork_simple.yaml @@ -0,0 +1,33 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: test + name: fork-example + version: '0.1.0' +do: + - branchColors: + fork: + compete: false + branches: + - setRed: + set: + color1: red + - setBlue: + set: + color2: blue + - joinResult: + set: + colors: "${ [.[] | .[]] }" diff --git a/impl/testdata/raise_conditional.yaml b/impl/testdata/raise_conditional.yaml new file mode 100644 index 0000000..2d9f809 --- /dev/null +++ b/impl/testdata/raise_conditional.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# $schema: https://raw.githubusercontent.com/serverlessworkflow/specification/refs/heads/main/schema/workflow.yaml +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-conditional + version: '1.0.0' +do: + - underageError: + if: ${ .user.age < 18 } + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authorization + status: 403 + title: Authorization Error + detail: "User is under the required age" + - continueProcess: + set: + message: "User is allowed" diff --git a/impl/testdata/raise_error_with_input.yaml b/impl/testdata/raise_error_with_input.yaml new file mode 100644 index 0000000..96affe1 --- /dev/null +++ b/impl/testdata/raise_error_with_input.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
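+
+# The 'detail' below is a runtime expression: the Raise runner evaluates it against the task
+# input, so .reason is interpolated into the raised error's detail.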
+ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-with-input + version: '1.0.0' +do: + - dynamicError: + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authentication + status: 401 + title: Authentication Error + detail: '${ "User authentication failed: \( .reason )" }' diff --git a/impl/testdata/raise_inline.yaml b/impl/testdata/raise_inline.yaml new file mode 100644 index 0000000..940528a --- /dev/null +++ b/impl/testdata/raise_inline.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-inline + version: '1.0.0' +do: + - inlineError: + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/validation + status: 400 + title: Validation Error + detail: ${ "Invalid input provided to workflow \($workflow.definition.document.name)" } diff --git a/impl/testdata/raise_reusable.yaml b/impl/testdata/raise_reusable.yaml new file mode 100644 index 0000000..33a203d --- /dev/null +++ b/impl/testdata/raise_reusable.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-reusable + version: '1.0.0' +use: + errors: + AuthenticationError: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authentication + status: 401 + title: Authentication Error + detail: "User is not authenticated" +do: + - authError: + raise: + error: AuthenticationError diff --git a/impl/testdata/raise_undefined_reference.yaml b/impl/testdata/raise_undefined_reference.yaml new file mode 100644 index 0000000..1316818 --- /dev/null +++ b/impl/testdata/raise_undefined_reference.yaml @@ -0,0 +1,23 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
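+
+# Negative case: 'UndefinedError' is not declared under 'use.errors', so constructing the
+# Raise runner is expected to fail with an "error definition not found in 'uses'" validation
+# error.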
+ +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-undefined-reference + version: '1.0.0' +do: + - missingError: + raise: + error: UndefinedError diff --git a/impl/testdata/sequential_set_colors.yaml b/impl/testdata/sequential_set_colors.yaml new file mode 100644 index 0000000..b956c71 --- /dev/null +++ b/impl/testdata/sequential_set_colors.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: default + name: do + version: '1.0.0' +do: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } + output: + as: "${ { resultColors: .colors } }" \ No newline at end of file diff --git a/impl/testdata/sequential_set_colors_output_as.yaml b/impl/testdata/sequential_set_colors_output_as.yaml new file mode 100644 index 0000000..53c4919 --- /dev/null +++ b/impl/testdata/sequential_set_colors_output_as.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: default + name: do + version: '1.0.0' +do: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } +output: + as: "${ { result: .colors } }" \ No newline at end of file diff --git a/impl/testdata/set_tasks_invalid_then.yaml b/impl/testdata/set_tasks_invalid_then.yaml new file mode 100644 index 0000000..325c0c2 --- /dev/null +++ b/impl/testdata/set_tasks_invalid_then.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
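+
+# Negative case: task1's 'then' points at a task name that is not defined in this workflow,
+# so task2 should never be reached by a successful run.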
+ +document: + name: invalid-then-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + partialResult: 15 + then: nonExistentTask + - task2: + set: + skipped: true diff --git a/impl/testdata/set_tasks_with_termination.yaml b/impl/testdata/set_tasks_with_termination.yaml new file mode 100644 index 0000000..3c819bd --- /dev/null +++ b/impl/testdata/set_tasks_with_termination.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: termination-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalValue: 20 + then: end + - task2: + set: + skipped: true diff --git a/impl/testdata/set_tasks_with_then.yaml b/impl/testdata/set_tasks_with_then.yaml new file mode 100644 index 0000000..e0f8155 --- /dev/null +++ b/impl/testdata/set_tasks_with_then.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: then-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + value: 30 + then: task3 + - task2: + set: + skipped: true + - task3: + set: + result: "${ .value * 3 }" diff --git a/impl/testdata/switch_match.yaml b/impl/testdata/switch_match.yaml new file mode 100644 index 0000000..4f913af --- /dev/null +++ b/impl/testdata/switch_match.yaml @@ -0,0 +1,43 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
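+
+# The switch cases are evaluated in order and the first 'when' that matches .color decides
+# which 'set' task runs next via its 'then' directive; each target task then ends the flow.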
+ +document: + dsl: '1.0.0' + namespace: default + name: switch-match + version: '1.0.0' +do: + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - blue: + when: '.color == "blue"' + then: setBlue + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: end + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' + then: end diff --git a/impl/testdata/switch_with_default.yaml b/impl/testdata/switch_with_default.yaml new file mode 100644 index 0000000..8a4f1b9 --- /dev/null +++ b/impl/testdata/switch_with_default.yaml @@ -0,0 +1,43 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: default + name: switch-with-default + version: '1.0.0' + +do: + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - fallback: + then: setDefault + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: end + - setDefault: + set: + colors: '${ .colors + [ "default" ] }' + then: end diff --git a/impl/testdata/task_export_schema.yaml b/impl/testdata/task_export_schema.yaml new file mode 100644 index 0000000..e63e869 --- /dev/null +++ b/impl/testdata/task_export_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-export-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + exportedKey: "${ .key }" + export: + schema: + format: "json" + document: + type: "object" + properties: + exportedKey: + type: "string" + required: ["exportedKey"] diff --git a/impl/testdata/task_input_schema.yaml b/impl/testdata/task_input_schema.yaml new file mode 100644 index 0000000..d93b574 --- /dev/null +++ b/impl/testdata/task_input_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-input-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + input: + schema: + format: "json" + document: + type: "object" + properties: + taskInputKey: + type: "number" + required: ["taskInputKey"] + set: + taskOutputKey: "${ .taskInputKey * 2 }" diff --git a/impl/testdata/task_output_schema.yaml b/impl/testdata/task_output_schema.yaml new file mode 100644 index 0000000..73d784b --- /dev/null +++ b/impl/testdata/task_output_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-output-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalOutputKey: "resultValue" + output: + schema: + format: "json" + document: + type: "object" + properties: + finalOutputKey: + type: "string" + required: ["finalOutputKey"] diff --git a/impl/testdata/task_output_schema_with_dynamic_value.yaml b/impl/testdata/task_output_schema_with_dynamic_value.yaml new file mode 100644 index 0000000..39a7df9 --- /dev/null +++ b/impl/testdata/task_output_schema_with_dynamic_value.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-output-schema-with-dynamic-value + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalOutputKey: "${ .taskInputKey }" + output: + schema: + format: "json" + document: + type: "object" + properties: + finalOutputKey: + type: "string" + required: ["finalOutputKey"] diff --git a/impl/testdata/workflow_input_schema.yaml b/impl/testdata/workflow_input_schema.yaml new file mode 100644 index 0000000..fabf484 --- /dev/null +++ b/impl/testdata/workflow_input_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: workflow-input-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +input: + schema: + format: "json" + document: + type: "object" + properties: + key: + type: "string" + required: ["key"] +do: + - task1: + set: + outputKey: "${ .key }" diff --git a/impl/utils/json_schema.go b/impl/utils/json_schema.go new file mode 100644 index 0000000..9b91553 --- /dev/null +++ b/impl/utils/json_schema.go @@ -0,0 +1,79 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/xeipuuv/gojsonschema" +) + +// validateJSONSchema validates the provided data against a model.Schema. +func validateJSONSchema(data interface{}, schema *model.Schema) error { + if schema == nil { + return nil + } + + schema.ApplyDefaults() + + if schema.Format != model.DefaultSchema { + return fmt.Errorf("unsupported schema format: '%s'", schema.Format) + } + + var schemaJSON string + if schema.Document != nil { + documentBytes, err := json.Marshal(schema.Document) + if err != nil { + return fmt.Errorf("failed to marshal schema document to JSON: %w", err) + } + schemaJSON = string(documentBytes) + } else if schema.Resource != nil { + // TODO: Handle external resource references (not implemented here) + return errors.New("external resources are not yet supported") + } else { + return errors.New("schema must have either a 'Document' or 'Resource'") + } + + schemaLoader := gojsonschema.NewStringLoader(schemaJSON) + dataLoader := gojsonschema.NewGoLoader(data) + + result, err := gojsonschema.Validate(schemaLoader, dataLoader) + if err != nil { + // TODO: use model.Error + return fmt.Errorf("failed to validate JSON schema: %w", err) + } + + if !result.Valid() { + var validationErrors string + for _, err := range result.Errors() { + validationErrors += fmt.Sprintf("- %s\n", err.String()) + } + return fmt.Errorf("JSON schema validation failed:\n%s", validationErrors) + } + + return nil +} + +func ValidateSchema(data interface{}, schema *model.Schema, taskName string) error { + if schema != nil { + if err := validateJSONSchema(data, schema); err != nil { + return model.NewErrValidation(err, taskName) + } + } + return nil +} diff --git a/impl/utils/utils.go b/impl/utils/utils.go new file mode 100644 index 0000000..f444139 --- /dev/null +++ b/impl/utils/utils.go @@ -0,0 +1,38 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the 
"License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +// DeepClone a map to avoid modifying the original object +func DeepClone(obj map[string]interface{}) map[string]interface{} { + clone := make(map[string]interface{}) + for key, value := range obj { + clone[key] = DeepCloneValue(value) + } + return clone +} + +func DeepCloneValue(value interface{}) interface{} { + if m, ok := value.(map[string]interface{}); ok { + return DeepClone(m) + } + if s, ok := value.([]interface{}); ok { + clonedSlice := make([]interface{}, len(s)) + for i, v := range s { + clonedSlice[i] = DeepCloneValue(v) + } + return clonedSlice + } + return value +} diff --git a/maintainer_guidelines.md b/maintainer_guidelines.md index ecd1b11..d40d33b 100644 --- a/maintainer_guidelines.md +++ b/maintainer_guidelines.md @@ -16,7 +16,7 @@ Here are a few tips for repository maintainers. ## Branch Management -The `main` branch is is the bleeding edge. New major versions of the module +The `main` branch is the bleeding edge. New major versions of the module are cut from this branch and tagged. If you intend to submit a pull request you should use `main HEAD` as your starting point. diff --git a/model/auth.go b/model/auth.go deleted file mode 100644 index 60e9aa1..0000000 --- a/model/auth.go +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "reflect" - - validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidation(AuthDefinitionsStructLevelValidation, AuthDefinitions{}) -} - -// AuthDefinitionsStructLevelValidation custom validator for unique name of the auth methods -func AuthDefinitionsStructLevelValidation(structLevel validator.StructLevel) { - authDefs := structLevel.Current().Interface().(AuthDefinitions) - dict := map[string]bool{} - if authDefs.Defs != nil && len(authDefs.Defs) > 1 { - for _, a := range authDefs.Defs { - if !dict[a.Name] { - dict[a.Name] = true - } else { - structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") - } - } - } -} - -// AuthDefinitions used to define authentication information applied to resources defined in the operation property of function definitions -type AuthDefinitions struct { - Defs []Auth -} - -// AuthType ... -type AuthType string - -const ( - // AuthTypeBasic ... 
- AuthTypeBasic AuthType = "basic" - // AuthTypeBearer ... - AuthTypeBearer AuthType = "bearer" - // AuthTypeOAuth2 ... - AuthTypeOAuth2 AuthType = "oauth2" -) - -// GrantType ... -type GrantType string - -const ( - // GrantTypePassword ... - GrantTypePassword GrantType = "password" - // GrantTypeClientCredentials ... - GrantTypeClientCredentials GrantType = "clientCredentials" - // GrantTypeTokenExchange ... - GrantTypeTokenExchange GrantType = "tokenExchange" -) - -// authTypesMapping map to support JSON unmarshalling when guessing the auth scheme -var authTypesMapping = map[AuthType]AuthProperties{ - AuthTypeBasic: &BasicAuthProperties{}, - AuthTypeBearer: &BearerAuthProperties{}, - AuthTypeOAuth2: &OAuth2AuthProperties{}, -} - -// Auth ... -type Auth struct { - // Name Unique auth definition name - Name string `json:"name" validate:"required"` - // Scheme Defines the auth type - Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` - // Properties ... - Properties AuthProperties `json:"properties" validate:"required"` -} - -// UnmarshalJSON implements json.Unmarshaler -func (a *AuthDefinitions) UnmarshalJSON(b []byte) error { - if len(b) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - // See if we can guess based on the first character - switch b[0] { - case '{': - return a.unmarshalSingle(b) - case '[': - return a.unmarshalMany(b) - } - return nil -} - -func (a *AuthDefinitions) unmarshalSingle(data []byte) error { - var auth Auth - err := json.Unmarshal(data, &auth) - if err != nil { - return err - } - a.Defs = []Auth{auth} - return nil -} - -func (a *AuthDefinitions) unmarshalMany(data []byte) error { - var auths []Auth - err := json.Unmarshal(data, &auths) - if err != nil { - return err - } - - a.Defs = auths - return nil -} - -// UnmarshalJSON Auth definition -func (a *Auth) UnmarshalJSON(data []byte) error { - auth := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &auth); err != nil { - // it's a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - // call us recursively - if err := json.Unmarshal(file, &a); err != nil { - return err - } - return nil - } - if err := unmarshalKey("scheme", auth, &a.Scheme); err != nil { - return err - } - if err := unmarshalKey("name", auth, &a.Name); err != nil { - return err - } - - if len(a.Scheme) == 0 { - a.Scheme = AuthTypeBasic - } - if _, ok := authTypesMapping[a.Scheme]; !ok { - return fmt.Errorf("authentication scheme %s not supported", authTypesMapping["type"]) - } - // we take the type we want to unmarshal based on the scheme - authProperties := authTypesMapping[a.Scheme] - if err := unmarshalKey("properties", auth, authProperties); err != nil { - return err - } - - a.Properties = authProperties - return nil -} - -// AuthProperties ... -type AuthProperties interface { - // GetMetadata ... - GetMetadata() *Metadata - // GetSecret ... - GetSecret() string -} - -// BaseAuthProperties ... -type BaseAuthProperties struct { - Common - // Secret Expression referencing a workflow secret that contains all needed auth info - Secret string `json:"secret,omitempty"` -} - -// UnmarshalJSON ... 
-func (b *BaseAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - b.Secret, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - if err := unmarshalKey("secret", properties, &b.Secret); err != nil { - return err - } - return nil -} - -// GetMetadata ... -func (b *BaseAuthProperties) GetMetadata() *Metadata { - return &b.Metadata -} - -// GetSecret ... -func (b *BaseAuthProperties) GetSecret() string { - return b.Secret -} - -// BasicAuthProperties Basic Auth Info -type BasicAuthProperties struct { - BaseAuthProperties - // Username String or a workflow expression. Contains the username - Username string `json:"username" validate:"required"` - // Password String or a workflow expression. Contains the user password - Password string `json:"password" validate:"required"` -} - -// UnmarshalJSON ... -func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("username", properties, &b.Username); err != nil { - return err - } - if err := unmarshalKey("password", properties, &b.Password); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - return nil -} - -// BearerAuthProperties Bearer auth information -type BearerAuthProperties struct { - BaseAuthProperties - // Token String or a workflow expression. Contains the token - Token string `json:"token" validate:"required"` -} - -// UnmarshalJSON ... -func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("token", properties, &b.Token); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - return nil -} - -// OAuth2AuthProperties OAuth2 information -type OAuth2AuthProperties struct { - BaseAuthProperties - // Authority String or a workflow expression. Contains the authority information - Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` - // GrantType Defines the grant type - GrantType GrantType `json:"grantType" validate:"required"` - // ClientID String or a workflow expression. Contains the client identifier - ClientID string `json:"clientId" validate:"required"` - // ClientSecret Workflow secret or a workflow expression. Contains the client secret - ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` - // Scopes Array containing strings or workflow expressions. Contains the OAuth2 scopes - Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` - // Username String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner' - Username string `json:"username,omitempty" validate:"omitempty,min=1"` - // Password String or a workflow expression. Contains the user password. 
Used only if grantType is 'resourceOwner' - Password string `json:"password,omitempty" validate:"omitempty,min=1"` - // Audiences Array containing strings or workflow expressions. Contains the OAuth2 audiences - Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` - // SubjectToken String or a workflow expression. Contains the subject token - SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` - // RequestedSubject String or a workflow expression. Contains the requested subject - RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` - // RequestedIssuer String or a workflow expression. Contains the requested issuer - RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` -} - -// TODO: use reflection to unmarshal the keys and think on a generic approach to handle them - -// UnmarshalJSON ... -func (b *OAuth2AuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("authority", properties, &b.Authority); err != nil { - return err - } - if err := unmarshalKey("grantType", properties, &b.GrantType); err != nil { - return err - } - if err := unmarshalKey("clientId", properties, &b.ClientID); err != nil { - return err - } - if err := unmarshalKey("clientSecret", properties, &b.ClientSecret); err != nil { - return err - } - if err := unmarshalKey("scopes", properties, &b.Scopes); err != nil { - return err - } - if err := unmarshalKey("username", properties, &b.Username); err != nil { - return err - } - if err := unmarshalKey("password", properties, &b.Password); err != nil { - return err - } - if err := unmarshalKey("audiences", properties, &b.Audiences); err != nil { - return err - } - if err := unmarshalKey("subjectToken", properties, &b.SubjectToken); err != nil { - return err - } - if err := unmarshalKey("requestedSubject", properties, &b.RequestedSubject); err != nil { - return err - } - if err := unmarshalKey("requestedIssuer", properties, &b.RequestedIssuer); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - return nil -} diff --git a/model/authentication.go b/model/authentication.go new file mode 100644 index 0000000..35f06a4 --- /dev/null +++ b/model/authentication.go @@ -0,0 +1,187 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// AuthenticationPolicy Defines an authentication policy. 
+type AuthenticationPolicy struct { + Basic *BasicAuthenticationPolicy `json:"basic,omitempty"` + Bearer *BearerAuthenticationPolicy `json:"bearer,omitempty"` + Digest *DigestAuthenticationPolicy `json:"digest,omitempty"` + OAuth2 *OAuth2AuthenticationPolicy `json:"oauth2,omitempty"` + OIDC *OpenIdConnectAuthenticationPolicy `json:"oidc,omitempty"` +} + +// UnmarshalJSON for AuthenticationPolicy to enforce "oneOf" behavior. +func (ap *AuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Create temporary maps to detect which field is populated + temp := struct { + Basic json.RawMessage `json:"basic"` + Bearer json.RawMessage `json:"bearer"` + Digest json.RawMessage `json:"digest"` + OAuth2 json.RawMessage `json:"oauth2"` + OIDC json.RawMessage `json:"oidc"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if len(temp.Basic) > 0 { + count++ + ap.Basic = &BasicAuthenticationPolicy{} + if err := json.Unmarshal(temp.Basic, ap.Basic); err != nil { + return err + } + } + if len(temp.Bearer) > 0 { + count++ + ap.Bearer = &BearerAuthenticationPolicy{} + if err := json.Unmarshal(temp.Bearer, ap.Bearer); err != nil { + return err + } + } + if len(temp.Digest) > 0 { + count++ + ap.Digest = &DigestAuthenticationPolicy{} + if err := json.Unmarshal(temp.Digest, ap.Digest); err != nil { + return err + } + } + if len(temp.OAuth2) > 0 { + count++ + ap.OAuth2 = &OAuth2AuthenticationPolicy{} + if err := json.Unmarshal(temp.OAuth2, ap.OAuth2); err != nil { + return err + } + } + if len(temp.OIDC) > 0 { + count++ + ap.OIDC = &OpenIdConnectAuthenticationPolicy{} + if err := json.Unmarshal(temp.OIDC, ap.OIDC); err != nil { + return err + } + } + + // Ensure only one field is set + if count != 1 { + return errors.New("invalid AuthenticationPolicy: only one authentication type must be specified") + } + return nil +} + +// MarshalJSON for AuthenticationPolicy. +func (ap *AuthenticationPolicy) MarshalJSON() ([]byte, error) { + if ap.Basic != nil { + return json.Marshal(map[string]interface{}{"basic": ap.Basic}) + } + if ap.Bearer != nil { + return json.Marshal(map[string]interface{}{"bearer": ap.Bearer}) + } + if ap.Digest != nil { + return json.Marshal(map[string]interface{}{"digest": ap.Digest}) + } + if ap.OAuth2 != nil { + return json.Marshal(map[string]interface{}{"oauth2": ap.OAuth2}) + } + if ap.OIDC != nil { + return json.Marshal(map[string]interface{}{"oidc": ap.OIDC}) + } + // Add logic for other fields... + return nil, errors.New("invalid AuthenticationPolicy: no valid configuration to marshal") +} + +// ReferenceableAuthenticationPolicy represents a referenceable authentication policy. +type ReferenceableAuthenticationPolicy struct { + Use *string `json:"use,omitempty"` + AuthenticationPolicy *AuthenticationPolicy `json:",inline"` +} + +// UnmarshalJSON for ReferenceableAuthenticationPolicy enforces the "oneOf" behavior. 
+func (rap *ReferenceableAuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Temporary structure to detect which field is populated + temp := struct { + Use *string `json:"use"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Check if `use` is set + if temp.Use != nil { + rap.Use = temp.Use + return nil + } + + // If `use` is not set, try unmarshalling inline AuthenticationPolicy + var ap AuthenticationPolicy + if err := json.Unmarshal(data, &ap); err != nil { + return err + } + + rap.AuthenticationPolicy = &ap + return nil +} + +// MarshalJSON for ReferenceableAuthenticationPolicy. +func (rap *ReferenceableAuthenticationPolicy) MarshalJSON() ([]byte, error) { + if rap.Use != nil { + return json.Marshal(map[string]interface{}{"use": rap.Use}) + } + if rap.AuthenticationPolicy != nil { + return json.Marshal(rap.AuthenticationPolicy) + } + return nil, errors.New("invalid ReferenceableAuthenticationPolicy: no valid configuration to marshal") +} + +func NewBasicAuth(username, password string) *AuthenticationPolicy { + return &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: username, + Password: password, + }} +} + +// BasicAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type BasicAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,basic_policy"` +} + +// BearerAuthenticationPolicy supports either an inline token or a secret reference (use). +type BearerAuthenticationPolicy struct { + Token string `json:"token,omitempty" validate:"required_without=Use,bearer_policy"` + Use string `json:"use,omitempty" validate:"required_without=Token"` +} + +// DigestAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type DigestAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,digest_policy"` +} + +// OpenIdConnectAuthenticationPolicy Use OpenIdConnect authentication. +type OpenIdConnectAuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"omitempty,required_without=Use"` + Use string `json:"use,omitempty" validate:"omitempty,required_without=Properties"` +} diff --git a/model/authentication_oauth.go b/model/authentication_oauth.go new file mode 100644 index 0000000..e6e5f54 --- /dev/null +++ b/model/authentication_oauth.go @@ -0,0 +1,212 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
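// Illustrative usage sketch (editorial addition, not part of this patch): how the
// AuthenticationPolicy "oneOf" marshalling introduced above is expected to behave.
// It assumes only the types from model/authentication.go and the v3 module path used
// elsewhere in this PR; the printed outputs mirror the custom (Un)MarshalJSON logic.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// NewBasicAuth builds an inline basic policy; MarshalJSON nests it under "basic".
	policy := model.NewBasicAuth("john", "12345")
	out, err := json.Marshal(policy)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"basic":{"username":"john","password":"12345"}}

	// UnmarshalJSON enforces the "oneOf" rule: two schemes in one object is an error.
	var ap model.AuthenticationPolicy
	err = json.Unmarshal([]byte(`{"basic":{"username":"a","password":"b"},"bearer":{"token":"t"}}`), &ap)
	fmt.Println(err) // invalid AuthenticationPolicy: only one authentication type must be specified
}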
+ +package model + +import ( + "encoding/json" + "fmt" +) + +// Endpoints are composed here and not on a separate wrapper object to avoid too many nested objects and inline marshaling. +// This allows us to reuse OAuth2AuthenticationProperties also on OpenIdConnectAuthenticationPolicy + +type OAuth2AuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"required_without=Use"` + Endpoints *OAuth2Endpoints `json:"endpoints,omitempty"` + Use string `json:"use,omitempty" validate:"oauth2_policy"` +} + +func (o *OAuth2AuthenticationPolicy) ApplyDefaults() { + if o.Endpoints == nil { + return + } + + // Apply defaults if the respective fields are empty + if o.Endpoints.Token == "" { + o.Endpoints.Token = OAuth2DefaultTokenURI + } + if o.Endpoints.Revocation == "" { + o.Endpoints.Revocation = OAuth2DefaultRevokeURI + } + if o.Endpoints.Introspection == "" { + o.Endpoints.Introspection = OAuth2DefaultIntrospectionURI + } +} + +func (o *OAuth2AuthenticationPolicy) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationPolicy + aux := &struct { + *Alias + }{ + Alias: (*Alias)(o), + } + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Initialize Properties if any field for it is set + if o.Properties == nil && containsOAuth2Properties(data) { + o.Properties = &OAuth2AuthenticationProperties{} + if err := json.Unmarshal(data, o.Properties); err != nil { + return err + } + } + + return nil +} + +func containsOAuth2Properties(data []byte) bool { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return false + } + for key := range raw { + if key != "use" { + return true + } + } + return false +} + +// MarshalJSON customizes the JSON output for OAuth2AuthenticationPolicy +func (o *OAuth2AuthenticationPolicy) MarshalJSON() ([]byte, error) { + o.ApplyDefaults() + // Create a map to hold the resulting JSON + result := make(map[string]interface{}) + + // Inline Properties fields if present + if o.Properties != nil { + propertiesJSON, err := json.Marshal(o.Properties) + if err != nil { + return nil, err + } + + var propertiesMap map[string]interface{} + if err := json.Unmarshal(propertiesJSON, &propertiesMap); err != nil { + return nil, err + } + + for key, value := range propertiesMap { + result[key] = value + } + } + + // Add the Use field if present + if o.Use != "" { + result["use"] = o.Use + } + + return json.Marshal(result) +} + +type OAuth2AuthenticationProperties struct { + Authority URITemplate `json:"authority,omitempty"` + Grant OAuth2AuthenticationDataGrant `json:"grant,omitempty" validate:"oneof='authorization_code' 'client_credentials' 'password' 'refresh_token' 'urn:ietf:params:oauth:grant-type:token-exchange'"` + Client *OAuth2AutenthicationDataClient `json:"client,omitempty"` + Request *OAuth2TokenRequest `json:"request,omitempty"` + Issuers []string `json:"issuers,omitempty"` + Scopes []string `json:"scopes,omitempty"` + Audiences []string `json:"audiences,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + Subject *OAuth2Token `json:"subject,omitempty"` + Actor *OAuth2Token `json:"actor,omitempty"` +} + +func (o *OAuth2AuthenticationProperties) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationProperties + aux := &struct { + Authority json.RawMessage `json:"authority"` + *Alias + }{ + Alias: (*Alias)(o), + } + + if err := json.Unmarshal(data, &aux); err != nil { + return fmt.Errorf("failed to 
unmarshal OAuth2AuthenticationProperties: %w", err) + } + + // Unmarshal the Authority field + if aux.Authority != nil { + uri, err := UnmarshalURITemplate(aux.Authority) + if err != nil { + return fmt.Errorf("invalid authority URI: %w", err) + } + o.Authority = uri + } + + return nil +} + +// OAuth2AuthenticationDataGrant represents the grant type to use in OAuth2 authentication. +type OAuth2AuthenticationDataGrant string + +// Valid grant types +const ( + AuthorizationCodeGrant OAuth2AuthenticationDataGrant = "authorization_code" + ClientCredentialsGrant OAuth2AuthenticationDataGrant = "client_credentials" + PasswordGrant OAuth2AuthenticationDataGrant = "password" + RefreshTokenGrant OAuth2AuthenticationDataGrant = "refresh_token" + TokenExchangeGrant OAuth2AuthenticationDataGrant = "urn:ietf:params:oauth:grant-type:token-exchange" // #nosec G101 +) + +type OAuthClientAuthenticationType string + +const ( + OAuthClientAuthClientSecretBasic OAuthClientAuthenticationType = "client_secret_basic" + OAuthClientAuthClientSecretPost OAuthClientAuthenticationType = "client_secret_post" + OAuthClientAuthClientSecretJWT OAuthClientAuthenticationType = "client_secret_jwt" + OAuthClientAuthPrivateKeyJWT OAuthClientAuthenticationType = "private_key_jwt" + OAuthClientAuthNone OAuthClientAuthenticationType = "none" +) + +type OAuth2TokenRequestEncodingType string + +const ( + EncodingTypeFormUrlEncoded OAuth2TokenRequestEncodingType = "application/x-www-form-urlencoded" + EncodingTypeApplicationJson OAuth2TokenRequestEncodingType = "application/json" +) + +// OAuth2AutenthicationDataClient The definition of an OAuth2 client. +type OAuth2AutenthicationDataClient struct { + ID string `json:"id,omitempty"` + Secret string `json:"secret,omitempty"` + Assertion string `json:"assertion,omitempty"` + Authentication OAuthClientAuthenticationType `json:"authentication,omitempty" validate:"client_auth_type"` +} + +type OAuth2TokenRequest struct { + Encoding OAuth2TokenRequestEncodingType `json:"encoding" validate:"encoding_type"` +} + +// OAuth2Token Represents an OAuth2 token. +type OAuth2Token struct { + // Token The security token to use + Token string `json:"token,omitempty"` + // Type The type of the security token to use. + Type string `json:"type,omitempty"` +} + +type OAuth2Endpoints struct { + Token string `json:"token,omitempty"` + Revocation string `json:"revocation,omitempty"` + Introspection string `json:"introspection,omitempty"` +} + +const ( + OAuth2DefaultTokenURI = "/oauth2/token" // #nosec G101 + OAuth2DefaultRevokeURI = "/oauth2/revoke" + OAuth2DefaultIntrospectionURI = "/oauth2/introspect" +) diff --git a/model/authentication_oauth_test.go b/model/authentication_oauth_test.go new file mode 100644 index 0000000..820dac1 --- /dev/null +++ b/model/authentication_oauth_test.go @@ -0,0 +1,164 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
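// Illustrative usage sketch (editorial addition, not part of this patch): the inline
// "properties" handling and endpoint defaulting of OAuth2AuthenticationPolicy defined
// above. It assumes only model/authentication_oauth.go and the v3 module path; the
// expected values follow directly from UnmarshalJSON and ApplyDefaults.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	data := []byte(`{
		"authority": "https://auth.example.com",
		"grant": "client_credentials",
		"client": {"id": "my-client", "secret": "my-secret"},
		"endpoints": {"token": "/custom/token"}
	}`)

	var policy model.OAuth2AuthenticationPolicy
	if err := json.Unmarshal(data, &policy); err != nil {
		panic(err)
	}

	// Keys other than "use" are present, so Properties is populated inline.
	fmt.Println(policy.Properties.Grant)              // client_credentials
	fmt.Println(policy.Properties.Client.ID)          // my-client
	fmt.Println(policy.Properties.Authority.String()) // https://auth.example.com

	// ApplyDefaults only fills endpoint fields that were left empty.
	policy.ApplyDefaults()
	fmt.Println(policy.Endpoints.Token)         // /custom/token
	fmt.Println(policy.Endpoints.Revocation)    // /oauth2/revoke
	fmt.Println(policy.Endpoints.Introspection) // /oauth2/introspect
}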
+ +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestOAuth2AuthenticationPolicyValidation(t *testing.T) { + testCases := []struct { + name string + policy OAuth2AuthenticationPolicy + shouldPass bool + }{ + { + name: "Valid: Use set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + }, + shouldPass: true, + }, + { + name: "Valid: Properties set", + policy: OAuth2AuthenticationPolicy{ + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: true, + }, + { + name: "Invalid: Both Use and Properties set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: false, + }, + { + name: "Invalid: Neither Use nor Properties set", + policy: OAuth2AuthenticationPolicy{}, + shouldPass: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := validate.Struct(tc.policy) + if tc.shouldPass { + if err != nil { + t.Errorf("Expected validation to pass, but got error: %v", err) + } + } else { + if err == nil { + t.Errorf("Expected validation to fail, but it passed") + } + } + }) + } +} + +func TestAuthenticationOAuth2Policy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid OAuth2 Authentication Inline", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "scopes": ["scope1", "scope2"] + } + }`, + expected: `{"oauth2":{"authority":"https://auth.example.com","grant":"client_credentials","scopes":["scope1","scope2"]}}`, + expectsErr: false, + }, + { + name: "Valid OAuth2 Authentication Use", + input: `{ + "oauth2": { + "use": "mysecret" + } + }`, + expected: `{"oauth2":{"use":"mysecret"}}`, + expectsErr: false, + }, + { + name: "Invalid OAuth2: Both properties and use set", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "use": "mysecret" + } + }`, + expectsErr: true, + }, + { + name: "Invalid OAuth2: Missing required fields", + input: `{ + "oauth2": {} + }`, + expectsErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + err = validate.Struct(authPolicy) + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/authentication_test.go b/model/authentication_test.go new file mode 100644 index 0000000..af0f687 --- /dev/null +++ b/model/authentication_test.go @@ -0,0 +1,98 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestAuthenticationPolicy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid Basic Authentication Inline", + input: `{ + "basic": { + "username": "john", + "password": "12345" + } + }`, + expected: `{"basic":{"username":"john","password":"12345"}}`, + expectsErr: false, + }, + { + name: "Valid Digest Authentication Inline", + input: `{ + "digest": { + "username": "digestUser", + "password": "digestPass" + } + }`, + expected: `{"digest":{"username":"digestUser","password":"digestPass"}}`, + expectsErr: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + if authPolicy.Basic != nil { + err = validate.Struct(authPolicy.Basic) + } + if authPolicy.Bearer != nil { + err = validate.Struct(authPolicy.Bearer) + } + if authPolicy.Digest != nil { + err = validate.Struct(authPolicy.Digest) + } + if authPolicy.OAuth2 != nil { + err = validate.Struct(authPolicy.OAuth2) + } + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/builder.go b/model/builder.go new file mode 100644 index 0000000..81a51c6 --- /dev/null +++ b/model/builder.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + + "sigs.k8s.io/yaml" +) + +// WorkflowBuilder helps construct and serialize a Workflow object. +type WorkflowBuilder struct { + workflow *Workflow +} + +// NewWorkflowBuilder initializes a new WorkflowBuilder. +func NewWorkflowBuilder() *WorkflowBuilder { + return &WorkflowBuilder{ + workflow: &Workflow{ + Document: Document{}, + Do: &TaskList{}, + }, + } +} + +// SetDocument sets the Document fields in the Workflow. 
+func (wb *WorkflowBuilder) SetDocument(dsl, namespace, name, version string) *WorkflowBuilder { + wb.workflow.Document.DSL = dsl + wb.workflow.Document.Namespace = namespace + wb.workflow.Document.Name = name + wb.workflow.Document.Version = version + return wb +} + +// AddTask adds a TaskItem to the Workflow's Do list. +func (wb *WorkflowBuilder) AddTask(key string, task Task) *WorkflowBuilder { + *wb.workflow.Do = append(*wb.workflow.Do, &TaskItem{ + Key: key, + Task: task, + }) + return wb +} + +// SetInput sets the Input for the Workflow. +func (wb *WorkflowBuilder) SetInput(input *Input) *WorkflowBuilder { + wb.workflow.Input = input + return wb +} + +// SetOutput sets the Output for the Workflow. +func (wb *WorkflowBuilder) SetOutput(output *Output) *WorkflowBuilder { + wb.workflow.Output = output + return wb +} + +// SetTimeout sets the Timeout for the Workflow. +func (wb *WorkflowBuilder) SetTimeout(timeout *TimeoutOrReference) *WorkflowBuilder { + wb.workflow.Timeout = timeout + return wb +} + +// SetUse sets the Use section for the Workflow. +func (wb *WorkflowBuilder) SetUse(use *Use) *WorkflowBuilder { + wb.workflow.Use = use + return wb +} + +// SetSchedule sets the Schedule for the Workflow. +func (wb *WorkflowBuilder) SetSchedule(schedule *Schedule) *WorkflowBuilder { + wb.workflow.Schedule = schedule + return wb +} + +// Build returns the constructed Workflow object. +func (wb *WorkflowBuilder) Build() *Workflow { + return wb.workflow +} + +// ToYAML serializes the Workflow to YAML format. +func (wb *WorkflowBuilder) ToYAML() ([]byte, error) { + return yaml.Marshal(wb.workflow) +} + +// ToJSON serializes the Workflow to JSON format. +func (wb *WorkflowBuilder) ToJSON() ([]byte, error) { + return json.MarshalIndent(wb.workflow, "", " ") +} diff --git a/model/endpoint.go b/model/endpoint.go new file mode 100644 index 0000000..cd9ee88 --- /dev/null +++ b/model/endpoint.go @@ -0,0 +1,217 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" + + "github.com/tidwall/gjson" +) + +// LiteralUriPattern matches standard URIs without placeholders. +var LiteralUriPattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://[^{}\s]+$`) + +// LiteralUriTemplatePattern matches URIs with placeholders. +var LiteralUriTemplatePattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://.*\{.*}.*$`) + +// URITemplate represents a URI that can be a literal URI or a URI template. +type URITemplate interface { + IsURITemplate() bool + String() string + GetValue() interface{} +} + +// UnmarshalURITemplate is a shared function for unmarshalling URITemplate fields. 
+func UnmarshalURITemplate(data []byte) (URITemplate, error) { + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("failed to unmarshal URITemplate: %w", err) + } + + if LiteralUriTemplatePattern.MatchString(raw) { + return &LiteralUriTemplate{Value: raw}, nil + } + + if LiteralUriPattern.MatchString(raw) { + return &LiteralUri{Value: raw}, nil + } + + return nil, fmt.Errorf("invalid URI or URI template format: %s", raw) +} + +type LiteralUriTemplate struct { + Value string `json:"-" validate:"required,uri_template_pattern"` // Validate pattern for URI template. +} + +func (t *LiteralUriTemplate) IsURITemplate() bool { + return true +} + +func (t *LiteralUriTemplate) MarshalJSON() ([]byte, error) { + return json.Marshal(t.Value) +} + +func (t *LiteralUriTemplate) String() string { + return t.Value +} + +func (t *LiteralUriTemplate) GetValue() interface{} { + return t.Value +} + +type LiteralUri struct { + Value string `json:"-" validate:"required,uri_pattern"` // Validate pattern for URI. +} + +func (u *LiteralUri) IsURITemplate() bool { + return true +} + +func (u *LiteralUri) MarshalJSON() ([]byte, error) { + return json.Marshal(u.Value) +} + +func (u *LiteralUri) String() string { + return u.Value +} + +func (u *LiteralUri) GetValue() interface{} { + return u.Value +} + +type EndpointConfiguration struct { + RuntimeExpression *RuntimeExpression `json:"-"` + URI URITemplate `json:"uri" validate:"required"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +// UnmarshalJSON implements custom unmarshalling for EndpointConfiguration. +func (e *EndpointConfiguration) UnmarshalJSON(data []byte) error { + // Use a temporary structure to unmarshal the JSON + type Alias EndpointConfiguration + temp := &struct { + URI json.RawMessage `json:"uri"` + *Alias + }{ + Alias: (*Alias)(e), + } + + if err := json.Unmarshal(data, &temp); err != nil { + return fmt.Errorf("failed to unmarshal EndpointConfiguration: %w", err) + } + + // Unmarshal the URI field into the appropriate URITemplate implementation + uri, err := UnmarshalURITemplate(temp.URI) + if err == nil { + e.URI = uri + return nil + } + + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(temp.URI, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + e.RuntimeExpression = &runtimeExpr + return nil + } + + return errors.New("failed to unmarshal EndpointConfiguration: data does not match any known schema") +} + +// MarshalJSON implements custom marshalling for Endpoint. +func (e *EndpointConfiguration) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}) + if e.Authentication != nil { + m["authentication"] = e.Authentication + } + + if e.RuntimeExpression != nil { + m["uri"] = e.RuntimeExpression + } else if e.URI != nil { + m["uri"] = e.URI + } + + // Return an empty JSON object when no fields are set + return json.Marshal(m) +} + +type Endpoint struct { + RuntimeExpression *RuntimeExpression `json:"-"` + URITemplate URITemplate `json:"-"` + EndpointConfig *EndpointConfiguration `json:"-"` +} + +func NewEndpoint(uri string) *Endpoint { + return &Endpoint{URITemplate: &LiteralUri{Value: uri}} +} + +func (e *Endpoint) String() string { + if e.RuntimeExpression != nil { + return e.RuntimeExpression.String() + } + if e.URITemplate != nil { + return e.URITemplate.String() + } + if e.EndpointConfig != nil { + return e.EndpointConfig.URI.String() + } + return "" +} + +// UnmarshalJSON implements custom unmarshalling for Endpoint. 
+func (e *Endpoint) UnmarshalJSON(data []byte) error { + if gjson.ValidBytes(data) && gjson.ParseBytes(data).IsObject() && len(gjson.ParseBytes(data).Map()) == 0 { + // Leave the Endpoint fields unset (nil) + return nil + } + + // Then try to unmarshal as URITemplate + if uriTemplate, err := UnmarshalURITemplate(data); err == nil { + e.URITemplate = uriTemplate + return nil + } + + // First try to unmarshal as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + e.RuntimeExpression = &runtimeExpr + return nil + } + + // Finally, try to unmarshal as EndpointConfiguration + var endpointConfig EndpointConfiguration + if err := json.Unmarshal(data, &endpointConfig); err == nil { + e.EndpointConfig = &endpointConfig + return nil + } + + return errors.New("failed to unmarshal Endpoint: data does not match any known schema") +} + +// MarshalJSON implements custom marshalling for Endpoint. +func (e *Endpoint) MarshalJSON() ([]byte, error) { + if e.RuntimeExpression != nil { + return json.Marshal(e.RuntimeExpression) + } + if e.URITemplate != nil { + return json.Marshal(e.URITemplate) + } + if e.EndpointConfig != nil { + return json.Marshal(e.EndpointConfig) + } + // Return an empty JSON object when no fields are set + return []byte("{}"), nil +} diff --git a/model/endpoint_test.go b/model/endpoint_test.go new file mode 100644 index 0000000..db2fce5 --- /dev/null +++ b/model/endpoint_test.go @@ -0,0 +1,186 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
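// Illustrative usage sketch (editorial addition, not part of this patch): the three
// shapes accepted by the Endpoint union type above — a literal URI, a runtime
// expression, and a full endpoint configuration object. It assumes only
// model/endpoint.go plus the authentication types from this PR and the v3 module path.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// A plain URI: NewEndpoint stores it as a LiteralUri, which marshals back to a string.
	e := model.NewEndpoint("http://example.com/api")
	out, _ := json.Marshal(e)
	fmt.Println(string(out)) // "http://example.com/api"

	// A runtime expression string is kept as a RuntimeExpression.
	var expr model.Endpoint
	_ = json.Unmarshal([]byte(`"${example}"`), &expr)
	fmt.Println(expr.RuntimeExpression != nil) // true

	// The object form is parsed into an EndpointConfiguration, including authentication.
	var cfg model.Endpoint
	_ = json.Unmarshal([]byte(`{"uri":"https://example.com/{id}","authentication":{"bearer":{"token":"abc"}}}`), &cfg)
	fmt.Println(cfg.EndpointConfig.URI.String())                                     // https://example.com/{id}
	fmt.Println(cfg.EndpointConfig.Authentication.AuthenticationPolicy.Bearer.Token) // abc
}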
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEndpoint_UnmarshalJSON(t *testing.T) { + t.Run("Valid RuntimeExpression", func(t *testing.T) { + input := `"${example}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.RuntimeExpression, "RuntimeExpression should be set") + assert.Equal(t, "${example}", endpoint.RuntimeExpression.Value, "RuntimeExpression value should match") + }) + + t.Run("Invalid RuntimeExpression", func(t *testing.T) { + input := `"123invalid-expression"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid runtime expression") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + }) + + t.Run("Invalid LiteralUriTemplate", func(t *testing.T) { + uriTemplate := &LiteralUriTemplate{Value: "example.com/{id}"} + assert.False(t, LiteralUriPattern.MatchString(uriTemplate.Value), "LiteralUriTemplate should not match URI pattern") + }) + + t.Run("Valid URITemplate", func(t *testing.T) { + input := `"http://example.com/{id}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.URITemplate, "URITemplate should be set") + }) + + t.Run("Valid EndpointConfiguration", func(t *testing.T) { + input := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "admin", "password": "admin" } + } + }` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Username, "Authentication Username should match") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Password, "Authentication Password should match") + }) + + t.Run("Valid EndpointConfiguration with reference", func(t *testing.T) { + input := `{ + "uri": "http://example.com/{id}", + "authentication": { + "oauth2": { "use": "secret" } + } + }` + + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.NotNil(t, endpoint.EndpointConfig.URI, "EndpointConfig URI should be set") + assert.Nil(t, endpoint.EndpointConfig.RuntimeExpression, "EndpointConfig Expression should not be set") + assert.Equal(t, "secret", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.OAuth2.Use, "Authentication secret should match") + b, err := json.Marshal(&endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, input, string(b), "Output JSON should match") + }) + + t.Run("Valid EndpointConfiguration with reference and expression", func(t *testing.T) { + input := `{ + "uri": "${example}", + "authentication": { + "oauth2": { "use": "secret" } + } + }` + + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.Nil(t, endpoint.EndpointConfig.URI, 
"EndpointConfig URI should not be set") + assert.NotNil(t, endpoint.EndpointConfig.RuntimeExpression, "EndpointConfig Expression should be set") + assert.Equal(t, "secret", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.OAuth2.Use, "Authentication secret should match") + b, err := json.Marshal(&endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, input, string(b), "Output JSON should match") + }) + + t.Run("Invalid JSON Structure", func(t *testing.T) { + input := `{"invalid": "data"}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid JSON structure") + }) + + t.Run("Empty input", func(t *testing.T) { + input := `{}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error for empty input") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + assert.Nil(t, endpoint.URITemplate, "URITemplate should not be set") + assert.Nil(t, endpoint.EndpointConfig, "EndpointConfig should not be set") + }) +} + +func TestEndpoint_MarshalJSON(t *testing.T) { + t.Run("Marshal RuntimeExpression", func(t *testing.T) { + endpoint := &Endpoint{ + RuntimeExpression: &RuntimeExpression{Value: "${example}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `"${example}"`, string(data), "output JSON should match") + }) + + t.Run("Marshal URITemplate", func(t *testing.T) { + endpoint := &Endpoint{ + URITemplate: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `"http://example.com/{id}"`, string(data), "output JSON should match") + }) + + t.Run("Marshal EndpointConfiguration", func(t *testing.T) { + endpoint := &Endpoint{ + EndpointConfig: &EndpointConfiguration{ + URI: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + Authentication: &ReferenceableAuthenticationPolicy{AuthenticationPolicy: &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: "john", + Password: "secret", + }}}, + }, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + expected := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "john", "password": "secret" } + } + }` + assert.JSONEq(t, expected, string(data), "output JSON should match") + }) + + t.Run("Marshal Empty Endpoint", func(t *testing.T) { + endpoint := Endpoint{} + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `{}`, string(data), "output JSON should be empty") + }) +} diff --git a/model/errors.go b/model/errors.go new file mode 100644 index 0000000..9700f17 --- /dev/null +++ b/model/errors.go @@ -0,0 +1,269 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "strings" +) + +// List of Standard Errors based on the Serverless Workflow specification. +// See: https://github.com/serverlessworkflow/specification/blob/main/dsl-reference.md#standard-error-types +const ( + ErrorTypeConfiguration = "https://serverlessworkflow.io/spec/1.0.0/errors/configuration" + ErrorTypeValidation = "https://serverlessworkflow.io/spec/1.0.0/errors/validation" + ErrorTypeExpression = "https://serverlessworkflow.io/spec/1.0.0/errors/expression" + ErrorTypeAuthentication = "https://serverlessworkflow.io/spec/1.0.0/errors/authentication" + ErrorTypeAuthorization = "https://serverlessworkflow.io/spec/1.0.0/errors/authorization" + ErrorTypeTimeout = "https://serverlessworkflow.io/spec/1.0.0/errors/timeout" + ErrorTypeCommunication = "https://serverlessworkflow.io/spec/1.0.0/errors/communication" + ErrorTypeRuntime = "https://serverlessworkflow.io/spec/1.0.0/errors/runtime" +) + +type Error struct { + // A URI reference that identifies the error type. + // For cross-compatibility concerns, it is strongly recommended to use Standard Error Types whenever possible. + // Runtimes MUST ensure that the property has been set when raising or escalating the error. + Type *URITemplateOrRuntimeExpr `json:"type" validate:"required"` + // The status code generated by the origin for this occurrence of the error. + // For cross-compatibility concerns, it is strongly recommended to use HTTP Status Codes whenever possible. + // Runtimes MUST ensure that the property has been set when raising or escalating the error. + Status int `json:"status" validate:"required"` + // A short, human-readable summary of the error. + Title *StringOrRuntimeExpr `json:"title,omitempty"` + // A human-readable explanation specific to this occurrence of the error. + Detail *StringOrRuntimeExpr `json:"detail,omitempty"` + // A JSON Pointer used to reference the component the error originates from. + // Runtimes MUST set the property when raising or escalating the error. Otherwise ignore. + Instance *JsonPointerOrRuntimeExpression `json:"instance,omitempty" validate:"omitempty"` +} + +type ErrorFilter struct { + Type string `json:"type,omitempty"` + Status int `json:"status,omitempty"` + Instance string `json:"instance,omitempty"` + Title string `json:"title,omitempty"` + Details string `json:"details,omitempty"` +} + +func (e *Error) Error() string { + return fmt.Sprintf("[%d] %s: %s (%s). 
Origin: '%s'", e.Status, e.Title, e.Detail, e.Type, e.Instance) +} + +// WithInstanceRef ensures the error has a valid JSON Pointer reference +func (e *Error) WithInstanceRef(workflow *Workflow, taskName string) *Error { + if e == nil { + return nil + } + + // Check if the instance is already set + if e.Instance.IsValid() { + return e + } + + // Generate a JSON pointer reference for the task within the workflow + //instance, pointerErr := GenerateJSONPointer(workflow, taskName) + //if pointerErr == nil { + // e.Instance = &JsonPointerOrRuntimeExpression{Value: instance} + //} + // TODO: log the pointer error + + return e +} + +// newError creates a new structured error +func newError(errType string, status int, title string, detail error, instance string) *Error { + if detail != nil { + return &Error{ + Type: NewUriTemplate(errType), + Status: status, + Title: NewStringOrRuntimeExpr(title), + Detail: NewStringOrRuntimeExpr(detail.Error()), + Instance: &JsonPointerOrRuntimeExpression{ + Value: instance, + }, + } + } + + return &Error{ + Type: NewUriTemplate(errType), + Status: status, + Title: NewStringOrRuntimeExpr(title), + Instance: &JsonPointerOrRuntimeExpression{ + Value: instance, + }, + } +} + +// Convenience Functions for Standard Errors + +func NewErrConfiguration(detail error, instance string) *Error { + return newError( + ErrorTypeConfiguration, + 400, + "Configuration Error", + detail, + instance, + ) +} + +func NewErrValidation(detail error, instance string) *Error { + return newError( + ErrorTypeValidation, + 400, + "Validation Error", + detail, + instance, + ) +} + +func NewErrExpression(detail error, instance string) *Error { + return newError( + ErrorTypeExpression, + 400, + "Expression Error", + detail, + instance, + ) +} + +func NewErrAuthentication(detail error, instance string) *Error { + return newError( + ErrorTypeAuthentication, + 401, + "Authentication Error", + detail, + instance, + ) +} + +func NewErrAuthorization(detail error, instance string) *Error { + return newError( + ErrorTypeAuthorization, + 403, + "Authorization Error", + detail, + instance, + ) +} + +func NewErrTimeout(detail error, instance string) *Error { + return newError( + ErrorTypeTimeout, + 408, + "Timeout Error", + detail, + instance, + ) +} + +func NewErrCommunication(detail error, instance string) *Error { + return newError( + ErrorTypeCommunication, + 500, + "Communication Error", + detail, + instance, + ) +} + +func NewErrRuntime(detail error, instance string) *Error { + return newError( + ErrorTypeRuntime, + 500, + "Runtime Error", + detail, + instance, + ) +} + +// Error Classification Functions + +func IsErrConfiguration(err error) bool { + return isErrorType(err, ErrorTypeConfiguration) +} + +func IsErrValidation(err error) bool { + return isErrorType(err, ErrorTypeValidation) +} + +func IsErrExpression(err error) bool { + return isErrorType(err, ErrorTypeExpression) +} + +func IsErrAuthentication(err error) bool { + return isErrorType(err, ErrorTypeAuthentication) +} + +func IsErrAuthorization(err error) bool { + return isErrorType(err, ErrorTypeAuthorization) +} + +func IsErrTimeout(err error) bool { + return isErrorType(err, ErrorTypeTimeout) +} + +func IsErrCommunication(err error) bool { + return isErrorType(err, ErrorTypeCommunication) +} + +func IsErrRuntime(err error) bool { + return isErrorType(err, ErrorTypeRuntime) +} + +// Helper function to check error type +func isErrorType(err error, errorType string) bool { + var e *Error + if ok := errors.As(err, &e); ok && 
strings.EqualFold(e.Type.String(), errorType) { + return true + } + return false +} + +// AsError attempts to extract a known error type from the given error. +// If the error is one of the predefined structured errors, it returns the *Error. +// Otherwise, it returns nil. +func AsError(err error) *Error { + var e *Error + if errors.As(err, &e) { + return e // Successfully extracted as a known error type + } + return nil // Not a known error +} + +// Serialization and Deserialization Functions + +func ErrorToJSON(err *Error) (string, error) { + if err == nil { + return "", fmt.Errorf("error is nil") + } + jsonBytes, marshalErr := json.Marshal(err) + if marshalErr != nil { + return "", fmt.Errorf("failed to marshal error: %w", marshalErr) + } + return string(jsonBytes), nil +} + +func ErrorFromJSON(jsonStr string) (*Error, error) { + var errObj Error + if err := json.Unmarshal([]byte(jsonStr), &errObj); err != nil { + return nil, fmt.Errorf("failed to unmarshal error JSON: %w", err) + } + return &errObj, nil +} + +// JsonPointer functions diff --git a/model/event.go b/model/event.go deleted file mode 100644 index cf44d99..0000000 --- a/model/event.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -const ( - // EventKindConsumed ... - EventKindConsumed EventKind = "consumed" - // EventKindProduced ... - EventKindProduced EventKind = "produced" -) - -func init() { - val.GetValidator().RegisterStructValidation(EventStructLevelValidation, Event{}) -} - -// EventStructLevelValidation custom validator for event kind consumed -func EventStructLevelValidation(structLevel validator.StructLevel) { - event := structLevel.Current().Interface().(Event) - - if event.Kind == EventKindConsumed && len(event.Type) == 0 { - structLevel.ReportError(reflect.ValueOf(event.Type), "Type", "type", "reqtypeconsumed", "") - } -} - -// EventKind ... -type EventKind string - -// Event ... -type Event struct { - Common - // Unique event name - Name string `json:"name" validate:"required"` - // CloudEvent source - Source string `json:"source,omitempty"` - // CloudEvent type - Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Default is 'consumed' - Kind EventKind `json:"kind,omitempty"` - // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload and context attributes should be accessible" - DataOnly bool `json:"dataOnly,omitempty"` - // CloudEvent correlation definitions - Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` -} - -// Correlation ... 
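The standard error helpers added in model/errors.go above pair constructors (NewErrValidation, NewErrTimeout, ...) with matching classifiers (IsErrValidation, ...) and JSON round-trip helpers (ErrorToJSON / ErrorFromJSON). A minimal usage sketch, written as an in-package snippet like the test files in this diff; the function name, the error detail, and the instance pointer "/do/0/validate" are illustrative only:

package model

import (
	"errors"
	"fmt"
)

// exampleValidationError sketches raising, classifying, and serializing one of
// the standard errors defined above. All literal values are illustrative.
func exampleValidationError() {
	var err error = NewErrValidation(errors.New("missing required field 'name'"), "/do/0/validate")

	if IsErrValidation(err) { // matches on the standard validation error type URI
		if e := AsError(err); e != nil {
			// e.Status is 400 and e.Type carries ErrorTypeValidation
			if s, jsonErr := ErrorToJSON(e); jsonErr == nil {
				fmt.Println(s)
			}
		}
	}
}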
-type Correlation struct { - // CloudEvent Extension Context Attribute name - ContextAttributeName string `json:"contextAttributeName" validate:"required"` - // CloudEvent Extension Context Attribute value - ContextAttributeValue string `json:"contextAttributeValue,omitempty"` -} - -// EventRef ... -type EventRef struct { - // Reference to the unique name of a 'produced' event definition - TriggerEventRef string `json:"triggerEventRef" validate:"required"` - // Reference to the unique name of a 'consumed' event definition - ResultEventRef string `json:"resultEventRef" validate:"required"` - // TODO: create StringOrMap structure - // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. - Data interface{} `json:"data,omitempty"` - // Add additional extension context attributes to the produced event - ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` -} diff --git a/model/extension.go b/model/extension.go new file mode 100644 index 0000000..b7b49ec --- /dev/null +++ b/model/extension.go @@ -0,0 +1,120 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Extension represents the definition of an extension. +type Extension struct { + Extend string `json:"extend" validate:"required,oneof=call composite emit for listen raise run set switch try wait all"` + When *RuntimeExpression `json:"when,omitempty"` + Before *TaskList `json:"before,omitempty" validate:"omitempty,dive"` + After *TaskList `json:"after,omitempty" validate:"omitempty,dive"` +} + +// ExtensionItem represents a named extension and its associated definition. +type ExtensionItem struct { + Key string `json:"-" validate:"required"` + Extension *Extension `json:"-" validate:"required"` +} + +// MarshalJSON for ExtensionItem to serialize as a single-key object. +func (ei *ExtensionItem) MarshalJSON() ([]byte, error) { + if ei == nil { + return nil, fmt.Errorf("cannot marshal a nil ExtensionItem") + } + + extensionJSON, err := json.Marshal(ei.Extension) + if err != nil { + return nil, fmt.Errorf("failed to marshal extension: %w", err) + } + + return json.Marshal(map[string]json.RawMessage{ + ei.Key: extensionJSON, + }) +} + +// UnmarshalJSON for ExtensionItem to deserialize from a single-key object. 
+func (ei *ExtensionItem) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionItem: %w", err) + } + + if len(raw) != 1 { + return fmt.Errorf("each ExtensionItem must have exactly one key") + } + + for key, extensionData := range raw { + var ext Extension + if err := json.Unmarshal(extensionData, &ext); err != nil { + return fmt.Errorf("failed to unmarshal extension %q: %w", key, err) + } + ei.Key = key + ei.Extension = &ext + break + } + + return nil +} + +// ExtensionList represents a list of extensions. +type ExtensionList []*ExtensionItem + +// Key retrieves all extensions with the specified key. +func (el *ExtensionList) Key(key string) *Extension { + for _, item := range *el { + if item.Key == key { + return item.Extension + } + } + return nil +} + +// UnmarshalJSON for ExtensionList to deserialize an array of ExtensionItem objects. +func (el *ExtensionList) UnmarshalJSON(data []byte) error { + var rawExtensions []json.RawMessage + if err := json.Unmarshal(data, &rawExtensions); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionList: %w", err) + } + + for _, raw := range rawExtensions { + var item ExtensionItem + if err := json.Unmarshal(raw, &item); err != nil { + return fmt.Errorf("failed to unmarshal extension item: %w", err) + } + *el = append(*el, &item) + } + + return nil +} + +// MarshalJSON for ExtensionList to serialize as an array of ExtensionItem objects. +func (el *ExtensionList) MarshalJSON() ([]byte, error) { + var serializedExtensions []json.RawMessage + + for _, item := range *el { + serialized, err := json.Marshal(item) + if err != nil { + return nil, fmt.Errorf("failed to marshal ExtensionItem: %w", err) + } + serializedExtensions = append(serializedExtensions, serialized) + } + + return json.Marshal(serializedExtensions) +} diff --git a/model/extension_test.go b/model/extension_test.go new file mode 100644 index 0000000..f258a4c --- /dev/null +++ b/model/extension_test.go @@ -0,0 +1,140 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
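ExtensionList above serializes as an array of single-key objects, and Key returns the first extension registered under a given name (or nil). A minimal round-trip sketch, in-package like the surrounding tests; the extension name "logging" and its contents are illustrative only:

package model

import (
	"encoding/json"
	"fmt"
)

// exampleExtensionList sketches how a list of named extensions round-trips:
// an array of single-key objects, looked up by name with Key().
func exampleExtensionList() error {
	data := []byte(`[
		{"logging": {"extend": "all", "when": "${ .enabled }"}}
	]`)

	var list ExtensionList
	if err := json.Unmarshal(data, &list); err != nil {
		return err
	}

	// Key returns the first extension registered under the given name, or nil.
	if ext := list.Key("logging"); ext != nil {
		fmt.Println(ext.Extend, ext.When.String())
	}

	out, err := json.Marshal(&list)
	if err != nil {
		return err
	}
	fmt.Println(string(out)) // same single-key-object array shape as the input
	return nil
}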
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + validator "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestExtension_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var extension Extension + err := json.Unmarshal([]byte(jsonData), &extension) + assert.NoError(t, err) + assert.Equal(t, "call", extension.Extend) + assert.Equal(t, NewExpr("${condition}"), extension.When) + + task1 := extension.Before.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + // Check if task2 exists before accessing its fields + task2 := extension.After.Key("task2") + assert.NotNil(t, task2, "task2 should not be nil") + openAPITask := task2.AsCallOpenAPITask() + assert.NotNil(t, openAPITask) + assert.Equal(t, "openapi", openAPITask.Call) + assert.Equal(t, "doc1", openAPITask.With.Document.Name) + assert.Equal(t, "op1", openAPITask.With.OperationID) +} + +func TestExtension_MarshalJSON(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(extension) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestExtension_Validation(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "doc1", // Missing Endpoint + }, + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(extension) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s': %s", + validationErr.StructNamespace(), validationErr.Tag(), validationErr.Param()) + } + + // Assert on specific validation errors + assert.Contains(t, validationErrors.Error(), "After[0].Task.With.Document.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") + } else { + t.Errorf("Unexpected error type: %v", err) + } +} diff --git a/model/function.go b/model/function.go deleted file mode 100644 index 662b341..0000000 --- a/model/function.go +++ /dev/null @@ 
-1,78 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "encoding/json" - -const ( - // FunctionTypeREST ... - FunctionTypeREST FunctionType = "rest" - // FunctionTypeRPC ... - FunctionTypeRPC FunctionType = "rpc" - // FunctionTypeExpression ... - FunctionTypeExpression FunctionType = "expression" - // FunctionTypeGraphQL ... - FunctionTypeGraphQL FunctionType = "graphql" - // FunctionTypeAsyncAPI ... - FunctionTypeAsyncAPI FunctionType = "asyncapi" - // FunctionTypeOData ... - FunctionTypeOData FunctionType = "odata" -) - -// FunctionType ... -type FunctionType string - -// Function ... -type Function struct { - Common - // Unique function name - Name string `json:"name" validate:"required"` - // If type is `rest`, #. If type is `rpc`, ##. If type is `expression`, defines the workflow expression. - Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `rest`, `rpc`, `expression` or `graphql`. Default is `rest` - Type FunctionType `json:"type,omitempty"` - // References an auth definition name to be used to access to resource defined in the operation parameter - AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` -} - -// FunctionRef ... -type FunctionRef struct { - // Name of the referenced function - RefName string `json:"refName" validate:"required"` - // Function arguments - Arguments map[string]interface{} `json:"arguments,omitempty"` - // String containing a valid GraphQL selection set - SelectionSet string `json:"selectionSet,omitempty"` -} - -// UnmarshalJSON ... -func (f *FunctionRef) UnmarshalJSON(data []byte) error { - funcRef := make(map[string]interface{}) - if err := json.Unmarshal(data, &funcRef); err != nil { - f.RefName, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - - f.RefName = requiresNotNilOrEmpty(funcRef["refName"]) - if _, found := funcRef["arguments"]; found { - f.Arguments = funcRef["arguments"].(map[string]interface{}) - } - f.SelectionSet = requiresNotNilOrEmpty(funcRef["selectionSet"]) - - return nil -} diff --git a/model/objects.go b/model/objects.go new file mode 100644 index 0000000..2bb8dd9 --- /dev/null +++ b/model/objects.go @@ -0,0 +1,345 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" +) + +var _ Object = &ObjectOrString{} +var _ Object = &ObjectOrRuntimeExpr{} +var _ Object = &RuntimeExpression{} +var _ Object = &URITemplateOrRuntimeExpr{} +var _ Object = &StringOrRuntimeExpr{} +var _ Object = &JsonPointerOrRuntimeExpression{} + +type Object interface { + String() string + GetValue() interface{} +} + +// ObjectOrString is a type that can hold either a string or an object. +type ObjectOrString struct { + Value interface{} `validate:"object_or_string"` +} + +func (o *ObjectOrString) String() string { + return fmt.Sprintf("%v", o.Value) +} + +func (o *ObjectOrString) GetValue() interface{} { + return o.Value +} + +// UnmarshalJSON unmarshals data into either a string or an object. +func (o *ObjectOrString) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + o.Value = asString + return nil + } + + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + return errors.New("ObjectOrString must be a string or an object") +} + +// MarshalJSON marshals ObjectOrString into JSON. +func (o *ObjectOrString) MarshalJSON() ([]byte, error) { + return json.Marshal(o.Value) +} + +// ObjectOrRuntimeExpr is a type that can hold either a RuntimeExpression or an object. +type ObjectOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"object_or_runtime_expr"` // Custom validation tag. +} + +func NewObjectOrRuntimeExpr(value interface{}) *ObjectOrRuntimeExpr { + return &ObjectOrRuntimeExpr{ + Value: value, + } +} + +func (o *ObjectOrRuntimeExpr) String() string { + return fmt.Sprintf("%v", o.Value) +} + +func (o *ObjectOrRuntimeExpr) GetValue() interface{} { + return o.Value +} + +func (o *ObjectOrRuntimeExpr) AsStringOrMap() interface{} { + switch o.Value.(type) { + case map[string]interface{}: + return o.Value.(map[string]interface{}) + case string: + return o.Value.(string) + case RuntimeExpression: + return o.Value.(RuntimeExpression).Value + } + return nil +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or an object. +func (o *ObjectOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + o.Value = runtimeExpr + return nil + } + + // Attempt to decode as a generic object + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("ObjectOrRuntimeExpr must be a runtime expression or an object") +} + +// MarshalJSON marshals ObjectOrRuntimeExpr into JSON. +func (o *ObjectOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := o.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case map[string]interface{}: + return json.Marshal(v) + default: + return nil, fmt.Errorf("ObjectOrRuntimeExpr contains unsupported type") + } +} + +// Validate validates the ObjectOrRuntimeExpr using the custom validation logic. 
+func (o *ObjectOrRuntimeExpr) Validate() error { + switch v := o.Value.(type) { + case RuntimeExpression: + if !v.IsValid() { + return fmt.Errorf("invalid runtime expression: %s", v.Value) + } + case map[string]interface{}: + if len(v) == 0 { + return fmt.Errorf("object cannot be empty") + } + default: + return fmt.Errorf("unsupported value type for ObjectOrRuntimeExpr") + } + return nil +} + +func NewStringOrRuntimeExpr(value string) *StringOrRuntimeExpr { + return &StringOrRuntimeExpr{ + Value: value, + } +} + +// StringOrRuntimeExpr is a type that can hold either a RuntimeExpression or a string. +type StringOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"string_or_runtime_expr"` // Custom validation tag. +} + +func (s *StringOrRuntimeExpr) AsObjectOrRuntimeExpr() *ObjectOrRuntimeExpr { + return &ObjectOrRuntimeExpr{Value: s.Value} +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or a string. +func (s *StringOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + s.Value = runtimeExpr + return nil + } + + // Attempt to decode as a string + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + s.Value = asString + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("StringOrRuntimeExpr must be a runtime expression or a string") +} + +// MarshalJSON marshals StringOrRuntimeExpr into JSON. +func (s *StringOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := s.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + return json.Marshal(v) + default: + return nil, fmt.Errorf("StringOrRuntimeExpr contains unsupported type") + } +} + +func (s *StringOrRuntimeExpr) String() string { + switch v := s.Value.(type) { + case RuntimeExpression: + return v.String() + case string: + return v + default: + return "" + } +} + +func (s *StringOrRuntimeExpr) GetValue() interface{} { + return s.Value +} + +// URITemplateOrRuntimeExpr represents a type that can be a URITemplate or a RuntimeExpression. +type URITemplateOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"uri_template_or_runtime_expr"` // Custom validation. +} + +func NewUriTemplate(uriTemplate string) *URITemplateOrRuntimeExpr { + return &URITemplateOrRuntimeExpr{ + Value: uriTemplate, + } +} + +// UnmarshalJSON unmarshals data into either a URITemplate or a RuntimeExpression. +func (u *URITemplateOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as URITemplate + uriTemplate, err := UnmarshalURITemplate(data) + if err == nil { + u.Value = uriTemplate + return nil + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + u.Value = runtimeExpr + return nil + } + + // Return an error if neither succeeds + return fmt.Errorf("URITemplateOrRuntimeExpr must be a valid URITemplate or RuntimeExpression") +} + +// MarshalJSON marshals URITemplateOrRuntimeExpr into JSON. 
+func (u *URITemplateOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := u.Value.(type) { + case URITemplate: + return json.Marshal(v.String()) + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + // Attempt to marshal as RuntimeExpression + runtimeExpr := RuntimeExpression{Value: v} + if runtimeExpr.IsValid() { + return json.Marshal(runtimeExpr.String()) + } + // Otherwise, treat as a Literal URI + uriTemplate, err := UnmarshalURITemplate([]byte(fmt.Sprintf(`"%s"`, v))) + if err == nil { + return json.Marshal(uriTemplate.String()) + } + return nil, fmt.Errorf("invalid string for URITemplateOrRuntimeExpr: %s", v) + default: + return nil, fmt.Errorf("unsupported type for URITemplateOrRuntimeExpr: %T", v) + } +} + +func (u *URITemplateOrRuntimeExpr) String() string { + switch v := u.Value.(type) { + case URITemplate: + return v.String() + case RuntimeExpression: + return v.String() + case string: + return v + } + return "" +} + +func (u *URITemplateOrRuntimeExpr) GetValue() interface{} { + return u.Value +} + +// JsonPointerOrRuntimeExpression represents a type that can be a JSON Pointer or a RuntimeExpression. +type JsonPointerOrRuntimeExpression struct { + Value interface{} `json:"-" validate:"json_pointer_or_runtime_expr"` // Custom validation tag. +} + +// JSONPointerPattern validates JSON Pointers as per RFC 6901. +var JSONPointerPattern = regexp.MustCompile(`^(/([^/~]|~[01])*)*$`) + +// UnmarshalJSON unmarshals data into either a JSON Pointer or a RuntimeExpression. +func (j *JsonPointerOrRuntimeExpression) UnmarshalJSON(data []byte) error { + // Attempt to decode as a JSON Pointer + var jsonPointer string + if err := json.Unmarshal(data, &jsonPointer); err == nil { + if JSONPointerPattern.MatchString(jsonPointer) { + j.Value = jsonPointer + return nil + } + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil { + if runtimeExpr.IsValid() { + j.Value = runtimeExpr + return nil + } + } + + // If neither succeeds, return an error + return fmt.Errorf("JsonPointerOrRuntimeExpression must be a valid JSON Pointer or RuntimeExpression") +} + +// MarshalJSON marshals JsonPointerOrRuntimeExpression into JSON. +func (j *JsonPointerOrRuntimeExpression) MarshalJSON() ([]byte, error) { + switch v := j.Value.(type) { + case string: // JSON Pointer + return json.Marshal(v) + case RuntimeExpression: + return json.Marshal(v.String()) + default: + return nil, fmt.Errorf("JsonPointerOrRuntimeExpression contains unsupported type") + } +} + +func (j *JsonPointerOrRuntimeExpression) String() string { + switch v := j.Value.(type) { + case RuntimeExpression: + return v.String() + case string: + return v + default: + return "" + } +} + +func (j *JsonPointerOrRuntimeExpression) GetValue() interface{} { + return j.Value +} + +func (j *JsonPointerOrRuntimeExpression) IsValid() bool { + return JSONPointerPattern.MatchString(j.String()) +} diff --git a/model/objects_test.go b/model/objects_test.go new file mode 100644 index 0000000..c77d3bb --- /dev/null +++ b/model/objects_test.go @@ -0,0 +1,190 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
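JsonPointerOrRuntimeExpression, the last of the union types defined in model/objects.go above and the type behind Error.Instance, accepts either an RFC 6901 JSON Pointer or a runtime expression. A small sketch of the two accepted shapes, in-package like the surrounding tests; both input strings are illustrative only:

package model

import (
	"encoding/json"
	"fmt"
)

// exampleInstancePointer sketches the two inputs accepted by
// JsonPointerOrRuntimeExpression during unmarshalling.
func exampleInstancePointer() error {
	var byPointer JsonPointerOrRuntimeExpression
	if err := json.Unmarshal([]byte(`"/do/0/callFunction"`), &byPointer); err != nil {
		return err
	}
	fmt.Println(byPointer.String(), byPointer.IsValid()) // "/do/0/callFunction" true

	var byExpr JsonPointerOrRuntimeExpression
	if err := json.Unmarshal([]byte(`"${ .task.reference }"`), &byExpr); err != nil {
		return err
	}
	// Stored as a RuntimeExpression; IsValid() checks the JSON Pointer pattern, so it is false here.
	fmt.Println(byExpr.String(), byExpr.IsValid())
	return nil
}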
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestObjectOrRuntimeExpr_UnmarshalJSON(t *testing.T) { + cases := []struct { + Name string + JSON string + Expected interface{} + ShouldErr bool + }{ + { + Name: "Unmarshal valid string", + JSON: `"${ expression }"`, + Expected: RuntimeExpression{Value: "${ expression }"}, + ShouldErr: false, + }, + { + Name: "Unmarshal valid object", + JSON: `{ + "key": "value" + }`, + Expected: map[string]interface{}{ + "key": "value", + }, + ShouldErr: false, + }, + { + Name: "Unmarshal invalid type", + JSON: `123`, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + var obj ObjectOrRuntimeExpr + err := json.Unmarshal([]byte(tc.JSON), &obj) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + assert.Equal(t, tc.Expected, obj.Value, "unexpected unmarshalled value") + } + }) + } +} + +func TestURITemplateOrRuntimeExprValidation(t *testing.T) { + cases := []struct { + Name string + Input *URITemplateOrRuntimeExpr + ShouldErr bool + }{ + { + Name: "Valid URI template", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + }, + ShouldErr: false, + }, + { + Name: "Valid URI", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "http://example.com"}, + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Invalid URI format", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "invalid-uri"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: &URITemplateOrRuntimeExpr{ + Value: 123, + }, + ShouldErr: true, + }, + { + Name: "Valid URI as string", + Input: &URITemplateOrRuntimeExpr{ + Value: "http://example.com", + }, + ShouldErr: false, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "uri_template_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} + +func TestJsonPointerOrRuntimeExpressionValidation(t *testing.T) { + cases := []struct { + Name string + Input JsonPointerOrRuntimeExpression + ShouldErr bool + }{ + { + Name: "Valid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "/valid/json/pointer", + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "invalid-json-pointer", + }, + ShouldErr: true, + }, + { + Name: "Invalid 
runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: JsonPointerOrRuntimeExpression{ + Value: 123, + }, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "json_pointer_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} diff --git a/model/retry.go b/model/retry.go deleted file mode 100644 index 5f232af..0000000 --- a/model/retry.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - "k8s.io/apimachinery/pkg/util/intstr" -) - -// Retry ... -type Retry struct { - // Unique retry strategy name - Name string `json:"name" validate:"required"` - // Time delay between retry attempts (ISO 8601 duration format) - Delay string `json:"delay,omitempty"` - // Maximum time delay between retry attempts (ISO 8601 duration format) - MaxDelay string `json:"maxDelay,omitempty"` - // Static value by which the delay increases during each attempt (ISO 8601 time format) - Increment string `json:"increment,omitempty"` - // Numeric value, if specified the delay between retries is multiplied by this value. - Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` - // Maximum number of retry attempts. - MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` - // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) - Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` -} diff --git a/model/runtime_expression.go b/model/runtime_expression.go new file mode 100644 index 0000000..adef566 --- /dev/null +++ b/model/runtime_expression.go @@ -0,0 +1,106 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/itchyny/gojq" +) + +// RuntimeExpression represents a runtime expression. +type RuntimeExpression struct { + Value string `json:"-" validate:"required"` +} + +// NewRuntimeExpression is an alias for NewExpr +var NewRuntimeExpression = NewExpr + +// NewExpr creates a new RuntimeExpression instance +func NewExpr(runtimeExpression string) *RuntimeExpression { + return &RuntimeExpression{Value: runtimeExpression} +} + +// IsStrictExpr returns true if the string is enclosed in `${ }` +func IsStrictExpr(expression string) bool { + return strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") +} + +// SanitizeExpr processes the expression to ensure it's ready for evaluation +// It removes `${}` if present and replaces single quotes with double quotes +func SanitizeExpr(expression string) string { + // Remove `${}` enclosure if present + if IsStrictExpr(expression) { + expression = strings.TrimSpace(expression[2 : len(expression)-1]) + } + + // Replace single quotes with double quotes + expression = strings.ReplaceAll(expression, "'", "\"") + + return expression +} + +func IsValidExpr(expression string) bool { + expression = SanitizeExpr(expression) + _, err := gojq.Parse(expression) + return err == nil +} + +// NormalizeExpr adds ${} to the given string +func NormalizeExpr(expr string) string { + if strings.HasPrefix(expr, "${") { + return expr + } + return fmt.Sprintf("${%s}", expr) +} + +// IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. +func (r *RuntimeExpression) IsValid() bool { + return IsValidExpr(r.Value) +} + +// UnmarshalJSON implements custom unmarshalling for RuntimeExpression. +func (r *RuntimeExpression) UnmarshalJSON(data []byte) error { + // Decode the input as a string + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RuntimeExpression: %w", err) + } + + // Assign the value + r.Value = raw + + // Validate the runtime expression + if !r.IsValid() { + return fmt.Errorf("invalid runtime expression format: %s", raw) + } + + return nil +} + +// MarshalJSON implements custom marshalling for RuntimeExpression. +func (r *RuntimeExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(r.Value) +} + +func (r *RuntimeExpression) String() string { + return r.Value +} + +func (r *RuntimeExpression) GetValue() interface{} { + return r.Value +} diff --git a/model/runtime_expression_test.go b/model/runtime_expression_test.go new file mode 100644 index 0000000..770af70 --- /dev/null +++ b/model/runtime_expression_test.go @@ -0,0 +1,219 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
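The helpers in model/runtime_expression.go above are designed to compose: NormalizeExpr adds the ${ } wrapper, SanitizeExpr removes it (and swaps single quotes for double quotes) before handing the expression to gojq, and IsValidExpr combines sanitization with parsing. A short sketch, in-package like the surrounding tests; the jq expression is illustrative only:

package model

import "fmt"

// exampleExpressionHelpers shows how the expression helpers relate to each other.
func exampleExpressionHelpers() {
	raw := ".order.items | length"

	normalized := NormalizeExpr(raw)                        // "${.order.items | length}"
	fmt.Println(IsStrictExpr(normalized), IsStrictExpr(raw)) // true false

	fmt.Println(SanitizeExpr(normalized))                  // back to ".order.items | length"
	fmt.Println(IsValidExpr(normalized), IsValidExpr(raw)) // true true
}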
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRuntimeExpressionUnmarshalJSON(t *testing.T) { + tests := []struct { + Name string + JSONInput string + Expected string + ExpectErr bool + }{ + { + Name: "Valid RuntimeExpression", + JSONInput: `{ "expression": "${runtime.value}" }`, + Expected: "${runtime.value}", + ExpectErr: false, + }, + { + Name: "Invalid RuntimeExpression", + JSONInput: `{ "expression": "1234invalid_runtime" }`, + Expected: "", + ExpectErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.Name, func(t *testing.T) { + var acme *RuntimeExpressionAcme + err := json.Unmarshal([]byte(tc.JSONInput), &acme) + + if tc.ExpectErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, acme.Expression.Value) + } + + // Test marshalling + if !tc.ExpectErr { + output, err := json.Marshal(acme) + assert.NoError(t, err) + assert.JSONEq(t, tc.JSONInput, string(output)) + } + }) + } +} + +// EndpointAcme represents a struct using URITemplate. +type RuntimeExpressionAcme struct { + Expression RuntimeExpression `json:"expression"` +} + +func TestIsStrictExpr(t *testing.T) { + tests := []struct { + name string + expression string + want bool + }{ + { + name: "StrictExpr with braces", + expression: "${.some.path}", + want: true, + }, + { + name: "Missing closing brace", + expression: "${.some.path", + want: false, + }, + { + name: "Missing opening brace", + expression: ".some.path}", + want: false, + }, + { + name: "Empty string", + expression: "", + want: false, + }, + { + name: "No braces at all", + expression: ".some.path", + want: false, + }, + { + name: "With spaces but still correct", + expression: "${ .some.path }", + want: true, + }, + { + name: "Only braces", + expression: "${}", + want: true, // Technically matches prefix+suffix + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := IsStrictExpr(tc.expression) + if got != tc.want { + t.Errorf("IsStrictExpr(%q) = %v, want %v", tc.expression, got, tc.want) + } + }) + } +} + +func TestSanitize(t *testing.T) { + tests := []struct { + name string + expression string + want string + }{ + { + name: "Remove braces and replace single quotes", + expression: "${ 'some.path' }", + want: `"some.path"`, + }, + { + name: "Already sanitized string, no braces", + expression: ".some.path", + want: ".some.path", + }, + { + name: "Multiple single quotes", + expression: "${ 'foo' + 'bar' }", + want: `"foo" + "bar"`, + }, + { + name: "Only braces with spaces", + expression: "${ }", + want: "", + }, + { + name: "No braces, just single quotes to be replaced", + expression: "'some.path'", + want: `"some.path"`, + }, + { + name: "Nothing to sanitize", + expression: "", + want: "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := SanitizeExpr(tc.expression) + if got != tc.want { + t.Errorf("Sanitize(%q) = %q, want %q", tc.expression, got, tc.want) + } + }) + } +} + +func TestIsValid(t *testing.T) { + tests := []struct { + name string + expression string + want bool + }{ + { + name: "Valid expression - simple path", + expression: "${ .foo }", + want: true, + }, + { + name: "Valid expression - array slice", + expression: "${ .arr[0] }", + want: true, + }, + { + name: "Invalid syntax", + expression: "${ .foo( }", + want: false, + }, + { + name: "No braces but valid JQ (directly provided)", + expression: ".bar", + want: true, + }, + { + name: "Empty expression", + expression: "", + 
want: true, // empty is parseable but yields an empty query + }, + { + name: "Invalid bracket usage", + expression: "${ .arr[ }", + want: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := IsValidExpr(tc.expression) + if got != tc.want { + t.Errorf("IsValid(%q) = %v, want %v", tc.expression, got, tc.want) + } + }) + } +} diff --git a/model/states.go b/model/states.go deleted file mode 100644 index 13272ae..0000000 --- a/model/states.go +++ /dev/null @@ -1,487 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -const ( - //StateTypeDelay ... - StateTypeDelay = "delay" - // StateTypeEvent ... - StateTypeEvent = "event" - // StateTypeOperation ... - StateTypeOperation = "operation" - // StateTypeParallel ... - StateTypeParallel = "parallel" - // StateTypeSwitch ... - StateTypeSwitch = "switch" - // StateTypeForEach ... - StateTypeForEach = "foreach" - // StateTypeInject ... - StateTypeInject = "inject" - // StateTypeCallback ... - StateTypeCallback = "callback" - // StateTypeSleep ... - StateTypeSleep = "sleep" - - // CompletionTypeAllOf ... - CompletionTypeAllOf CompletionType = "allOf" - // CompletionTypeAtLeast ... - CompletionTypeAtLeast CompletionType = "atLeast" - - // ForEachModeTypeSequential ... - ForEachModeTypeSequential ForEachModeType = "sequential" - // ForEachModeTypeParallel ... - ForEachModeTypeParallel ForEachModeType = "parallel" -) - -// StateType ... -type StateType string - -// CompletionType Option types on how to complete branch execution. -type CompletionType string - -// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) -type ForEachModeType string - -// State definition for a Workflow state -type State interface { - GetID() string - GetName() string - GetType() StateType - GetOnErrors() []OnError - GetTransition() *Transition - GetStateDataFilter() *StateDataFilter - GetCompensatedBy() string - GetUsedForCompensation() bool - GetEnd() *End - GetMetadata() *Metadata -} - -// BaseState ... -type BaseState struct { - // Unique State id - ID string `json:"id,omitempty" validate:"omitempty,min=1"` - // State name - Name string `json:"name" validate:"required"` - // State type - Type StateType `json:"type" validate:"required"` - // States error handling and retries definitions - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` - // Next transition of the workflow after the time delay - Transition *Transition `json:"transition,omitempty"` - // State data filter - StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` - // Unique Name of a workflow state which is responsible for compensation of this state - CompensatedBy string `json:"compensatedBy,omitempty" validate:"omitempty,min=1"` - // If true, this state is used to compensate another state. 
Default is false - UsedForCompensation bool `json:"usedForCompensation,omitempty"` - // State end definition - End *End `json:"end,omitempty"` - Metadata *Metadata `json:"metadata,omitempty"` -} - -// GetOnErrors ... -func (s *BaseState) GetOnErrors() []OnError { return s.OnErrors } - -// GetCompensatedBy ... -func (s *BaseState) GetCompensatedBy() string { return s.CompensatedBy } - -// GetTransition ... -func (s *BaseState) GetTransition() *Transition { return s.Transition } - -// GetUsedForCompensation ... -func (s *BaseState) GetUsedForCompensation() bool { return s.UsedForCompensation } - -// GetEnd ... -func (s *BaseState) GetEnd() *End { return s.End } - -// GetID ... -func (s *BaseState) GetID() string { return s.ID } - -// GetName ... -func (s *BaseState) GetName() string { return s.Name } - -// GetType ... -func (s *BaseState) GetType() StateType { return s.Type } - -// GetStateDataFilter ... -func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFilter } - -// GetMetadata ... -func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } - -// DelayState Causes the workflow execution to delay for a specified duration -type DelayState struct { - BaseState - // Amount of time (ISO 8601 format) to delay - TimeDelay string `json:"timeDelay" validate:"required"` -} - -// EventState This state is used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel -type EventState struct { - BaseState - // If true consuming one of the defined events causes its associated actions to be performed. If false all of the defined events must be consumed in order for actions to be performed - Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed - OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts - Timeout EventStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON ... -func (e *EventState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &e.BaseState); err != nil { - return err - } - - eventStateMap := make(map[string]interface{}) - if err := json.Unmarshal(data, &eventStateMap); err != nil { - return err - } - - e.Exclusive = true - - if eventStateMap["exclusive"] != nil { - exclusiveVal, ok := eventStateMap["exclusive"].(bool) - if ok { - e.Exclusive = exclusiveVal - } - } - - eventStateRaw := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &eventStateRaw); err != nil { - return err - } - if err := json.Unmarshal(eventStateRaw["onEvents"], &e.OnEvents); err != nil { - return err - } - if err := unmarshalKey("timeouts", eventStateRaw, &e.Timeout); err != nil { - return err - } - - return nil -} - -// EventStateTimeout ... -type EventStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - -// OperationState Defines actions be performed. Does not wait for incoming events -type OperationState struct { - BaseState - // Specifies whether actions are performed in sequence or in parallel - ActionMode ActionMode `json:"actionMode,omitempty"` - // Actions to be performed - Actions []Action `json:"actions" validate:"required,min=1,dive"` - // State specific timeouts - Timeouts OperationStateTimeout `json:"timeouts,omitempty"` -} - -// OperationStateTimeout ... 
-type OperationStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` -} - -// ParallelState Consists of a number of states that are executed in parallel -type ParallelState struct { - BaseState - // Branch Definitions - Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. - CompletionType CompletionType `json:"completionType,omitempty"` - // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." - NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` - // State specific timeouts - Timeouts ParallelStateTimeout `json:"timeouts,omitempty"` -} - -// ParallelStateTimeout ... -type ParallelStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` -} - -// InjectState ... -type InjectState struct { - BaseState - // JSON object which can be set as states data input and can be manipulated via filters - Data map[string]interface{} `json:"data" validate:"required,min=1"` - // State specific timeouts - Timeouts InjectStateTimeout `json:"timeouts,omitempty"` -} - -// InjectStateTimeout ... -type InjectStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - -// ForEachState ... -type ForEachState struct { - BaseState - // Workflow expression selecting an array element of the states data - InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration - OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array - IterationParam string `json:"iterationParam" validate:"required"` - // Specifies how upper bound on how many iterations may run in parallel - BatchSize intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection - Actions []Action `json:"actions,omitempty"` - // State specific timeout - Timeouts ForEachStateTimeout `json:"timeouts,omitempty"` - // Mode Specifies how iterations are to be performed (sequentially or in parallel) - Mode ForEachModeType `json:"mode,omitempty"` -} - -// ForEachStateTimeout ... -type ForEachStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` -} - -// CallbackState ... -type CallbackState struct { - BaseState - // Defines the action to be executed - Action Action `json:"action" validate:"required"` - // References a unique callback event name in the defined workflow events - EventRef string `json:"eventRef" validate:"required"` - // Time period to wait for incoming events (ISO 8601 format) - Timeouts CallbackStateTimeout `json:"timeouts" validate:"required"` - // Event data filter - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -// CallbackStateTimeout ... 
-type CallbackStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - -// SleepState ... -type SleepState struct { - BaseState - // Duration (ISO 8601 duration format) to sleep - Duration string `json:"duration" validate:"required"` - // Timeouts State specific timeouts - Timeouts SleepStateTimeout `json:"timeouts,omitempty"` -} - -// SleepStateTimeout ... -type SleepStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - -// BaseSwitchState ... -type BaseSwitchState struct { - BaseState - // Default transition of the workflow if there is no matching data conditions. Can include a transition or end definition - DefaultCondition DefaultCondition `json:"defaultCondition,omitempty"` -} - -// EventBasedSwitchState Permits transitions to other states based on events -type EventBasedSwitchState struct { - BaseSwitchState - // Defines conditions evaluated against events - EventConditions []EventCondition `json:"eventConditions" validate:"required,min=1,dive"` - // State specific timeouts - Timeouts EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON implementation for json Unmarshal function for the Eventbasedswitch type -func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { - return err - } - eventBasedSwitch := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &eventBasedSwitch); err != nil { - return err - } - var rawConditions []json.RawMessage - if err := unmarshalKey("timeouts", eventBasedSwitch, &j.Timeouts); err != nil { - return err - } - if err := json.Unmarshal(eventBasedSwitch["eventConditions"], &rawConditions); err != nil { - return err - } - - j.EventConditions = make([]EventCondition, len(rawConditions)) - var mapConditions map[string]interface{} - for i, rawCondition := range rawConditions { - if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { - return err - } - var condition EventCondition - if _, ok := mapConditions["end"]; ok { - condition = &EndEventCondition{} - } else { - condition = &TransitionEventCondition{} - } - if err := json.Unmarshal(rawCondition, condition); err != nil { - return err - } - j.EventConditions[i] = condition - } - return nil -} - -// EventBasedSwitchStateTimeout ... -type EventBasedSwitchStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - -// EventCondition ... -type EventCondition interface { - GetName() string - GetEventRef() string - GetEventDataFilter() EventDataFilter - GetMetadata() Metadata -} - -// BaseEventCondition ... -type BaseEventCondition struct { - // Event condition name - Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events - EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` - Metadata Metadata `json:"metadata,omitempty"` -} - -// GetEventRef ... -func (e *BaseEventCondition) GetEventRef() string { return e.EventRef } - -// GetEventDataFilter ... -func (e *BaseEventCondition) GetEventDataFilter() EventDataFilter { return e.EventDataFilter } - -// GetMetadata ... 
-func (e *BaseEventCondition) GetMetadata() Metadata { return e.Metadata } - -// GetName ... -func (e *BaseEventCondition) GetName() string { return e.Name } - -// TransitionEventCondition Switch state data event condition -type TransitionEventCondition struct { - BaseEventCondition - // Next transition of the workflow if there is valid matches - Transition Transition `json:"transition" validate:"required"` -} - -// EndEventCondition Switch state data event condition -type EndEventCondition struct { - BaseEventCondition - // Explicit transition to end - End End `json:"end" validate:"required"` -} - -// DataBasedSwitchState Permits transitions to other states based on data conditions -type DataBasedSwitchState struct { - BaseSwitchState - DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` - Timeouts DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON implementation for json Unmarshal function for the Databasedswitch type -func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { - return err - } - dataBasedSwitch := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &dataBasedSwitch); err != nil { - return err - } - var rawConditions []json.RawMessage - if err := unmarshalKey("timeouts", dataBasedSwitch, &j.Timeouts); err != nil { - return err - } - if err := json.Unmarshal(dataBasedSwitch["dataConditions"], &rawConditions); err != nil { - return err - } - j.DataConditions = make([]DataCondition, len(rawConditions)) - var mapConditions map[string]interface{} - for i, rawCondition := range rawConditions { - if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { - return err - } - var condition DataCondition - if _, ok := mapConditions["end"]; ok { - condition = &EndDataCondition{} - } else { - condition = &TransitionDataCondition{} - } - if err := json.Unmarshal(rawCondition, condition); err != nil { - return err - } - j.DataConditions[i] = condition - } - return nil -} - -// DataBasedSwitchStateTimeout ... -type DataBasedSwitchStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - -// DataCondition ... -type DataCondition interface { - GetName() string - GetCondition() string - GetMetadata() Metadata -} - -// BaseDataCondition ... -type BaseDataCondition struct { - // Data condition name - Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false - Condition string `json:"condition" validate:"required"` - Metadata Metadata `json:"metadata,omitempty"` -} - -// GetName ... -func (b *BaseDataCondition) GetName() string { return b.Name } - -// GetCondition ... -func (b *BaseDataCondition) GetCondition() string { return b.Condition } - -// GetMetadata ... -func (b *BaseDataCondition) GetMetadata() Metadata { return b.Metadata } - -// TransitionDataCondition ... -type TransitionDataCondition struct { - BaseDataCondition - // Workflow transition if condition is evaluated to true - Transition Transition `json:"transition" validate:"required"` -} - -// EndDataCondition ... 
-type EndDataCondition struct { - BaseDataCondition - // Workflow end definition - End End `json:"end" validate:"required"` -} diff --git a/model/task.go b/model/task.go new file mode 100644 index 0000000..4edbd40 --- /dev/null +++ b/model/task.go @@ -0,0 +1,410 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TaskBase struct { + // A runtime expression, if any, used to determine whether or not the task should be run. + If *RuntimeExpression `json:"if,omitempty" validate:"omitempty"` + // Configure the task's input. + Input *Input `json:"input,omitempty" validate:"omitempty"` + // Configure the task's output. + Output *Output `json:"output,omitempty" validate:"omitempty"` + // Export task output to context. + Export *Export `json:"export,omitempty" validate:"omitempty"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" validate:"omitempty"` + // The flow directive to be performed upon completion of the task. + Then *FlowDirective `json:"then,omitempty" validate:"omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` +} + +// Task represents a discrete unit of work in a workflow. +type Task interface { + GetBase() *TaskBase +} + +type NamedTaskMap map[string]Task + +// UnmarshalJSON for NamedTaskMap to ensure proper deserialization. +func (ntm *NamedTaskMap) UnmarshalJSON(data []byte) error { + var rawTasks map[string]json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for name, raw := range rawTasks { + task, err := unmarshalTask(name, raw) + if err != nil { + return err + } + + if *ntm == nil { + *ntm = make(map[string]Task) + } + (*ntm)[name] = task + } + + return nil +} + +// TaskList represents a list of named tasks to perform. +type TaskList []*TaskItem + +// Next gets the next item in the list based on the current index +func (tl *TaskList) Next(currentIdx int) (int, *TaskItem) { + if currentIdx == -1 || currentIdx >= len(*tl) { + return -1, nil + } + + current := (*tl)[currentIdx] + if current.GetBase() != nil && current.GetBase().Then != nil { + then := current.GetBase().Then + if then.IsTermination() { + return -1, nil + } + return tl.KeyAndIndex(then.Value) + } + + // Proceed sequentially if no 'then' is specified + if currentIdx+1 < len(*tl) { + return currentIdx + 1, (*tl)[currentIdx+1] + } + return -1, nil +} + +// UnmarshalJSON for TaskList to ensure proper deserialization. 
+func (tl *TaskList) UnmarshalJSON(data []byte) error { + var rawTasks []json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for _, raw := range rawTasks { + var taskItemRaw map[string]json.RawMessage + if err := json.Unmarshal(raw, &taskItemRaw); err != nil { + return err + } + + if len(taskItemRaw) != 1 { + return errors.New("each TaskItem must have exactly one key") + } + + for key, taskRaw := range taskItemRaw { + task, err := unmarshalTask(key, taskRaw) + if err != nil { + return err + } + *tl = append(*tl, &TaskItem{Key: key, Task: task}) + } + } + + return nil +} + +var taskTypeRegistry = map[string]func() Task{ + "call_http": func() Task { return &CallHTTP{} }, + "call_openapi": func() Task { return &CallOpenAPI{} }, + "call_grpc": func() Task { return &CallGRPC{} }, + "call_asyncapi": func() Task { return &CallAsyncAPI{} }, + "call": func() Task { return &CallFunction{} }, + "do": func() Task { return &DoTask{} }, + "fork": func() Task { return &ForkTask{} }, + "emit": func() Task { return &EmitTask{} }, + "for": func() Task { return &ForTask{} }, + "listen": func() Task { return &ListenTask{} }, + "raise": func() Task { return &RaiseTask{} }, + "run": func() Task { return &RunTask{} }, + "set": func() Task { return &SetTask{} }, + "switch": func() Task { return &SwitchTask{} }, + "try": func() Task { return &TryTask{} }, + "wait": func() Task { return &WaitTask{} }, +} + +func unmarshalTask(key string, taskRaw json.RawMessage) (Task, error) { + var taskType map[string]interface{} + if err := json.Unmarshal(taskRaw, &taskType); err != nil { + return nil, fmt.Errorf("failed to parse task type for key '%s': %w", key, err) + } + + // TODO: not the most elegant; can be improved in a smarter way + + // Determine task type + var task Task + if callValue, hasCall := taskType["call"].(string); hasCall { + // Form composite key and check if it's in the registry + registryKey := fmt.Sprintf("call_%s", callValue) + if constructor, exists := taskTypeRegistry[registryKey]; exists { + task = constructor() + } else { + // Default to CallFunction for unrecognized call values + task = &CallFunction{} + } + } else if _, hasFor := taskType["for"]; hasFor { + // Handle special case "for" that also has "do" + task = taskTypeRegistry["for"]() + } else { + // Handle everything else (e.g., "do", "fork") + for typeKey := range taskType { + if constructor, exists := taskTypeRegistry[typeKey]; exists { + task = constructor() + break + } + } + } + + if task == nil { + return nil, fmt.Errorf("unknown task type for key '%s'", key) + } + + // Populate the task with raw data + if err := json.Unmarshal(taskRaw, task); err != nil { + return nil, fmt.Errorf("failed to unmarshal task '%s': %w", key, err) + } + + return task, nil +} + +// MarshalJSON for TaskList to ensure proper serialization. +func (tl *TaskList) MarshalJSON() ([]byte, error) { + return json.Marshal([]*TaskItem(*tl)) +} + +// Key retrieves a TaskItem by its key. +func (tl *TaskList) Key(key string) *TaskItem { + _, keyItem := tl.KeyAndIndex(key) + return keyItem +} + +func (tl *TaskList) KeyAndIndex(key string) (int, *TaskItem) { + for i, item := range *tl { + if item.Key == key { + return i, item + } + } + // TODO: Add logging here for missing task references + return -1, nil +} + +// TaskItem represents a named task and its associated definition. 
+type TaskItem struct {
+	Key  string `json:"-" validate:"required"`
+	Task Task   `json:"-" validate:"required"`
+}
+
+// MarshalJSON for TaskItem to ensure proper serialization as a key-value pair.
+func (ti *TaskItem) MarshalJSON() ([]byte, error) {
+	if ti == nil {
+		return nil, fmt.Errorf("cannot marshal a nil TaskItem")
+	}
+
+	// Serialize the Task
+	taskJSON, err := json.Marshal(ti.Task)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal task: %w", err)
+	}
+
+	// Create a map with the Key and Task
+	taskEntry := map[string]json.RawMessage{
+		ti.Key: taskJSON,
+	}
+
+	// Marshal the map into JSON
+	return json.Marshal(taskEntry)
+}
+
+func (ti *TaskItem) GetBase() *TaskBase {
+	return ti.Task.GetBase()
+}
+
+// AsCallHTTPTask casts the Task to a CallHTTP task if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsCallHTTPTask() *CallHTTP {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*CallHTTP); ok {
+		return task
+	}
+	return nil
+}
+
+// AsCallOpenAPITask casts the Task to a CallOpenAPI task if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsCallOpenAPITask() *CallOpenAPI {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*CallOpenAPI); ok {
+		return task
+	}
+	return nil
+}
+
+// AsCallGRPCTask casts the Task to a CallGRPC task if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsCallGRPCTask() *CallGRPC {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*CallGRPC); ok {
+		return task
+	}
+	return nil
+}
+
+// AsCallAsyncAPITask casts the Task to a CallAsyncAPI task if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsCallAsyncAPITask() *CallAsyncAPI {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*CallAsyncAPI); ok {
+		return task
+	}
+	return nil
+}
+
+// AsCallFunctionTask casts the Task to a CallFunction task if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsCallFunctionTask() *CallFunction {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*CallFunction); ok {
+		return task
+	}
+	return nil
+}
+
+// AsDoTask casts the Task to a DoTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsDoTask() *DoTask {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*DoTask); ok {
+		return task
+	}
+	return nil
+}
+
+// AsForkTask casts the Task to a ForkTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsForkTask() *ForkTask {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*ForkTask); ok {
+		return task
+	}
+	return nil
+}
+
+// AsEmitTask casts the Task to an EmitTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsEmitTask() *EmitTask {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*EmitTask); ok {
+		return task
+	}
+	return nil
+}
+
+// AsForTask casts the Task to a ForTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsForTask() *ForTask {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*ForTask); ok {
+		return task
+	}
+	return nil
+}
+
+// AsListenTask casts the Task to a ListenTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsListenTask() *ListenTask {
+	if ti == nil {
+		return nil
+	}
+	if task, ok := ti.Task.(*ListenTask); ok {
+		return task
+	}
+	return nil
+}
+
+// AsRaiseTask casts the Task to a RaiseTask if possible, returning nil if the cast fails.
+func (ti *TaskItem) AsRaiseTask() *RaiseTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RaiseTask); ok { + return task + } + return nil +} + +// AsRunTask casts the Task to a RunTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsRunTask() *RunTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RunTask); ok { + return task + } + return nil +} + +// AsSetTask casts the Task to a SetTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSetTask() *SetTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SetTask); ok { + return task + } + return nil +} + +// AsSwitchTask casts the Task to a SwitchTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSwitchTask() *SwitchTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SwitchTask); ok { + return task + } + return nil +} + +// AsTryTask casts the Task to a TryTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsTryTask() *TryTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*TryTask); ok { + return task + } + return nil +} + +// AsWaitTask casts the Task to a WaitTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsWaitTask() *WaitTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*WaitTask); ok { + return task + } + return nil +} diff --git a/model/task_call.go b/model/task_call.go new file mode 100644 index 0000000..c3e83df --- /dev/null +++ b/model/task_call.go @@ -0,0 +1,132 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
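The registry-driven decoding in model/task.go above is easier to follow with a short usage sketch. The snippet below is illustrative only and not part of this change set; it assumes the Endpoint type accepts a literal URI string and that TaskList.Key and AsCallHTTPTask behave as exercised by the tests later in this diff, and the task name and URL are made up.

package model

import (
	"encoding/json"
	"fmt"
)

// ExampleTaskList sketches how a named task is decoded through taskTypeRegistry
// and then narrowed to its concrete type with the As* helpers.
func ExampleTaskList() {
	raw := []byte(`[
		{"getPet": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com/pet/1"}}}
	]`)

	var tasks TaskList
	if err := json.Unmarshal(raw, &tasks); err != nil { // TaskList.UnmarshalJSON consults taskTypeRegistry
		fmt.Println("error:", err)
		return
	}

	httpTask := tasks.Key("getPet").AsCallHTTPTask() // nil if "getPet" is not a CallHTTP task
	fmt.Println(httpTask.Call, httpTask.With.Method, httpTask.With.Endpoint.String())
	// Output: http GET http://example.com/pet/1
}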
+ +package model + +import "encoding/json" + +type CallHTTP struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=http"` + With HTTPArguments `json:"with" validate:"required"` +} + +func (c *CallHTTP) GetBase() *TaskBase { + return &c.TaskBase +} + +type HTTPArguments struct { + Method string `json:"method" validate:"required,oneofci=GET POST PUT DELETE PATCH"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` + Headers map[string]string `json:"headers,omitempty"` + Body json.RawMessage `json:"body,omitempty"` + Query map[string]interface{} `json:"query,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallOpenAPI struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=openapi"` + With OpenAPIArguments `json:"with" validate:"required"` +} + +func (c *CallOpenAPI) GetBase() *TaskBase { + return &c.TaskBase +} + +type OpenAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + OperationID string `json:"operationId" validate:"required"` + Parameters map[string]interface{} `json:"parameters,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallGRPC struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=grpc"` + With GRPCArguments `json:"with" validate:"required"` +} + +func (c *CallGRPC) GetBase() *TaskBase { + return &c.TaskBase +} + +type GRPCArguments struct { + Proto *ExternalResource `json:"proto" validate:"required"` + Service GRPCService `json:"service" validate:"required"` + Method string `json:"method" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type GRPCService struct { + Name string `json:"name" validate:"required"` + Host string `json:"host" validate:"required,hostname_rfc1123"` + Port int `json:"port" validate:"required,min=0,max=65535"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +type CallAsyncAPI struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=asyncapi"` + With AsyncAPIArguments `json:"with" validate:"required"` +} + +func (c *CallAsyncAPI) GetBase() *TaskBase { + return &c.TaskBase +} + +type AsyncAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + Channel string `json:"channel,omitempty"` + Operation string `json:"operation,omitempty"` + Server *AsyncAPIServer `json:"server,omitempty"` + Protocol string `json:"protocol,omitempty" validate:"omitempty,oneof=amqp amqp1 anypointmq googlepubsub http ibmmq jms kafka mercure mqtt mqtt5 nats pulsar redis sns solace sqs stomp ws"` + Message *AsyncAPIOutboundMessage `json:"message,omitempty"` + Subscription *AsyncAPISubscription `json:"subscription,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type AsyncAPIServer struct { + Name string `json:"name" validate:"required"` + Variables map[string]interface{} `json:"variables,omitempty"` +} + +type AsyncAPIOutboundMessage struct { + Payload map[string]interface{} `json:"payload,omitempty" validate:"omitempty"` + Headers map[string]interface{} 
`json:"headers,omitempty" validate:"omitempty"` +} + +type AsyncAPISubscription struct { + Filter *RuntimeExpression `json:"filter,omitempty"` + Consume *AsyncAPIMessageConsumptionPolicy `json:"consume" validate:"required"` +} + +type AsyncAPIMessageConsumptionPolicy struct { + For *Duration `json:"for,omitempty"` + Amount int `json:"amount,omitempty" validate:"required_without_all=While Until"` + While *RuntimeExpression `json:"while,omitempty" validate:"required_without_all=Amount Until"` + Until *RuntimeExpression `json:"until,omitempty" validate:"required_without_all=Amount While"` +} + +type CallFunction struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required"` + With map[string]interface{} `json:"with,omitempty"` +} + +func (c *CallFunction) GetBase() *TaskBase { + return &c.TaskBase +} diff --git a/model/task_call_test.go b/model/task_call_test.go new file mode 100644 index 0000000..0d10e69 --- /dev/null +++ b/model/task_call_test.go @@ -0,0 +1,480 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCallHTTP_MarshalJSON(t *testing.T) { + callHTTP := CallHTTP{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: string(FlowDirectiveContinue)}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com"}, + }, + Headers: map[string]string{ + "Authorization": "Bearer token", + }, + Query: map[string]interface{}{ + "q": "search", + }, + Output: "content", + }, + } + + data, err := json.Marshal(callHTTP) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }`, string(data)) +} + +func TestCallHTTP_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }` + + var 
callHTTP CallHTTP + err := json.Unmarshal([]byte(jsonData), &callHTTP) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${condition}"}, callHTTP.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callHTTP.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callHTTP.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callHTTP.Timeout) + assert.Equal(t, &FlowDirective{Value: string(FlowDirectiveContinue)}, callHTTP.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callHTTP.Metadata) + assert.Equal(t, "http", callHTTP.Call) + assert.Equal(t, "GET", callHTTP.With.Method) + assert.Equal(t, "http://example.com", callHTTP.With.Endpoint.String()) + assert.Equal(t, map[string]string{"Authorization": "Bearer token"}, callHTTP.With.Headers) + assert.Equal(t, map[string]interface{}{"q": "search"}, callHTTP.With.Query) + assert.Equal(t, "content", callHTTP.With.Output) +} + +func TestCallOpenAPI_MarshalJSON(t *testing.T) { + authPolicy := "my-auth" + callOpenAPI := CallOpenAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "MyOpenAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/openapi.json"}, + }, + }, + OperationID: "getUsers", + Parameters: map[string]interface{}{ + "param1": "value1", + "param2": "value2", + }, + Authentication: &ReferenceableAuthenticationPolicy{ + Use: &authPolicy, + }, + Output: "content", + }, + } + + data, err := json.Marshal(callOpenAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": "http://example.com/openapi.json" + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }`, string(data)) +} + +func TestCallOpenAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": { "uri": "http://example.com/openapi.json" } + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }` + + var callOpenAPI CallOpenAPI + err := json.Unmarshal([]byte(jsonData), &callOpenAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callOpenAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callOpenAPI.Input) + assert.Equal(t, 
&Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callOpenAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callOpenAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callOpenAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callOpenAPI.Metadata) + assert.Equal(t, "openapi", callOpenAPI.Call) + assert.Equal(t, "MyOpenAPIDoc", callOpenAPI.With.Document.Name) + assert.Equal(t, "http://example.com/openapi.json", callOpenAPI.With.Document.Endpoint.EndpointConfig.URI.String()) + assert.Equal(t, "getUsers", callOpenAPI.With.OperationID) + assert.Equal(t, map[string]interface{}{"param1": "value1", "param2": "value2"}, callOpenAPI.With.Parameters) + assert.Equal(t, "my-auth", *callOpenAPI.With.Authentication.Use) + assert.Equal(t, "content", callOpenAPI.With.Output) +} + +func TestCallGRPC_MarshalJSON(t *testing.T) { + callGRPC := CallGRPC{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "grpc", + With: GRPCArguments{ + Proto: &ExternalResource{ + Name: "MyProtoFile", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/protofile"}, + }, + }, + Service: GRPCService{ + Name: "UserService", + Host: "example.com", + Port: 50051, + }, + Method: "GetUser", + Arguments: map[string]interface{}{"userId": "12345"}, + }, + } + + data, err := json.Marshal(callGRPC) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }`, string(data)) +} + +func TestCallGRPC_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }` + + var callGRPC CallGRPC + err := json.Unmarshal([]byte(jsonData), &callGRPC) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callGRPC.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callGRPC.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callGRPC.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callGRPC.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callGRPC.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, 
callGRPC.Metadata) + assert.Equal(t, "grpc", callGRPC.Call) + assert.Equal(t, "MyProtoFile", callGRPC.With.Proto.Name) + assert.Equal(t, "http://example.com/protofile", callGRPC.With.Proto.Endpoint.String()) + assert.Equal(t, "UserService", callGRPC.With.Service.Name) + assert.Equal(t, "example.com", callGRPC.With.Service.Host) + assert.Equal(t, 50051, callGRPC.With.Service.Port) + assert.Equal(t, "GetUser", callGRPC.With.Method) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callGRPC.With.Arguments) +} + +func TestCallAsyncAPI_MarshalJSON(t *testing.T) { + callAsyncAPI := CallAsyncAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "asyncapi", + With: AsyncAPIArguments{ + Document: &ExternalResource{ + Name: "MyAsyncAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/asyncapi.json"}, + }, + }, + Operation: "user.signup", + Server: &AsyncAPIServer{Name: "default-server"}, + Message: &AsyncAPIOutboundMessage{Payload: map[string]interface{}{"userId": "12345"}}, + Protocol: "http", + }, + } + + data, err := json.Marshal(callAsyncAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server" }, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + } + } + }`, string(data)) +} + +func TestCallAsyncAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server"}, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + }, + "authentication": { + "use": "asyncapi-auth-policy" + } + } + }` + + var callAsyncAPI CallAsyncAPI + err := json.Unmarshal([]byte(jsonData), &callAsyncAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callAsyncAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callAsyncAPI.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callAsyncAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callAsyncAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callAsyncAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callAsyncAPI.Metadata) + assert.Equal(t, "asyncapi", callAsyncAPI.Call) + assert.Equal(t, "MyAsyncAPIDoc", callAsyncAPI.With.Document.Name) + assert.Equal(t, "http://example.com/asyncapi.json", 
callAsyncAPI.With.Document.Endpoint.String()) + assert.Equal(t, "user.signup", callAsyncAPI.With.Operation) + assert.Equal(t, "default-server", callAsyncAPI.With.Server.Name) + assert.Equal(t, "http", callAsyncAPI.With.Protocol) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callAsyncAPI.With.Message.Payload) + assert.Equal(t, "asyncapi-auth-policy", *callAsyncAPI.With.Authentication.Use) +} + +func TestCallFunction_MarshalJSON(t *testing.T) { + callFunction := CallFunction{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "myFunction", + With: map[string]interface{}{ + "param1": "value1", + "param2": 42, + }, + } + + data, err := json.Marshal(callFunction) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }`, string(data)) +} + +func TestCallFunction_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }` + + var callFunction CallFunction + err := json.Unmarshal([]byte(jsonData), &callFunction) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callFunction.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callFunction.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callFunction.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callFunction.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callFunction.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callFunction.Metadata) + assert.Equal(t, "myFunction", callFunction.Call) + + // Adjust numeric values for comparison + expectedWith := map[string]interface{}{ + "param1": "value1", + "param2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedWith, callFunction.With) +} diff --git a/model/task_do.go b/model/task_do.go new file mode 100644 index 0000000..f1dca25 --- /dev/null +++ b/model/task_do.go @@ -0,0 +1,25 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
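DoTask, defined just below, simply wraps a TaskList, so sequencing is driven by the Next helper shown in model/task.go above, including 'then' flow directives. The following sketch is illustrative only: stubTask is a hypothetical stand-in implementing Task, and it assumes IsTermination treats only the reserved directives (such as end or exit) as terminal, so "c" is resolved as a task name.

package model

import "fmt"

// stubTask is a hypothetical no-op Task used only for this sketch; it is not part of this change.
type stubTask struct{ TaskBase }

func (s *stubTask) GetBase() *TaskBase { return &s.TaskBase }

// ExampleTaskList_Next shows how Next honors a 'then' flow directive.
func ExampleTaskList_Next() {
	list := TaskList{
		{Key: "a", Task: &stubTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "c"}}}},
		{Key: "b", Task: &stubTask{}},
		{Key: "c", Task: &stubTask{}},
	}

	for idx, item := 0, list[0]; item != nil; idx, item = list.Next(idx) {
		fmt.Println(item.Key) // "b" is skipped because task "a" transitions straight to "c"
	}
	// Output:
	// a
	// c
}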
+ +package model + +// DoTask represents a task configuration to execute tasks sequentially. +type DoTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Do *TaskList `json:"do" validate:"required,dive"` +} + +func (d *DoTask) GetBase() *TaskBase { + return &d.TaskBase +} diff --git a/model/task_do_test.go b/model/task_do_test.go new file mode 100644 index 0000000..4a337fe --- /dev/null +++ b/model/task_do_test.go @@ -0,0 +1,103 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDoTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var doTask DoTask + err := json.Unmarshal([]byte(jsonData), &doTask) + assert.NoError(t, err) + + task1 := doTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := doTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestDoTask_MarshalJSON(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(doTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestDoTask_Validation(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(doTask) + assert.Error(t, err) +} diff --git a/model/task_event.go b/model/task_event.go new file mode 100644 index 0000000..5df1ab6 --- /dev/null +++ b/model/task_event.go @@ -0,0 +1,290 @@ +// Copyright 2025 The Serverless Workflow Specification Authors 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package model
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+)
+
+// EmitTask represents the configuration for emitting events.
+type EmitTask struct {
+	TaskBase `json:",inline"` // Inline TaskBase fields
+	Emit     EmitTaskConfiguration `json:"emit" validate:"required"`
+}
+
+func (e *EmitTask) GetBase() *TaskBase {
+	return &e.TaskBase
+}
+
+func (e *EmitTask) MarshalJSON() ([]byte, error) {
+	type Alias EmitTask // Prevent recursion
+	return json.Marshal((*Alias)(e))
+}
+
+// ListenTask represents a task configuration to listen to events.
+type ListenTask struct {
+	TaskBase `json:",inline"` // Inline TaskBase fields
+	Listen   ListenTaskConfiguration `json:"listen" validate:"required"`
+}
+
+func (lt *ListenTask) GetBase() *TaskBase {
+	return &lt.TaskBase
+}
+
+type ListenTaskConfiguration struct {
+	To *EventConsumptionStrategy `json:"to" validate:"required"`
+}
+
+// MarshalJSON for ListenTask to ensure proper serialization.
+func (lt *ListenTask) MarshalJSON() ([]byte, error) {
+	type Alias ListenTask
+	return json.Marshal((*Alias)(lt))
+}
+
+// UnmarshalJSON for ListenTask to ensure proper deserialization.
+func (lt *ListenTask) UnmarshalJSON(data []byte) error {
+	type Alias ListenTask
+	alias := (*Alias)(lt)
+	return json.Unmarshal(data, alias)
+}
+
+type EmitTaskConfiguration struct {
+	Event EmitEventDefinition `json:"event" validate:"required"`
+}
+
+type EmitEventDefinition struct {
+	With *EventProperties `json:"with" validate:"required"`
+}
+
+type EventProperties struct {
+	ID              string                    `json:"id,omitempty"`
+	Source          *URITemplateOrRuntimeExpr `json:"source,omitempty" validate:"omitempty"` // URI template or runtime expression
+	Type            string                    `json:"type,omitempty"`
+	Time            *StringOrRuntimeExpr      `json:"time,omitempty" validate:"omitempty,string_or_runtime_expr"` // ISO 8601 date-time string or runtime expression
+	Subject         string                    `json:"subject,omitempty"`
+	DataContentType string                    `json:"datacontenttype,omitempty"`
+	DataSchema      *URITemplateOrRuntimeExpr `json:"dataschema,omitempty" validate:"omitempty"` // URI template or runtime expression
+	Additional      map[string]interface{}    `json:"-"`
+}
+
+// UnmarshalJSON implements custom unmarshaling for EventProperties.
+func (e *EventProperties) UnmarshalJSON(data []byte) error { + type Alias EventProperties // Prevent recursion + alias := &struct { + Additional map[string]interface{} `json:"-"` // Inline the additional properties + *Alias + }{ + Alias: (*Alias)(e), + } + + // Decode the entire JSON into a map to capture additional properties + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties: %w", err) + } + + // Unmarshal known fields into the alias + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties fields: %w", err) + } + + // Validate fields requiring custom unmarshaling + if e.Source != nil && e.Source.Value == nil { + return fmt.Errorf("invalid Source: must be a valid URI template or runtime expression") + } + + if e.DataSchema != nil && e.DataSchema.Value == nil { + return fmt.Errorf("invalid DataSchema: must be a valid URI template or runtime expression") + } + + // Extract additional properties by removing known keys + for key := range raw { + switch key { + case "id", "source", "type", "time", "subject", "datacontenttype", "dataschema": + delete(raw, key) + } + } + + e.Additional = raw + return nil +} + +// MarshalJSON implements custom marshaling for EventProperties. +func (e *EventProperties) MarshalJSON() ([]byte, error) { + // Create a map for known fields + known := make(map[string]interface{}) + + if e.ID != "" { + known["id"] = e.ID + } + if e.Source != nil { + known["source"] = e.Source + } + if e.Type != "" { + known["type"] = e.Type + } + if e.Time != nil { + known["time"] = e.Time + } + if e.Subject != "" { + known["subject"] = e.Subject + } + if e.DataContentType != "" { + known["datacontenttype"] = e.DataContentType + } + if e.DataSchema != nil { + known["dataschema"] = e.DataSchema + } + + // Merge additional properties + for key, value := range e.Additional { + known[key] = value + } + + return json.Marshal(known) +} + +// EventFilter defines a mechanism to filter events based on predefined criteria. +type EventFilter struct { + With *EventProperties `json:"with" validate:"required"` + Correlate map[string]Correlation `json:"correlate,omitempty" validate:"omitempty,dive"` // Keyed correlation filters +} + +// Correlation defines the mapping of event attributes for correlation. +type Correlation struct { + From string `json:"from" validate:"required"` // Runtime expression to extract the correlation value + Expect string `json:"expect,omitempty"` // Expected value or runtime expression +} + +// EventConsumptionStrategy defines the consumption strategy for events. +type EventConsumptionStrategy struct { + All []*EventFilter `json:"all,omitempty" validate:"omitempty,dive"` + Any []*EventFilter `json:"any,omitempty" validate:"omitempty,dive"` + One *EventFilter `json:"one,omitempty" validate:"omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty" validate:"omitempty"` +} + +// EventConsumptionUntil handles the complex conditions of the "until" field. +type EventConsumptionUntil struct { + Condition *RuntimeExpression `json:"-" validate:"omitempty"` + Strategy *EventConsumptionStrategy `json:"-" validate:"omitempty"` + IsDisabled bool `json:"-"` // True when "until: false" +} + +// UnmarshalJSON for EventConsumptionUntil to handle the "oneOf" behavior. 
+func (ecu *EventConsumptionUntil) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventConsumptionUntil: %w", err) + } + + switch v := raw.(type) { + case bool: + if !v { + ecu.IsDisabled = true + } else { + return fmt.Errorf("invalid value for 'until': true is not supported") + } + case string: + ecu.Condition = &RuntimeExpression{Value: v} + case map[string]interface{}: + strategyData, err := json.Marshal(v) + if err != nil { + return fmt.Errorf("failed to marshal 'until' strategy: %w", err) + } + var strategy EventConsumptionStrategy + if err := json.Unmarshal(strategyData, &strategy); err != nil { + return fmt.Errorf("failed to unmarshal 'until' strategy: %w", err) + } + ecu.Strategy = &strategy + default: + return fmt.Errorf("invalid type for 'until'") + } + + return nil +} + +// MarshalJSON for EventConsumptionUntil to handle proper serialization. +func (ecu *EventConsumptionUntil) MarshalJSON() ([]byte, error) { + if ecu.IsDisabled { + return json.Marshal(false) + } + if ecu.Condition != nil { + // Serialize the condition directly + return json.Marshal(ecu.Condition.Value) + } + if ecu.Strategy != nil { + // Serialize the nested strategy + return json.Marshal(ecu.Strategy) + } + // Return null if nothing is set + return json.Marshal(nil) +} + +// UnmarshalJSON for EventConsumptionStrategy to enforce "oneOf" behavior and handle edge cases. +func (ecs *EventConsumptionStrategy) UnmarshalJSON(data []byte) error { + temp := struct { + All []*EventFilter `json:"all"` + Any []*EventFilter `json:"any"` + One *EventFilter `json:"one"` + Until *EventConsumptionUntil `json:"until"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields (ignoring empty lists for `all` and `any`) + count := 0 + if len(temp.All) > 0 { + count++ + ecs.All = temp.All + } + if len(temp.Any) > 0 || temp.Until != nil { + count++ + ecs.Any = temp.Any + ecs.Until = temp.Until + } + if temp.One != nil { + count++ + ecs.One = temp.One + } + + // Ensure only one primary field (all, any, one) is set + if count > 1 { + return errors.New("invalid EventConsumptionStrategy: only one primary strategy type (all, any, or one) must be specified") + } + + return nil +} + +// MarshalJSON for EventConsumptionStrategy to ensure proper serialization. +func (ecs *EventConsumptionStrategy) MarshalJSON() ([]byte, error) { + temp := struct { + All []*EventFilter `json:"all,omitempty"` + Any []*EventFilter `json:"any,omitempty"` + One *EventFilter `json:"one,omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty"` + }{ + All: ecs.All, + Any: ecs.Any, + One: ecs.One, + Until: ecs.Until, + } + + return json.Marshal(temp) +} diff --git a/model/task_event_test.go b/model/task_event_test.go new file mode 100644 index 0000000..45c92a7 --- /dev/null +++ b/model/task_event_test.go @@ -0,0 +1,231 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEmitTask_MarshalJSON(t *testing.T) { + emitTask := &EmitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Emit: EmitTaskConfiguration{ + Event: EmitEventDefinition{ + With: &EventProperties{ + ID: "event-id", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + Type: "example.event.type", + Time: &StringOrRuntimeExpr{Value: "2023-01-01T00:00:00Z"}, + Subject: "example.subject", + DataContentType: "application/json", + DataSchema: &URITemplateOrRuntimeExpr{Value: "http://example.com/schema"}, + Additional: map[string]interface{}{ + "extra": "value", + }, + }, + }, + }, + } + + data, err := json.Marshal(emitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }`, string(data)) +} + +func TestEmitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }` + + var emitTask EmitTask + err := json.Unmarshal([]byte(jsonData), &emitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, emitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, emitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, emitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, emitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, emitTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, emitTask.Metadata) + assert.Equal(t, "event-id", emitTask.Emit.Event.With.ID) + assert.Equal(t, "http://example.com/source", emitTask.Emit.Event.With.Source.String()) + assert.Equal(t, "example.event.type", emitTask.Emit.Event.With.Type) + assert.Equal(t, "2023-01-01T00:00:00Z", emitTask.Emit.Event.With.Time.String()) + assert.Equal(t, "example.subject", emitTask.Emit.Event.With.Subject) + assert.Equal(t, "application/json", emitTask.Emit.Event.With.DataContentType) + 
assert.Equal(t, "http://example.com/schema", emitTask.Emit.Event.With.DataSchema.String()) + assert.Equal(t, map[string]interface{}{"extra": "value"}, emitTask.Emit.Event.With.Additional) +} + +func TestListenTask_MarshalJSON_WithUntilCondition(t *testing.T) { + listenTask := ListenTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Listen: ListenTaskConfiguration{ + To: &EventConsumptionStrategy{ + Any: []*EventFilter{ + { + With: &EventProperties{ + Type: "example.event.type", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + }, + }, + }, + Until: &EventConsumptionUntil{ + Condition: NewRuntimeExpression("workflow.data.condition == true"), + }, + }, + }, + } + + data, err := json.Marshal(listenTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "listen": { + "to": { + "any": [ + { + "with": { + "type": "example.event.type", + "source": "http://example.com/source" + } + } + ], + "until": "workflow.data.condition == true" + } + } + }`, string(data)) +} + +func TestEventConsumptionUntil_MarshalJSON(t *testing.T) { + tests := []struct { + name string + until *EventConsumptionUntil + expected string + shouldErr bool + }{ + { + name: "Until Disabled", + until: &EventConsumptionUntil{ + IsDisabled: true, + }, + expected: `false`, + shouldErr: false, + }, + { + name: "Until Condition Set", + until: &EventConsumptionUntil{ + Condition: &RuntimeExpression{Value: "workflow.data.condition == true"}, + }, + expected: `"workflow.data.condition == true"`, + shouldErr: false, + }, + { + name: "Until Nested Strategy", + until: &EventConsumptionUntil{ + Strategy: &EventConsumptionStrategy{ + One: &EventFilter{ + With: &EventProperties{Type: "example.event.type"}, + }, + }, + }, + expected: `{"one":{"with":{"type":"example.event.type"}}}`, + shouldErr: false, + }, + { + name: "Until Nil", + until: &EventConsumptionUntil{}, + expected: `null`, + shouldErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.until) + if test.shouldErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expected, string(data)) + } + }) + } +} diff --git a/model/task_for.go b/model/task_for.go new file mode 100644 index 0000000..5fc84ec --- /dev/null +++ b/model/task_for.go @@ -0,0 +1,34 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +// ForTask represents a task configuration to iterate over a collection. +type ForTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + For ForTaskConfiguration `json:"for" validate:"required"` + While string `json:"while,omitempty"` + Do *TaskList `json:"do" validate:"required,dive"` +} + +func (f *ForTask) GetBase() *TaskBase { + return &f.TaskBase +} + +// ForTaskConfiguration defines the loop configuration for iterating over a collection. +type ForTaskConfiguration struct { + Each string `json:"each,omitempty"` // Variable name for the current item + In string `json:"in" validate:"required"` // Runtime expression for the collection + At string `json:"at,omitempty"` // Variable name for the current index +} diff --git a/model/task_for_test.go b/model/task_for_test.go new file mode 100644 index 0000000..3d8fc37 --- /dev/null +++ b/model/task_for_test.go @@ -0,0 +1,151 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "sigs.k8s.io/yaml" + + "github.com/stretchr/testify/assert" +) + +func TestForTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var forTask ForTask + err := json.Unmarshal([]byte(jsonData), &forTask) + assert.NoError(t, err) + assert.Equal(t, "item", forTask.For.Each) + assert.Equal(t, "${items}", forTask.For.In) + assert.Equal(t, "index", forTask.For.At) + assert.Equal(t, "${condition}", forTask.While) + + task1 := forTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := forTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForTask_MarshalJSON(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(forTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + 
{"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestForTask_Validation(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(forTask) + assert.Error(t, err) +} + +func TestForTaskValidation(t *testing.T) { + rawYaml := ` +for: + each: pet + in: .pets + at: index +while: .vet != null +do: + - waitForCheckup: + listen: + to: + one: + with: + type: com.fake.petclinic.pets.checkup.completed.v2 + output: + as: '.pets + [{ "id": $pet.id }]' +` + + var forTask ForTask + err := yaml.Unmarshal([]byte(rawYaml), &forTask) + assert.NoError(t, err, "Failed to unmarshal ForTask") + + err = validate.Struct(forTask) + assert.NoError(t, err, "Failed to validate ForTask") +} diff --git a/model/task_fork.go b/model/task_fork.go new file mode 100644 index 0000000..1511729 --- /dev/null +++ b/model/task_fork.go @@ -0,0 +1,31 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +// ForkTask represents a task configuration to execute multiple tasks concurrently. +type ForkTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Fork ForkTaskConfiguration `json:"fork" validate:"required"` +} + +func (f *ForkTask) GetBase() *TaskBase { + return &f.TaskBase +} + +// ForkTaskConfiguration defines the configuration for the branches to perform concurrently. +type ForkTaskConfiguration struct { + Branches *TaskList `json:"branches" validate:"required,dive"` + Compete bool `json:"compete,omitempty"` +} diff --git a/model/task_fork_test.go b/model/task_fork_test.go new file mode 100644 index 0000000..04b4f19 --- /dev/null +++ b/model/task_fork_test.go @@ -0,0 +1,116 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestForkTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ], + "compete": true + } + }` + + var forkTask ForkTask + err := json.Unmarshal([]byte(jsonData), &forkTask) + assert.NoError(t, err) + assert.Equal(t, true, forkTask.Fork.Compete) + + task1 := forkTask.Fork.Branches.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := forkTask.Fork.Branches.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForkTask_MarshalJSON(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + data, err := json.Marshal(forkTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ], + "compete": true + } + }`, string(data)) +} + +func TestForkTask_Validation(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + err := validate.Struct(forkTask) + assert.Error(t, err) +} diff --git a/model/task_raise.go b/model/task_raise.go new file mode 100644 index 0000000..5dafd55 --- /dev/null +++ b/model/task_raise.go @@ -0,0 +1,72 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// RaiseTask represents a task configuration to raise errors. 
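+//
+// Illustrative wire shapes only (drawn from this package's tests and the
+// oneOf handling below; the reference name is a made-up placeholder):
+//
+//	{"raise": {"error": {"type": "http://example.com/error", "status": 500}}}
+//	{"raise": {"error": "myReusableError"}}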
+type RaiseTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Raise RaiseTaskConfiguration `json:"raise" validate:"required"` +} + +func (r *RaiseTask) GetBase() *TaskBase { + return &r.TaskBase +} + +type RaiseTaskConfiguration struct { + Error RaiseTaskError `json:"error" validate:"required"` +} + +type RaiseTaskError struct { + Definition *Error `json:"-"` + Ref *string `json:"-"` +} + +// UnmarshalJSON for RaiseTaskError to enforce "oneOf" behavior. +func (rte *RaiseTaskError) UnmarshalJSON(data []byte) error { + // Try to unmarshal into a string (Ref) + var ref string + if err := json.Unmarshal(data, &ref); err == nil { + rte.Ref = &ref + rte.Definition = nil + return nil + } + + // Try to unmarshal into an Error (Definition) + var def Error + if err := json.Unmarshal(data, &def); err == nil { + rte.Definition = &def + rte.Ref = nil + return nil + } + + // If neither worked, return an error + return errors.New("invalid RaiseTaskError: data must be either a string (reference) or an object (definition)") +} + +// MarshalJSON for RaiseTaskError to ensure proper serialization. +func (rte *RaiseTaskError) MarshalJSON() ([]byte, error) { + if rte.Definition != nil { + return json.Marshal(rte.Definition) + } + if rte.Ref != nil { + return json.Marshal(*rte.Ref) + } + return nil, errors.New("invalid RaiseTaskError: neither 'definition' nor 'reference' is set") +} diff --git a/model/task_raise_test.go b/model/task_raise_test.go new file mode 100644 index 0000000..1aa3d3b --- /dev/null +++ b/model/task_raise_test.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRaiseTask_MarshalJSON(t *testing.T) { + raiseTask := &RaiseTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Raise: RaiseTaskConfiguration{ + Error: RaiseTaskError{ + Definition: &Error{ + Type: &URITemplateOrRuntimeExpr{Value: "http://example.com/error"}, + Status: 500, + Title: NewStringOrRuntimeExpr("Internal Server Error"), + Detail: NewStringOrRuntimeExpr("An unexpected error occurred."), + }, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }`, string(data)) +} + +func TestRaiseTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }` + + var raiseTask *RaiseTask + err := json.Unmarshal([]byte(jsonData), &raiseTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, raiseTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, raiseTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, raiseTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, raiseTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, raiseTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, raiseTask.Metadata) + assert.Equal(t, "http://example.com/error", raiseTask.Raise.Error.Definition.Type.String()) + assert.Equal(t, 500, raiseTask.Raise.Error.Definition.Status) + assert.Equal(t, "Internal Server Error", raiseTask.Raise.Error.Definition.Title.String()) + assert.Equal(t, "An unexpected error occurred.", raiseTask.Raise.Error.Definition.Detail.String()) +} diff --git a/model/task_run.go b/model/task_run.go new file mode 100644 index 0000000..b589cfa --- /dev/null +++ b/model/task_run.go @@ -0,0 +1,128 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// RunTask represents a task configuration to execute external processes. +type RunTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Run RunTaskConfiguration `json:"run" validate:"required"` +} + +func (r *RunTask) GetBase() *TaskBase { + return &r.TaskBase +} + +type RunTaskConfiguration struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` +} + +type Container struct { + Image string `json:"image" validate:"required"` + Command string `json:"command,omitempty"` + Ports map[string]interface{} `json:"ports,omitempty"` + Volumes map[string]interface{} `json:"volumes,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type Script struct { + Language string `json:"language" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` + InlineCode *string `json:"code,omitempty"` + External *ExternalResource `json:"source,omitempty"` +} + +type Shell struct { + Command string `json:"command" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type RunWorkflow struct { + Namespace string `json:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" validate:"required,semver_pattern"` + Input map[string]interface{} `json:"input,omitempty"` +} + +// UnmarshalJSON for RunTaskConfiguration to enforce "oneOf" behavior. +func (rtc *RunTaskConfiguration) UnmarshalJSON(data []byte) error { + temp := struct { + Await *bool `json:"await"` + Container *Container `json:"container"` + Script *Script `json:"script"` + Shell *Shell `json:"shell"` + Workflow *RunWorkflow `json:"workflow"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if temp.Container != nil { + count++ + rtc.Container = temp.Container + } + if temp.Script != nil { + count++ + rtc.Script = temp.Script + } + if temp.Shell != nil { + count++ + rtc.Shell = temp.Shell + } + if temp.Workflow != nil { + count++ + rtc.Workflow = temp.Workflow + } + + // Ensure only one of the options is set + if count != 1 { + return errors.New("invalid RunTaskConfiguration: only one of 'container', 'script', 'shell', or 'workflow' must be specified") + } + + rtc.Await = temp.Await + return nil +} + +// MarshalJSON for RunTaskConfiguration to ensure proper serialization. 
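+//
+// For orientation (illustrative only; the shell command is a placeholder):
+// a serialized configuration carries the optional "await" flag plus exactly
+// one process kind, mirroring the oneOf check in UnmarshalJSON above, e.g.
+//
+//	{"await": true, "shell": {"command": "echo hello"}}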
+func (rtc *RunTaskConfiguration) MarshalJSON() ([]byte, error) { + temp := struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` + }{ + Await: rtc.Await, + Container: rtc.Container, + Script: rtc.Script, + Shell: rtc.Shell, + Workflow: rtc.Workflow, + } + + return json.Marshal(temp) +} diff --git a/model/task_run_test.go b/model/task_run_test.go new file mode 100644 index 0000000..026b9c8 --- /dev/null +++ b/model/task_run_test.go @@ -0,0 +1,196 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRunTask_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Container: &Container{ + Image: "example-image", + Command: "example-command", + Ports: map[string]interface{}{ + "8080": "80", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }`, string(data)) +} + +func TestRunTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: 
NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "example-image", runTask.Run.Container.Image) + assert.Equal(t, "example-command", runTask.Run.Container.Command) + assert.Equal(t, map[string]interface{}{"8080": "80"}, runTask.Run.Container.Ports) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Container.Environment) +} + +func TestRunTaskScript_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Script: &Script{ + Language: "python", + Arguments: map[string]interface{}{ + "arg1": "value1", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + InlineCode: stringPtr("print('Hello, World!')"), + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }`, string(data)) +} + +func TestRunTaskScript_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "python", runTask.Run.Script.Language) + assert.Equal(t, map[string]interface{}{"arg1": "value1"}, runTask.Run.Script.Arguments) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Script.Environment) + assert.Equal(t, "print('Hello, World!')", *runTask.Run.Script.InlineCode) +} + +func boolPtr(b bool) *bool { + return &b +} + +func stringPtr(s string) *string { + return &s +} diff --git a/model/task_set.go b/model/task_set.go new file mode 100644 index 0000000..68816ba --- /dev/null +++ b/model/task_set.go @@ -0,0 
+1,40 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SetTask represents a task used to set data. +type SetTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Set map[string]interface{} `json:"set" validate:"required,min=1,dive"` +} + +func (st *SetTask) GetBase() *TaskBase { + return &st.TaskBase +} + +// MarshalJSON for SetTask to ensure proper serialization. +func (st *SetTask) MarshalJSON() ([]byte, error) { + type Alias SetTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SetTask to ensure proper deserialization. +func (st *SetTask) UnmarshalJSON(data []byte) error { + type Alias SetTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_set_test.go b/model/task_set_test.go new file mode 100644 index 0000000..49781af --- /dev/null +++ b/model/task_set_test.go @@ -0,0 +1,104 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSetTask_MarshalJSON(t *testing.T) { + setTask := SetTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Set: map[string]interface{}{ + "key1": "value1", + "key2": 42, + }, + } + + data, err := json.Marshal(setTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }`, string(data)) +} + +func TestSetTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }` + + var setTask SetTask + err := json.Unmarshal([]byte(jsonData), &setTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, setTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, setTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, setTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, setTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, setTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, setTask.Metadata) + expectedSet := map[string]interface{}{ + "key1": "value1", + "key2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedSet, setTask.Set) +} + +func TestSetTask_Validation(t *testing.T) { + // Valid SetTask + setTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{ + "key": "value", + }, + } + assert.NoError(t, validate.Struct(setTask)) + + // Invalid SetTask (empty set) + invalidSetTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{}, + } + assert.Error(t, validate.Struct(invalidSetTask)) +} diff --git a/model/task_switch.go b/model/task_switch.go new file mode 100644 index 0000000..89ca9c1 --- /dev/null +++ b/model/task_switch.go @@ -0,0 +1,48 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SwitchTask represents a task configuration for conditional branching. 
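+//
+// Illustrative shape, mirroring this package's tests: each switch entry is a
+// single-key object that maps a case name to its condition and outcome, e.g.
+//
+//	{"switch": [{"case1": {"when": "${condition1}", "then": "next"}}]}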
+type SwitchTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Switch []SwitchItem `json:"switch" validate:"required,min=1,dive,switch_item"` +} + +func (st *SwitchTask) GetBase() *TaskBase { + return &st.TaskBase +} + +type SwitchItem map[string]SwitchCase + +// SwitchCase defines a condition and the corresponding outcome for a switch task. +type SwitchCase struct { + When *RuntimeExpression `json:"when,omitempty"` + Then *FlowDirective `json:"then" validate:"required"` +} + +// MarshalJSON for SwitchTask to ensure proper serialization. +func (st *SwitchTask) MarshalJSON() ([]byte, error) { + type Alias SwitchTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SwitchTask to ensure proper deserialization. +func (st *SwitchTask) UnmarshalJSON(data []byte) error { + type Alias SwitchTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_switch_test.go b/model/task_switch_test.go new file mode 100644 index 0000000..3c40b5a --- /dev/null +++ b/model/task_switch_test.go @@ -0,0 +1,151 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSwitchTask_MarshalJSON(t *testing.T) { + switchTask := &SwitchTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + { + "case2": SwitchCase{ + When: &RuntimeExpression{Value: "${condition2}"}, + Then: &FlowDirective{Value: "end"}, + }, + }, + }, + } + + data, err := json.Marshal(switchTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }`, string(data)) +} + +func TestSwitchTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }` + + var switchTask SwitchTask + err := 
json.Unmarshal([]byte(jsonData), &switchTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, switchTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, switchTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, switchTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, switchTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, switchTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, switchTask.Metadata) + assert.Equal(t, 2, len(switchTask.Switch)) + assert.Equal(t, &RuntimeExpression{Value: "${condition1}"}, switchTask.Switch[0]["case1"].When) + assert.Equal(t, &FlowDirective{Value: "next"}, switchTask.Switch[0]["case1"].Then) + assert.Equal(t, &RuntimeExpression{Value: "${condition2}"}, switchTask.Switch[1]["case2"].When) + assert.Equal(t, &FlowDirective{Value: "end"}, switchTask.Switch[1]["case2"].Then) +} + +func TestSwitchTask_Validation(t *testing.T) { + // Valid SwitchTask + switchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + }, + } + assert.NoError(t, validate.Struct(switchTask)) + + // Invalid SwitchTask (empty switch) + invalidSwitchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{}, + } + assert.Error(t, validate.Struct(invalidSwitchTask)) + + // Invalid SwitchTask (SwitchItem with multiple keys) + invalidSwitchItemTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{When: &RuntimeExpression{Value: "${condition1}"}, Then: &FlowDirective{Value: "next"}}, + "case2": SwitchCase{When: &RuntimeExpression{Value: "${condition2}"}, Then: &FlowDirective{Value: "end"}}, + }, + }, + } + assert.Error(t, validate.Struct(invalidSwitchItemTask)) +} diff --git a/model/task_test.go b/model/task_test.go new file mode 100644 index 0000000..fdd07cf --- /dev/null +++ b/model/task_test.go @@ -0,0 +1,188 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + validator "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestTaskList_UnmarshalJSON(t *testing.T) { + jsonData := `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}}]}} + ]` + + var taskList TaskList + err := json.Unmarshal([]byte(jsonData), &taskList) + assert.NoError(t, err) + assert.Equal(t, 2, len(taskList)) + + task1 := taskList.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := taskList.Key("task2").AsDoTask() + assert.NotNil(t, task2) + assert.Equal(t, 1, len(*task2.Do)) + + task3 := task2.Do.Key("task3").AsCallOpenAPITask() + assert.NotNil(t, task3) + assert.Equal(t, "openapi", task3.Call) + assert.Equal(t, "doc1", task3.With.Document.Name) + assert.Equal(t, "op1", task3.With.OperationID) +} + +func TestTaskList_MarshalJSON(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + data, err := json.Marshal(taskList) + assert.NoError(t, err) + assert.JSONEq(t, `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}}]}} + ]`, string(data)) +} + +func TestTaskList_Validation(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + // Validate each TaskItem explicitly + for _, taskItem := range taskList { + err := validate.Struct(taskItem) + if err != nil { + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Errorf("Validation failed on field '%s' with tag '%s'", validationErr.Field(), validationErr.Tag()) + } + } else { + t.Errorf("Unexpected error: %v", err) + } + } + } + +} + +func TestTaskList_Next_Sequential(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + &TaskItem{Key: "task3", Task: &SetTask{Set: map[string]interface{}{"key3": "value3"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task2", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task3", 
currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_WithThenDirective(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "task3"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + &TaskItem{Key: "task3", Task: &SetTask{Set: map[string]interface{}{"key3": "value3"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task3", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_Termination(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "end"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_InvalidThenReference(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "unknown"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} diff --git a/model/task_try.go b/model/task_try.go new file mode 100644 index 0000000..57ba9df --- /dev/null +++ b/model/task_try.go @@ -0,0 +1,206 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TryTask struct { + TaskBase `json:",inline"` + Try *TaskList `json:"try" validate:"required,dive"` + Catch *TryTaskCatch `json:"catch" validate:"required"` +} + +func (t *TryTask) GetBase() *TaskBase { + return &t.TaskBase +} + +type TryTaskCatch struct { + Errors struct { + With *ErrorFilter `json:"with,omitempty"` + } `json:"errors,omitempty"` + As string `json:"as,omitempty"` + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Retry *RetryPolicy `json:"retry,omitempty"` + Do *TaskList `json:"do,omitempty" validate:"omitempty,dive"` +} + +// RetryPolicy defines a retry policy. 
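+//
+// Within a catch clause, a retry is either a string reference to a reusable
+// policy or an inline definition (illustrative snippets, mirroring the tests
+// in this package; "default" is a placeholder policy name):
+//
+//	{"retry": "default"}
+//	{"retry": {"delay": {"seconds": 3}, "backoff": {"exponential": {}}, "limit": {"attempt": {"count": 5}}}}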
+type RetryPolicy struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + Ref string `json:"-"` // Reference to a reusable retry policy +} + +// MarshalJSON for RetryPolicy to ensure proper serialization. +func (rp *RetryPolicy) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + }{ + When: rp.When, + ExceptWhen: rp.ExceptWhen, + Delay: rp.Delay, + Backoff: rp.Backoff, + Limit: rp.Limit, + Jitter: rp.Jitter, + }) +} + +// UnmarshalJSON for RetryPolicy to ensure proper deserialization. +func (rp *RetryPolicy) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy: %w", err) + } + + switch v := raw.(type) { + case string: + // If it's a string, treat it as a reference + rp.Ref = v + case map[string]interface{}: + // If it's an object, unmarshal into the struct + type Alias RetryPolicy + alias := &struct { + *Alias + }{ + Alias: (*Alias)(rp), + } + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy object: %w", err) + } + default: + return fmt.Errorf("invalid RetryPolicy type: %T", v) + } + + return nil +} + +func (rp *RetryPolicy) ResolveReference(retries map[string]*RetryPolicy) error { + if rp.Ref == "" { + // No reference to resolve + return nil + } + + resolved, exists := retries[rp.Ref] + if !exists { + return fmt.Errorf("retry policy reference %q not found", rp.Ref) + } + + // Copy resolved policy fields into the current RetryPolicy + *rp = *resolved + rp.Ref = "" // Clear the reference to avoid confusion + + return nil +} + +func ResolveRetryPolicies(tasks []TryTaskCatch, retries map[string]*RetryPolicy) error { + for i := range tasks { + if tasks[i].Retry != nil { + if err := tasks[i].Retry.ResolveReference(retries); err != nil { + return fmt.Errorf("failed to resolve retry policy for task %q: %w", tasks[i].As, err) + } + } + } + return nil +} + +// RetryBackoff defines the retry backoff strategies. +type RetryBackoff struct { + Constant *BackoffDefinition `json:"constant,omitempty"` + Exponential *BackoffDefinition `json:"exponential,omitempty"` + Linear *BackoffDefinition `json:"linear,omitempty"` +} + +// MarshalJSON for RetryBackoff to ensure oneOf behavior. 
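+//
+// Exactly one strategy key ("constant", "exponential", or "linear") is
+// emitted, e.g. (illustrative): {"exponential": {"factor": 2}}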
+func (rb *RetryBackoff) MarshalJSON() ([]byte, error) { + switch { + case rb.Constant != nil: + return json.Marshal(map[string]interface{}{"constant": rb.Constant.Definition}) + case rb.Exponential != nil: + return json.Marshal(map[string]interface{}{"exponential": rb.Exponential.Definition}) + case rb.Linear != nil: + return json.Marshal(map[string]interface{}{"linear": rb.Linear.Definition}) + default: + return nil, errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") + } +} + +func (rb *RetryBackoff) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryBackoff: %w", err) + } + + if rawConstant, ok := raw["constant"]; ok { + rb.Constant = &BackoffDefinition{} + if err := json.Unmarshal(rawConstant, &rb.Constant.Definition); err != nil { + return fmt.Errorf("failed to unmarshal constant backoff: %w", err) + } + return nil + } + + if rawExponential, ok := raw["exponential"]; ok { + rb.Exponential = &BackoffDefinition{} + if err := json.Unmarshal(rawExponential, &rb.Exponential.Definition); err != nil { + return fmt.Errorf("failed to unmarshal exponential backoff: %w", err) + } + return nil + } + + if rawLinear, ok := raw["linear"]; ok { + rb.Linear = &BackoffDefinition{} + if err := json.Unmarshal(rawLinear, &rb.Linear.Definition); err != nil { + return fmt.Errorf("failed to unmarshal linear backoff: %w", err) + } + return nil + } + + return errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") +} + +type BackoffDefinition struct { + Definition map[string]interface{} `json:"definition,omitempty"` +} + +// RetryLimit defines the retry limit configurations. +type RetryLimit struct { + Attempt *RetryLimitAttempt `json:"attempt,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryLimitAttempt defines the limit for each retry attempt. +type RetryLimitAttempt struct { + Count int `json:"count,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryPolicyJitter defines the randomness or variability of retry delays. +type RetryPolicyJitter struct { + From *Duration `json:"from" validate:"required"` + To *Duration `json:"to" validate:"required"` +} diff --git a/model/task_try_test.go b/model/task_try_test.go new file mode 100644 index 0000000..4daf839 --- /dev/null +++ b/model/task_try_test.go @@ -0,0 +1,171 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRetryPolicy_MarshalJSON(t *testing.T) { + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{ + Definition: map[string]interface{}{"factor": 2}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + + data, err := json.Marshal(retryPolicy) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }`, string(data)) +} + +func TestRetryPolicy_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }` + + var retryPolicy RetryPolicy + err := json.Unmarshal([]byte(jsonData), &retryPolicy) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${someCondition}"}, retryPolicy.When) + assert.Equal(t, &RuntimeExpression{"${someOtherCondition}"}, retryPolicy.ExceptWhen) + assert.Equal(t, NewDurationExpr("PT5S"), retryPolicy.Delay) + assert.NotNil(t, retryPolicy.Backoff.Exponential) + assert.Equal(t, map[string]interface{}{"factor": float64(2)}, retryPolicy.Backoff.Exponential.Definition) + assert.Equal(t, 3, retryPolicy.Limit.Attempt.Count) + assert.Equal(t, NewDurationExpr("PT1M"), retryPolicy.Limit.Attempt.Duration) + assert.Equal(t, NewDurationExpr("PT10M"), retryPolicy.Limit.Duration) + assert.Equal(t, NewDurationExpr("PT1S"), retryPolicy.Jitter.From) + assert.Equal(t, NewDurationExpr("PT3S"), retryPolicy.Jitter.To) +} + +func TestRetryPolicy_Validation(t *testing.T) { + // Valid RetryPolicy + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Constant: &BackoffDefinition{ + Definition: map[string]interface{}{"delay": 5}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + assert.NoError(t, validate.Struct(retryPolicy)) + + // Invalid RetryPolicy (missing required fields in Jitter) + invalidRetryPolicy := RetryPolicy{ + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + }, + } + assert.Error(t, validate.Struct(invalidRetryPolicy)) +} + +func TestRetryPolicy_UnmarshalJSON_WithReference(t *testing.T) { + retries := map[string]*RetryPolicy{ + "default": { + Delay: &Duration{DurationInline{Seconds: 3}}, + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{}, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{Count: 5}, + }, + }, + } + + jsonData := `{ + "retry": "default" + }` + + var task TryTaskCatch + err := 
json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + // Resolve the reference + err = task.Retry.ResolveReference(retries) + assert.NoError(t, err) + + assert.Equal(t, retries["default"].Delay, task.Retry.Delay) + assert.Equal(t, retries["default"].Backoff, task.Retry.Backoff) + assert.Equal(t, retries["default"].Limit, task.Retry.Limit) +} + +func TestRetryPolicy_UnmarshalJSON_Inline(t *testing.T) { + jsonData := `{ + "retry": { + "delay": { "seconds": 3 }, + "backoff": { "exponential": {} }, + "limit": { "attempt": { "count": 5 } } + } + }` + + var task TryTaskCatch + err := json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + assert.NotNil(t, task.Retry) + assert.Equal(t, int32(3), task.Retry.Delay.AsInline().Seconds) + assert.NotNil(t, task.Retry.Backoff.Exponential) + assert.Equal(t, 5, task.Retry.Limit.Attempt.Count) +} diff --git a/model/task_wait.go b/model/task_wait.go new file mode 100644 index 0000000..e312824 --- /dev/null +++ b/model/task_wait.go @@ -0,0 +1,72 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// WaitTask represents a task configuration to delay execution for a specified duration. +type WaitTask struct { + TaskBase `json:",inline"` + Wait *Duration `json:"wait" validate:"required"` +} + +func (wt *WaitTask) GetBase() *TaskBase { + return &wt.TaskBase +} + +// MarshalJSON for WaitTask to ensure proper serialization. +func (wt *WaitTask) MarshalJSON() ([]byte, error) { + type Alias WaitTask + waitData, err := json.Marshal(wt.Wait) + if err != nil { + return nil, err + } + + alias := struct { + Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (Alias)(*wt), + Wait: waitData, + } + + return json.Marshal(alias) +} + +// UnmarshalJSON for WaitTask to ensure proper deserialization. +func (wt *WaitTask) UnmarshalJSON(data []byte) error { + type Alias WaitTask + alias := struct { + *Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (*Alias)(wt), + } + + // Unmarshal data into alias + if err := json.Unmarshal(data, &alias); err != nil { + return fmt.Errorf("failed to unmarshal WaitTask: %w", err) + } + + // Unmarshal Wait field + if err := json.Unmarshal(alias.Wait, &wt.Wait); err != nil { + return fmt.Errorf("failed to unmarshal Wait field: %w", err) + } + + return nil +} diff --git a/model/task_wait_test.go b/model/task_wait_test.go new file mode 100644 index 0000000..6dda965 --- /dev/null +++ b/model/task_wait_test.go @@ -0,0 +1,88 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestWaitTask_MarshalJSON(t *testing.T) { + waitTask := &WaitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Wait: NewDurationExpr("P1DT1H"), + } + + data, err := json.Marshal(waitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }`, string(data)) +} + +func TestWaitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }` + + waitTask := &WaitTask{} + err := json.Unmarshal([]byte(jsonData), waitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, waitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, waitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, waitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, waitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, waitTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, waitTask.Metadata) + assert.Equal(t, NewDurationExpr("P1DT1H"), waitTask.Wait) +} + +func TestWaitTask_Validation(t *testing.T) { + // Valid WaitTask + waitTask := &WaitTask{ + TaskBase: TaskBase{}, + Wait: NewDurationExpr("P1DT1H"), + } + assert.NoError(t, validate.Struct(waitTask)) + + // Invalid WaitTask (empty wait) + invalidWaitTask := &WaitTask{ + TaskBase: TaskBase{}, + } + assert.Error(t, validate.Struct(invalidWaitTask)) +} diff --git a/model/timeout.go b/model/timeout.go new file mode 100644 index 0000000..dd63af8 --- /dev/null +++ b/model/timeout.go @@ -0,0 +1,232 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package model
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+)
+
+// Timeout specifies a time limit for tasks or workflows.
+type Timeout struct {
+	// After is the duration after which the task or workflow times out.
+	After *Duration `json:"after" validate:"required"`
+}
+
+// UnmarshalJSON implements custom unmarshalling for Timeout.
+func (t *Timeout) UnmarshalJSON(data []byte) error {
+	var raw map[string]json.RawMessage
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+
+	// Check if "after" key exists
+	afterData, ok := raw["after"]
+	if !ok {
+		return errors.New("missing 'after' key in Timeout JSON")
+	}
+
+	// Unmarshal "after" using the Duration type
+	if err := json.Unmarshal(afterData, &t.After); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+// MarshalJSON implements custom marshalling for Timeout.
+func (t *Timeout) MarshalJSON() ([]byte, error) {
+	// Check the type of t.After.Value
+	switch v := t.After.Value.(type) {
+	case DurationInline:
+		// Serialize inline duration
+		return json.Marshal(map[string]interface{}{
+			"after": v,
+		})
+	case DurationExpression:
+		// Serialize expression as a simple string
+		return json.Marshal(map[string]string{
+			"after": v.Expression,
+		})
+	case string:
+		// Handle direct string values as DurationExpression
+		return json.Marshal(map[string]string{
+			"after": v,
+		})
+	default:
+		return nil, errors.New("unknown Duration type in Timeout")
+	}
+}
+
+// TimeoutOrReference handles either a Timeout definition or a reference (string).
+type TimeoutOrReference struct {
+	Timeout   *Timeout `json:"-" validate:"required_without=Reference"`
+	Reference *string  `json:"-" validate:"required_without=Timeout"`
+}
+
+// UnmarshalJSON implements custom unmarshalling for TimeoutOrReference.
+func (tr *TimeoutOrReference) UnmarshalJSON(data []byte) error {
+	// Attempt to unmarshal as a Timeout
+	var asTimeout Timeout
+	if err := json.Unmarshal(data, &asTimeout); err == nil {
+		tr.Timeout = &asTimeout
+		tr.Reference = nil
+		return nil
+	}
+
+	// Attempt to unmarshal as a string (reference)
+	var asString string
+	if err := json.Unmarshal(data, &asString); err == nil {
+		tr.Reference = &asString
+		tr.Timeout = nil
+		return nil
+	}
+
+	// If neither works, return an error
+	return errors.New("invalid TimeoutOrReference: must be a Timeout or a string reference")
+}
+
+// MarshalJSON implements custom marshalling for TimeoutOrReference.
+func (tr *TimeoutOrReference) MarshalJSON() ([]byte, error) {
+	// Marshal as a Timeout if present
+	if tr.Timeout != nil {
+		return json.Marshal(tr.Timeout)
+	}
+
+	// Marshal as a string reference if present
+	if tr.Reference != nil {
+		return json.Marshal(tr.Reference)
+	}
+
+	return nil, errors.New("invalid TimeoutOrReference: neither Timeout nor Reference is set")
+}
+
+// Duration represents a flexible duration that can be either inline or an ISO 8601 expression.
+type Duration struct { + Value interface{} `json:"-"` +} + +// NewDurationExpr accessor to create a Duration object from a string +func NewDurationExpr(durationExpression string) *Duration { + return &Duration{DurationExpression{durationExpression}} +} + +func (d *Duration) AsExpression() string { + switch v := d.Value.(type) { + case string: + return v + case DurationExpression: + return v.String() + default: + return "" + } +} + +func (d *Duration) AsInline() *DurationInline { + switch v := d.Value.(type) { + case DurationInline: + return &v + default: + return nil + } +} + +// UnmarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) UnmarshalJSON(data []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err == nil { + validKeys := map[string]bool{"days": true, "hours": true, "minutes": true, "seconds": true, "milliseconds": true} + for key := range raw { + if !validKeys[key] { + return fmt.Errorf("unexpected key '%s' in duration object", key) + } + } + + inline := DurationInline{} + if err := json.Unmarshal(data, &inline); err != nil { + return fmt.Errorf("failed to unmarshal DurationInline: %w", err) + } + d.Value = inline + return nil + } + + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + d.Value = DurationExpression{Expression: asString} + return nil + } + + return errors.New("data must be a valid duration string or object") +} + +// MarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) MarshalJSON() ([]byte, error) { + switch v := d.Value.(type) { + case DurationInline: + return json.Marshal(v) + case DurationExpression: + return json.Marshal(v.Expression) + case string: + durationExpression := &DurationExpression{Expression: v} + return json.Marshal(durationExpression) + default: + return nil, errors.New("unknown Duration type") + } +} + +// DurationInline represents the inline definition of a duration. +type DurationInline struct { + Days int32 `json:"days,omitempty"` + Hours int32 `json:"hours,omitempty"` + Minutes int32 `json:"minutes,omitempty"` + Seconds int32 `json:"seconds,omitempty"` + Milliseconds int32 `json:"milliseconds,omitempty"` +} + +// MarshalJSON for DurationInline. +func (d *DurationInline) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "days": d.Days, + "hours": d.Hours, + "minutes": d.Minutes, + "seconds": d.Seconds, + "milliseconds": d.Milliseconds, + }) +} + +// DurationExpression represents the ISO 8601 expression of a duration. +type DurationExpression struct { + Expression string `json:"-" validate:"required,iso8601_duration"` +} + +func (d *DurationExpression) String() string { + return d.Expression +} + +// MarshalJSON for DurationExpression. +func (d *DurationExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(d.Expression) +} + +// UnmarshalJSON for DurationExpression to handle ISO 8601 strings. +func (d *DurationExpression) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err != nil { + return err + } + d.Expression = asString + return nil +} diff --git a/model/timeout_test.go b/model/timeout_test.go new file mode 100644 index 0000000..ae17555 --- /dev/null +++ b/model/timeout_test.go @@ -0,0 +1,228 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTimeout_UnmarshalJSON(t *testing.T) { + // Test cases for Timeout unmarshalling + tests := []struct { + name string + jsonStr string + expect *Timeout + err bool + }{ + { + name: "Valid inline duration", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + err: false, + }, + { + name: "Valid ISO 8601 duration", + jsonStr: `{"after": "P1Y2M3DT4H5M6S"}`, + expect: &Timeout{ + After: NewDurationExpr("P1Y2M3DT4H5M6S"), + }, + err: false, + }, + { + name: "Invalid duration type", + jsonStr: `{"after": {"unknown": "value"}}`, + expect: nil, + err: true, + }, + { + name: "Missing after key", + jsonStr: `{}`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var timeout Timeout + err := json.Unmarshal([]byte(test.jsonStr), &timeout) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &timeout) + } + }) + } +} + +func TestTimeout_MarshalJSON(t *testing.T) { + tests := []struct { + name string + input *Timeout + expected string + wantErr bool + }{ + { + name: "ISO 8601 Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationExpression{Expression: "PT1H"}, + }, + }, + expected: `{"after":"PT1H"}`, + wantErr: false, + }, + { + name: "Inline Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationInline{ + Days: 1, + Hours: 2, + Minutes: 30, + }, + }, + }, + expected: `{"after":{"days":1,"hours":2,"minutes":30}}`, + wantErr: false, + }, + { + name: "Invalid Duration", + input: &Timeout{After: &Duration{Value: 123}}, + expected: "", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, err := json.Marshal(tt.input) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, tt.expected, string(data)) + } + }) + } +} + +func TestTimeoutOrReference_UnmarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference unmarshalling + tests := []struct { + name string + jsonStr string + expect *TimeoutOrReference + err bool + }{ + { + name: "Valid Timeout", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + err: false, + }, + { + name: "Valid Ref", + jsonStr: `"some-timeout-reference"`, + expect: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + err: false, + }, + { + name: "Invalid JSON", + jsonStr: `{"invalid": }`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var tor TimeoutOrReference + err := json.Unmarshal([]byte(test.jsonStr), &tor) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &tor) + } + }) + } +} + +func ptrString(s string) *string { + return &s +} + +func 
TestTimeoutOrReference_MarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference marshalling + tests := []struct { + name string + input *TimeoutOrReference + expect string + err bool + }{ + { + name: "Valid Timeout", + input: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + expect: `{"after":{"days":1,"hours":2}}`, + err: false, + }, + { + name: "Valid Ref", + input: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + expect: `"some-timeout-reference"`, + err: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.input) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expect, string(data)) + } + }) + } +} diff --git a/model/util.go b/model/util.go deleted file mode 100644 index 1cfd08b..0000000 --- a/model/util.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2020 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "bytes" - "encoding/json" - "net/http" - "os" - "path/filepath" - "strings" -) - -const prefix = "file:/" - -// TRUE used by bool fields that needs a boolean pointer -var TRUE = true - -// FALSE used by bool fields that needs a boolean pointer -var FALSE = false - -func getBytesFromFile(s string) (b []byte, err error) { - // #nosec - if resp, err := http.Get(s); err == nil { - defer resp.Body.Close() - buf := new(bytes.Buffer) - if _, err = buf.ReadFrom(resp.Body); err != nil { - return nil, err - } - return buf.Bytes(), nil - } - if strings.HasPrefix(s, prefix) { - s = strings.TrimPrefix(s, prefix) - } else if s, err = filepath.Abs(s); err != nil { - return nil, err - } - if b, err = os.ReadFile(filepath.Clean(s)); err != nil { - return nil, err - } - return b, nil -} - -func requiresNotNilOrEmpty(value interface{}) string { - if value == nil { - return "" - } - return value.(string) -} - -func unmarshalString(data []byte) (string, error) { - var value string - if err := json.Unmarshal(data, &value); err != nil { - return "", err - } - return value, nil -} - -func unmarshalKey(key string, data map[string]json.RawMessage, output interface{}) error { - if _, found := data[key]; found { - if err := json.Unmarshal(data[key], output); err != nil { - return err - } - } - return nil -} - -// unmarshalFile same as calling unmarshalString following by getBytesFromFile. -// Assumes that the value inside `data` is a path to a known location. -// Returns the content of the file or a not nil error reference. 
-func unmarshalFile(data []byte) (b []byte, err error) { - filePath, err := unmarshalString(data) - if err != nil { - return nil, err - } - file, err := getBytesFromFile(filePath) - if err != nil { - return nil, err - } - return file, nil -} diff --git a/model/validator.go b/model/validator.go new file mode 100644 index 0000000..60b87b8 --- /dev/null +++ b/model/validator.go @@ -0,0 +1,390 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "errors" + "fmt" + "regexp" + "strings" + + validator "github.com/go-playground/validator/v10" +) + +var ( + iso8601DurationPattern = regexp.MustCompile(`^P(\d+Y)?(\d+M)?(\d+D)?(T(\d+H)?(\d+M)?(\d+S)?)?$`) + semanticVersionPattern = regexp.MustCompile(`^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) + hostnameRFC1123Pattern = regexp.MustCompile(`^(([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)*[a-zA-Z]{2,63}|[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)$`) +) + +var validate *validator.Validate + +func registerValidator(tag string, fn validator.Func) { + + if err := validate.RegisterValidation(tag, fn); err != nil { + panic(fmt.Sprintf("Failed to register validator '%s': %v", tag, err)) + } +} + +func init() { + validate = validator.New() + + registerValidator("basic_policy", validateBasicPolicy) + registerValidator("bearer_policy", validateBearerPolicy) + registerValidator("digest_policy", validateDigestPolicy) + registerValidator("oauth2_policy", validateOAuth2Policy) + registerValidator("client_auth_type", validateOptionalOAuthClientAuthentication) + registerValidator("encoding_type", validateOptionalOAuth2TokenRequestEncoding) + + registerValidator("hostname_rfc1123", func(fl validator.FieldLevel) bool { + return isHostnameValid(fl.Field().String()) + }) + registerValidator("uri_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriPattern.MatchString(value) + }) + registerValidator("uri_template_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriTemplatePattern.MatchString(value) + }) + registerValidator("semver_pattern", validateSemanticVersion) + registerValidator("iso8601_duration", validateISO8601Duration) + + registerValidator("object_or_string", validateObjectOrString) + registerValidator("object_or_runtime_expr", validateObjectOrRuntimeExpr) + registerValidator("string_or_runtime_expr", validateStringOrRuntimeExpr) + registerValidator("uri_template_or_runtime_expr", validateURITemplateOrRuntimeExpr) + registerValidator("json_pointer_or_runtime_expr", validateJsonPointerOrRuntimeExpr) + + registerValidator("switch_item", validateSwitchItem) + validate.RegisterStructValidation(validateTaskItem, TaskItem{}) +} + +func GetValidator() 
*validator.Validate {
+	return validate
+}
+
+// validateTaskItem is a struct-level validation function for TaskItem.
+func validateTaskItem(sl validator.StructLevel) {
+	taskItem := sl.Current().Interface().(TaskItem)
+
+	// Validate Key
+	if taskItem.Key == "" {
+		sl.ReportError(taskItem.Key, "Key", "Key", "required", "")
+		return
+	}
+
+	// Validate Task is not nil
+	if taskItem.Task == nil {
+		sl.ReportError(taskItem.Task, "Task", "Task", "required", "")
+		return
+	}
+
+	// Validate the concrete type of Task and capture nested errors
+	switch t := taskItem.Task.(type) {
+	case *CallHTTP:
+		validateConcreteTask(sl, t, "Task")
+	case *CallOpenAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallGRPC:
+		validateConcreteTask(sl, t, "Task")
+	case *CallAsyncAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallFunction:
+		validateConcreteTask(sl, t, "Task")
+	case *DoTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForkTask:
+		validateConcreteTask(sl, t, "Task")
+	case *EmitTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ListenTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RaiseTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RunTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SetTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SwitchTask:
+		validateConcreteTask(sl, t, "Task")
+	case *TryTask:
+		validateConcreteTask(sl, t, "Task")
+	case *WaitTask:
+		validateConcreteTask(sl, t, "Task")
+	default:
+		sl.ReportError(taskItem.Task, "Task", "Task", "unknown_task", "unrecognized task type")
+	}
+}
+
+// validateConcreteTask validates a concrete Task type and reports nested errors.
+func validateConcreteTask(sl validator.StructLevel, task interface{}, fieldName string) {
+	err := validate.Struct(task)
+	if err != nil {
+		var validationErrors validator.ValidationErrors
+		if errors.As(err, &validationErrors) {
+			for _, ve := range validationErrors {
+				// Report only nested fields to avoid duplicates
+				if ve.Namespace() != fieldName {
+					sl.ReportError(ve.Value(), fieldName+"."+ve.StructNamespace(), ve.StructField(), ve.Tag(), ve.Param())
+				}
+			}
+		}
+	}
+}
+
+// validateSwitchItem is a custom validation function for SwitchItem.
+func validateSwitchItem(fl validator.FieldLevel) bool {
+	switchItem, ok := fl.Field().Interface().(SwitchItem)
+	if !ok {
+		return false
+	}
+	return len(switchItem) == 1
+}
+
+// validateBasicPolicy ensures BasicAuthenticationPolicy has mutually exclusive fields set.
+func validateBasicPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BasicAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if (policy.Username != "" || policy.Password != "") && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateBearerPolicy ensures BearerAuthenticationPolicy has mutually exclusive fields set.
+func validateBearerPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BearerAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if policy.Token != "" && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateDigestPolicy ensures DigestAuthenticationPolicy has mutually exclusive fields set.
+func validateDigestPolicy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(DigestAuthenticationPolicy) + if !ok { + return false + } + if (policy.Username != "" || policy.Password != "") && policy.Use != "" { + return false + } + return true +} + +func validateOAuth2Policy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(OAuth2AuthenticationPolicy) + if !ok { + return false + } + + if (policy.Properties != nil || policy.Endpoints != nil) && policy.Use != "" { + return false // Both fields are set, invalid + } + if policy.Properties == nil && policy.Use == "" { + return false // Neither field is set, invalid + } + return true +} + +// validateOptionalOAuthClientAuthentication checks if the given value is a valid OAuthClientAuthenticationType. +func validateOptionalOAuthClientAuthentication(fl validator.FieldLevel) bool { + value := fl.Field().String() + + if len(value) == 0 { + return true + } + switch OAuthClientAuthenticationType(value) { + case + OAuthClientAuthClientSecretBasic, + OAuthClientAuthClientSecretPost, + OAuthClientAuthClientSecretJWT, + OAuthClientAuthPrivateKeyJWT, + OAuthClientAuthNone: + return true + default: + return false + } +} + +func validateOptionalOAuth2TokenRequestEncoding(fl validator.FieldLevel) bool { + value := fl.Field().String() + + // Allow empty fields (optional case) + if value == "" { + return true + } + + // Validate against allowed constants + switch OAuth2TokenRequestEncodingType(value) { + case + EncodingTypeFormUrlEncoded, + EncodingTypeApplicationJson: + return true + default: + return false + } +} + +func validateObjectOrString(fl validator.FieldLevel) bool { + // Access the "Value" field + value := fl.Field().Interface() + + // Validate based on the type of "Value" + switch v := value.(type) { + case string: + return v != "" // Validate non-empty strings. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Reject unsupported types. + } +} + +func validateObjectOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Unsupported types. + } +} + +func validateStringOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case string: + return v != "" // Validate non-empty strings. + default: + return false // Unsupported types. 
+ } +} + +func validateURITemplateOrRuntimeExpr(fl validator.FieldLevel) bool { + value := fl.Field().Interface() + + // Handle nil or empty values when 'omitempty' is used + if value == nil { + return true + } + + switch v := value.(type) { + case LiteralUri: + return LiteralUriPattern.MatchString(v.String()) + case LiteralUriTemplate: + return LiteralUriTemplatePattern.MatchString(v.String()) + case RuntimeExpression: + return v.IsValid() + case string: + // Check if the string is a valid URI + if LiteralUriPattern.MatchString(v) { + return true + } + + // Check if the string is a valid URI Template + if LiteralUriTemplatePattern.MatchString(v) { + return true + } + + // Check if the string is a valid RuntimeExpression + expression := RuntimeExpression{Value: v} + return expression.IsValid() + default: + fmt.Printf("Unsupported type in URITemplateOrRuntimeExpr.Value: %T\n", v) + return false + } +} + +func validateJsonPointerOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case string: // JSON Pointer + return JSONPointerPattern.MatchString(v) + case RuntimeExpression: + return v.IsValid() + default: + return false // Unsupported types. + } +} + +func validateISO8601Duration(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isISO8601DurationValid(input) +} + +func validateSemanticVersion(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isSemanticVersionValid(input) +} + +// isISO8601DurationValid validates if a string is a valid ISO 8601 duration. +func isISO8601DurationValid(input string) bool { + if !iso8601DurationPattern.MatchString(input) { + return false + } + + trimmed := strings.TrimPrefix(input, "P") + if trimmed == "" || trimmed == "T" { + return false + } + + return true +} + +// isSemanticVersionValid validates if a string is a valid semantic version. +func isSemanticVersionValid(input string) bool { + return semanticVersionPattern.MatchString(input) +} + +// isHostnameValid validates if a string is a valid RFC 1123 hostname. +func isHostnameValid(input string) bool { + return hostnameRFC1123Pattern.MatchString(input) +} diff --git a/model/validator_test.go b/model/validator_test.go new file mode 100644 index 0000000..6607369 --- /dev/null +++ b/model/validator_test.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" +) + +func TestRegexValidators(t *testing.T) { + testCases := []struct { + name string + validate func(string) bool + input string + expected bool + }{ + // ISO 8601 Duration Tests + {"ISO 8601 Duration Valid 1", isISO8601DurationValid, "P2Y", true}, + {"ISO 8601 Duration Valid 2", isISO8601DurationValid, "P1DT12H30M", true}, + {"ISO 8601 Duration Valid 3", isISO8601DurationValid, "P1Y2M3D", true}, + {"ISO 8601 Duration Valid 4", isISO8601DurationValid, "P1Y2M3D4H", false}, + {"ISO 8601 Duration Valid 5", isISO8601DurationValid, "P1Y", true}, + {"ISO 8601 Duration Valid 6", isISO8601DurationValid, "PT1H", true}, + {"ISO 8601 Duration Valid 7", isISO8601DurationValid, "P1Y2M3D4H5M6S", false}, + {"ISO 8601 Duration Invalid 1", isISO8601DurationValid, "P", false}, + {"ISO 8601 Duration Invalid 2", isISO8601DurationValid, "P1Y2M3D4H5M6S7", false}, + {"ISO 8601 Duration Invalid 3", isISO8601DurationValid, "1Y", false}, + + // Semantic Versioning Tests + {"Semantic Version Valid 1", isSemanticVersionValid, "1.0.0", true}, + {"Semantic Version Valid 2", isSemanticVersionValid, "1.2.3", true}, + {"Semantic Version Valid 3", isSemanticVersionValid, "1.2.3-beta", true}, + {"Semantic Version Valid 4", isSemanticVersionValid, "1.2.3-beta.1", true}, + {"Semantic Version Valid 5", isSemanticVersionValid, "1.2.3-beta.1+build.123", true}, + {"Semantic Version Invalid 1", isSemanticVersionValid, "v1.2.3", false}, + {"Semantic Version Invalid 2", isSemanticVersionValid, "1.2", false}, + {"Semantic Version Invalid 3", isSemanticVersionValid, "1.2.3-beta.x", true}, + + // RFC 1123 Hostname Tests + {"RFC 1123 Hostname Valid 1", isHostnameValid, "example.com", true}, + {"RFC 1123 Hostname Valid 2", isHostnameValid, "my-hostname", true}, + {"RFC 1123 Hostname Valid 3", isHostnameValid, "subdomain.example.com", true}, + {"RFC 1123 Hostname Invalid 1", isHostnameValid, "127.0.0.1", false}, + {"RFC 1123 Hostname Invalid 2", isHostnameValid, "example.com.", false}, + {"RFC 1123 Hostname Invalid 3", isHostnameValid, "example..com", false}, + {"RFC 1123 Hostname Invalid 4", isHostnameValid, "example.com-", false}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := tc.validate(tc.input) + if result != tc.expected { + t.Errorf("Validation failed for '%s': input='%s', expected=%v, got=%v", tc.name, tc.input, tc.expected, result) + } + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 986e497..15dba7e 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -1,10 +1,10 @@ -// Copyright 2021 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -16,686 +16,239 @@ package model import ( "encoding/json" + "errors" "fmt" ) -const ( - // DefaultExpressionLang ... - DefaultExpressionLang = "jq" - // ActionModeSequential ... - ActionModeSequential ActionMode = "sequential" - // ActionModeParallel ... 
- ActionModeParallel ActionMode = "parallel" - // UnlimitedTimeout description for unlimited timeouts - UnlimitedTimeout = "unlimited" -) - -var actionsModelMapping = map[string]func(state map[string]interface{}) State{ - StateTypeDelay: func(map[string]interface{}) State { return &DelayState{} }, - StateTypeEvent: func(map[string]interface{}) State { return &EventState{} }, - StateTypeOperation: func(map[string]interface{}) State { return &OperationState{} }, - StateTypeParallel: func(map[string]interface{}) State { return &ParallelState{} }, - StateTypeSwitch: func(s map[string]interface{}) State { - if _, ok := s["dataConditions"]; ok { - return &DataBasedSwitchState{} - } - return &EventBasedSwitchState{} - }, - StateTypeInject: func(map[string]interface{}) State { return &InjectState{} }, - StateTypeForEach: func(map[string]interface{}) State { return &ForEachState{} }, - StateTypeCallback: func(map[string]interface{}) State { return &CallbackState{} }, - StateTypeSleep: func(map[string]interface{}) State { return &SleepState{} }, -} - -// ActionMode ... -type ActionMode string - -// BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces -// to make it easy for custom unmarshalers implementations to unmarshal the common data structure. -type BaseWorkflow struct { - // Workflow unique identifier - ID string `json:"id" validate:"omitempty,min=1"` - // Key Domain-specific workflow identifier - Key string `json:"key,omitempty" validate:"omitempty,min=1"` - // Workflow name - Name string `json:"name" validate:"required"` - // Workflow description - Description string `json:"description,omitempty"` - // Workflow version - Version string `json:"version" validate:"omitempty,min=1"` - Start *Start `json:"start" validate:"required"` - // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important qualities - Annotations []string `json:"annotations,omitempty"` - // DataInputSchema URI of the JSON Schema used to validate the workflow data input - DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` - // Serverless Workflow schema version - SpecVersion string `json:"specVersion,omitempty" validate:"required"` - // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. - Secrets Secrets `json:"secrets,omitempty"` - // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. - Constants *Constants `json:"constants,omitempty"` - // Identifies the expression language used for workflow expressions. Default is 'jq' - ExpressionLang string `json:"expressionLang,omitempty" validate:"omitempty,min=1"` - // Timeouts definition for Workflow, State, Action, Branch, and Event consumption. - Timeouts *Timeouts `json:"timeouts,omitempty"` - // Errors declarations for this Workflow definition - Errors []Error `json:"errors,omitempty"` - // If 'true', workflow instances is not terminated when there are no active execution paths. Instance can be terminated via 'terminate end definition' or reaching defined 'execTimeout' - KeepActive bool `json:"keepActive,omitempty"` - // Metadata custom information shared with the runtime - Metadata Metadata `json:"metadata,omitempty"` - // AutoRetries If set to true, actions should automatically be retried on unchecked errors. 
Default is false - AutoRetries bool `json:"autoRetries,omitempty"` - // Auth definitions can be used to define authentication information that should be applied to resources defined in the operation - // property of function definitions. It is not used as authentication information for the function invocation, - // but just to access the resource containing the function invocation information. - Auth AuthDefinitions `json:"auth,omitempty"` -} - -// Workflow base definition +// Workflow represents the root structure of a workflow. type Workflow struct { - BaseWorkflow - States []State `json:"states" validate:"required,min=1"` - Events []Event `json:"events,omitempty"` - Functions []Function `json:"functions,omitempty"` - Retries []Retry `json:"retries,omitempty"` + Document Document `json:"document" yaml:"document" validate:"required"` + Input *Input `json:"input,omitempty" yaml:"input,omitempty"` + Use *Use `json:"use,omitempty" yaml:"use"` + Do *TaskList `json:"do" yaml:"do" validate:"required,dive"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" yaml:"timeout,omitempty"` + Output *Output `json:"output,omitempty" yaml:"output,omitempty"` + Schedule *Schedule `json:"schedule,omitempty" yaml:"schedule,omitempty"` } -// UnmarshalJSON implementation for json Unmarshal function for the Workflow type -func (w *Workflow) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &w.BaseWorkflow); err != nil { - return err - } - - workflowMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &workflowMap); err != nil { - return err - } - var rawStates []json.RawMessage - if err := json.Unmarshal(workflowMap["states"], &rawStates); err != nil { - return err - } - - w.States = make([]State, len(rawStates)) - mapState := map[string]interface{}{} - for i, rawState := range rawStates { - if err := json.Unmarshal(rawState, &mapState); err != nil { - return err - } - if _, ok := actionsModelMapping[mapState["type"].(string)]; !ok { - return fmt.Errorf("state %s not supported", mapState["type"]) - } - state := actionsModelMapping[mapState["type"].(string)](mapState) - if err := json.Unmarshal(rawState, &state); err != nil { - return err - } - w.States[i] = state - mapState = map[string]interface{}{} +// AsMap converts the Workflow struct into a JSON Map object. 
+func (w *Workflow) AsMap() (map[string]interface{}, error) { + jsonBytes, err := json.Marshal(w) + if err != nil { + return nil, err } - if _, ok := workflowMap["events"]; ok { - if err := json.Unmarshal(workflowMap["events"], &w.Events); err != nil { - var s string - if err := json.Unmarshal(workflowMap["events"], &s); err != nil { - return err - } - var nestedData []byte - if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - m := make(map[string][]Event) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Events = m["events"] - } - } - if _, ok := workflowMap["functions"]; ok { - if err := json.Unmarshal(workflowMap["functions"], &w.Functions); err != nil { - var s string - if err := json.Unmarshal(workflowMap["functions"], &s); err != nil { - return err - } - var nestedData []byte - if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - m := make(map[string][]Function) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Functions = m["functions"] - } - } - if _, ok := workflowMap["retries"]; ok { - if err := json.Unmarshal(workflowMap["retries"], &w.Retries); err != nil { - var s string - if err := json.Unmarshal(workflowMap["retries"], &s); err != nil { - return err - } - var nestedData []byte - if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - m := make(map[string][]Retry) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Retries = m["retries"] - } - } - if _, ok := workflowMap["errors"]; ok { - if err := json.Unmarshal(workflowMap["errors"], &w.Errors); err != nil { - nestedData, err := unmarshalFile(workflowMap["errors"]) - if err != nil { - return err - } - m := make(map[string][]Error) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Errors = m["errors"] - } - } - w.setDefaults() - return nil -} -func (w *Workflow) setDefaults() { - if len(w.ExpressionLang) == 0 { - w.ExpressionLang = DefaultExpressionLang + var m map[string]interface{} + if err = json.Unmarshal(jsonBytes, &m); err != nil { + return nil, err } + return m, nil } -// WorkflowRef holds a reference for a workflow definition -type WorkflowRef struct { - // Sub-workflow unique id - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - Version string `json:"version,omitempty"` -} - -// UnmarshalJSON ... -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - subflowRef := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &subflowRef); err != nil { - s.WorkflowID, err = unmarshalString(data) - if err != nil { - return err - } - return nil +func (w *Workflow) MarshalYAML() (interface{}, error) { + // Create a map to hold fields + data := map[string]interface{}{ + "document": w.Document, } - if err := unmarshalKey("version", subflowRef, &s.Version); err != nil { - return err - } - if err := unmarshalKey("workflowId", subflowRef, &s.WorkflowID); err != nil { - return err - } - - return nil -} -// Timeouts ... -type Timeouts struct { - // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). 
If not specified should be 'unlimited' - WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` - // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format) - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` - // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format) - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` -} - -// UnmarshalJSON ... -func (t *Timeouts) UnmarshalJSON(data []byte) error { - timeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &timeout); err != nil { - // assumes it's a reference to a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &t); err != nil { - return err - } - return nil + // Conditionally add fields + if w.Input != nil { + data["input"] = w.Input } - if err := unmarshalKey("workflowExecTimeout", timeout, &t.WorkflowExecTimeout); err != nil { - return err + if w.Use != nil { + data["use"] = w.Use } - if err := unmarshalKey("stateExecTimeout", timeout, &t.StateExecTimeout); err != nil { - return err + data["do"] = w.Do + if w.Timeout != nil { + data["timeout"] = w.Timeout } - if err := unmarshalKey("actionExecTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err + if w.Output != nil { + data["output"] = w.Output } - if err := unmarshalKey("branchExecTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err - } - if err := unmarshalKey("eventTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err + if w.Schedule != nil { + data["schedule"] = w.Schedule } - return nil + return data, nil } -// WorkflowExecTimeout ... -type WorkflowExecTimeout struct { - // Duration Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' - Duration string `json:"duration,omitempty" validate:"omitempty,min=1"` - // If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is abrupted. - Interrupt bool `json:"interrupt,omitempty"` - // Name of a workflow state to be executed before workflow instance is terminated - RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` +// Document holds metadata for the workflow. +type Document struct { + DSL string `json:"dsl" yaml:"dsl" validate:"required,semver_pattern"` + Namespace string `json:"namespace" yaml:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" yaml:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" yaml:"version" validate:"required,semver_pattern"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + Tags map[string]string `json:"tags,omitempty" yaml:"tags,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty" yaml:"metadata,omitempty"` } -// UnmarshalJSON ... 
-func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { - execTimeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &execTimeout); err != nil { - w.Duration, err = unmarshalString(data) - if err != nil { - return err - } - } else { - if err := unmarshalKey("duration", execTimeout, &w.Duration); err != nil { - return err - } - if err := unmarshalKey("interrupt", execTimeout, &w.Interrupt); err != nil { - return err - } - if err := unmarshalKey("runBefore", execTimeout, &w.RunBefore); err != nil { - return err - } - } - if len(w.Duration) == 0 { - w.Duration = UnlimitedTimeout - } - return nil +// Input Configures the workflow's input. +type Input struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + From *ObjectOrRuntimeExpr `json:"from,omitempty" validate:"omitempty"` } -// StateExecTimeout ... -type StateExecTimeout struct { - // Single state execution timeout, not including retries (ISO 8601 duration format) - Single string `json:"single,omitempty" validate:"omitempty,min=1"` - // Total state execution timeout, including retries (ISO 8601 duration format) - Total string `json:"total" validate:"required"` +// Output Configures the output of a workflow or task. +type Output struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -// UnmarshalJSON ... -func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - stateTimeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &stateTimeout); err != nil { - s.Total, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("total", stateTimeout, &s.Total); err != nil { - return err - } - if err := unmarshalKey("single", stateTimeout, &s.Single); err != nil { - return err - } - return nil +// Export Set the content of the context. +type Export struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -// Error declaration for workflow definitions -type Error struct { - // Name Domain-specific error name - Name string `json:"name" validate:"required"` - // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. Should not be defined if error is set to '*' - Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // OnError description - Description string `json:"description,omitempty"` +// Schedule the workflow. +type Schedule struct { + Every *Duration `json:"every,omitempty" validate:"omitempty"` + Cron string `json:"cron,omitempty" validate:"omitempty"` + After *Duration `json:"after,omitempty" validate:"omitempty"` + On *EventConsumptionStrategy `json:"on,omitempty" validate:"omitempty"` } -// Start definition -type Start struct { - StateName string `json:"stateName" validate:"required"` - Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` -} +const DefaultSchema = "json" -// UnmarshalJSON ... 
-func (s *Start) UnmarshalJSON(data []byte) error { - startMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &startMap); err != nil { - s.StateName, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("stateName", startMap, &s.StateName); err != nil { - return err - } - if err := unmarshalKey("schedule", startMap, &s.Schedule); err != nil { - return err - } - - return nil -} - -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - Transition Transition `json:"transition,omitempty"` - End End `json:"end,omitempty"` +// Schema represents the definition of a schema. +type Schema struct { + Format string `json:"format,omitempty"` + Document interface{} `json:"document,omitempty" validate:"omitempty"` + Resource *ExternalResource `json:"resource,omitempty" validate:"omitempty"` } -// Schedule ... -type Schedule struct { - // Time interval (must be repeating interval) described with ISO 8601 format. Declares when workflow instances will be automatically created. - Interval string `json:"interval,omitempty"` - Cron *Cron `json:"cron,omitempty"` - // Timezone name used to evaluate the interval & cron-expression. (default: UTC) - Timezone string `json:"timezone,omitempty"` +func (s *Schema) ApplyDefaults() { + if len(s.Format) == 0 { + s.Format = DefaultSchema + } } -// UnmarshalJSON ... -func (s *Schedule) UnmarshalJSON(data []byte) error { - scheduleMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &scheduleMap); err != nil { - s.Interval, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } +// UnmarshalJSON for Schema enforces "oneOf" behavior. +func (s *Schema) UnmarshalJSON(data []byte) error { + s.ApplyDefaults() - if err := unmarshalKey("interval", scheduleMap, &s.Interval); err != nil { + // Parse into a temporary map for flexibility + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { return err } - if err := unmarshalKey("cron", scheduleMap, &s.Cron); err != nil { - return err - } - if err := unmarshalKey("timezone", scheduleMap, &s.Timezone); err != nil { - return err - } - - return nil -} - -// Cron ... -type Cron struct { - // Repeating interval (cron expression) describing when the workflow instance should be created - Expression string `json:"expression" validate:"required"` - // Specific date and time (ISO 8601 format) when the cron expression invocation is no longer valid - ValidUntil string `json:"validUntil,omitempty"` -} -// UnmarshalJSON custom unmarshal function for Cron -func (c *Cron) UnmarshalJSON(data []byte) error { - cron := make(map[string]interface{}) - if err := json.Unmarshal(data, &cron); err != nil { - c.Expression, err = unmarshalString(data) - if err != nil { - return err + // Check for "document" + if doc, ok := raw["document"]; ok { + // Determine if "document" is a string or an object + switch doc.(type) { + case string: + s.Document = doc + case map[string]interface{}: + s.Document = doc + default: + return errors.New("invalid Schema: 'document' must be a string or an object") } - return nil } - c.Expression = requiresNotNilOrEmpty(cron["expression"]) - c.ValidUntil = requiresNotNilOrEmpty(cron["validUntil"]) - - return nil -} - -// Transition ... 
-type Transition struct { - // Name of state to transition to - NextState string `json:"nextState" validate:"required,min=1"` - // Array of events to be produced before the transition happens - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty" validate:"omitempty,dive"` - // If set to true, triggers workflow compensation when before this transition is taken. Default is false - Compensate bool `json:"compensate,omitempty"` -} - -// UnmarshalJSON ... -func (t *Transition) UnmarshalJSON(data []byte) error { - transitionMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &transitionMap); err != nil { - t.NextState, err = unmarshalString(data) + // Check for "resource" + if res, ok := raw["resource"]; ok { + var resource ExternalResource + resBytes, err := json.Marshal(res) if err != nil { - return err + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) + } + if err := json.Unmarshal(resBytes, &resource); err != nil { + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) } - return nil + s.Resource = &resource } - if err := unmarshalKey("compensate", transitionMap, &t.Compensate); err != nil { - return err - } - if err := unmarshalKey("produceEvents", transitionMap, &t.ProduceEvents); err != nil { - return err - } - if err := unmarshalKey("nextState", transitionMap, &t.NextState); err != nil { - return err + // Validate "oneOf" logic + if (s.Document != nil && s.Resource != nil) || (s.Document == nil && s.Resource == nil) { + return errors.New("invalid Schema: must specify either 'document' or 'resource', but not both") } return nil } -// OnError ... -type OnError struct { - // ErrorRef Reference to a unique workflow error definition. Used of errorRefs is not used - ErrorRef string `json:"errorRef,omitempty"` - // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used - ErrorRefs []string `json:"errorRefs,omitempty"` - // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if retries were unsuccessful. - Transition *Transition `json:"transition,omitempty"` - // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if retries were unsuccessful. - End *End `json:"end,omitempty"` -} +// MarshalJSON for Schema marshals the correct field. +func (s *Schema) MarshalJSON() ([]byte, error) { + s.ApplyDefaults() -// OnEvents ... -type OnEvents struct { - // References one or more unique event names in the defined workflow events - EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Specifies how actions are to be performed (in sequence of parallel) - ActionMode ActionMode `json:"actionMode,omitempty"` - // Actions to be performed if expression matches - Actions []Action `json:"actions,omitempty" validate:"omitempty,dive"` - // Event data filter - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -// Action ... -type Action struct { - // Unique action definition name - Name string `json:"name,omitempty"` - FunctionRef FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions - EventRef EventRef `json:"eventRef,omitempty"` - // References a sub-workflow to be executed - SubFlowRef WorkflowRef `json:"subFlowRef,omitempty"` - // Sleep Defines time period workflow execution should sleep before / after function execution - Sleep Sleep `json:"sleep,omitempty"` - // RetryRef References a defined workflow retry definition. 
If not defined the default retry policy is assumed - RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. Used only when `autoRetries` is set to `true` - NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. Used only when `autoRetries` is set to `false` - RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Action data filter - ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` -} - -// End definition -type End struct { - // If true, completes all execution flows in the given workflow instance - Terminate bool `json:"terminate,omitempty"` - // Defines events that should be produced - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` - // If set to true, triggers workflow compensation. Default is false - Compensate bool `json:"compensate,omitempty"` - ContinueAs ContinueAs `json:"continueAs,omitempty"` -} - -// UnmarshalJSON ... -func (e *End) UnmarshalJSON(data []byte) error { - endMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &endMap); err != nil { - e.Terminate = false - e.Compensate = false - return nil - } - - if err := unmarshalKey("compensate", endMap, &e.Compensate); err != nil { - return err - } - if err := unmarshalKey("terminate", endMap, &e.Terminate); err != nil { - return err + if s.Document != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "document": s.Document, + }) } - if err := unmarshalKey("produceEvents", endMap, &e.ProduceEvents); err != nil { - return err - } - if err := unmarshalKey("continueAs", endMap, &e.ContinueAs); err != nil { - return err + if s.Resource != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "resource": s.Resource, + }) } - return nil + return nil, errors.New("invalid Schema: no valid field to marshal") } -// ContinueAs ... -type ContinueAs struct { - WorkflowRef - // TODO: add object or string data type - // If string type, an expression which selects parts of the states data output to become the workflow data input of continued execution. If object type, a custom object to become the workflow data input of the continued execution - Data interface{} `json:"data,omitempty"` - // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. Overwrites any specific settings set by that workflow - WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` +type ExternalResource struct { + Name string `json:"name,omitempty"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// ProduceEvent ... -type ProduceEvent struct { - // References a name of a defined event - EventRef string `json:"eventRef" validate:"required"` - // TODO: add object or string data type - // If String, expression which selects parts of the states data output to become the data of the produced event. If object a custom object to become the data of produced event. 
- Data interface{} `json:"data,omitempty"` - // Add additional event extension context attributes - ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` +type Use struct { + Authentications map[string]*AuthenticationPolicy `json:"authentications,omitempty" validate:"omitempty,dive"` + Errors map[string]*Error `json:"errors,omitempty" validate:"omitempty,dive"` + Extensions ExtensionList `json:"extensions,omitempty" validate:"omitempty,dive"` + Functions NamedTaskMap `json:"functions,omitempty" validate:"omitempty,dive"` + Retries map[string]*RetryPolicy `json:"retries,omitempty" validate:"omitempty,dive"` + Secrets []string `json:"secrets,omitempty"` + Timeouts map[string]*Timeout `json:"timeouts,omitempty" validate:"omitempty,dive"` + Catalogs map[string]*Catalog `json:"catalogs,omitempty" validate:"omitempty,dive"` } -// StateDataFilter ... -type StateDataFilter struct { - // Workflow expression to filter the state data input - Input string `json:"input,omitempty"` - // Workflow expression that filters the state data output - Output string `json:"output,omitempty"` +type Catalog struct { + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// EventDataFilter ... -type EventDataFilter struct { - // Workflow expression that filters of the event data (payload) - Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the event payload should be added/merged into. If not specified, denotes, the top-level state data element. - ToStateData string `json:"toStateData,omitempty"` +// FlowDirective represents a directive that can be an enumerated or free-form string. +type FlowDirective struct { + Value string `json:"-" validate:"required"` // Ensure the value is non-empty. } -// Branch Definition -type Branch struct { - // Branch name - Name string `json:"name" validate:"required"` - // Actions to be executed in this branch - Actions []Action `json:"actions" validate:"required,min=1"` - // Timeouts State specific timeouts - Timeouts BranchTimeouts `json:"timeouts,omitempty"` -} - -// BranchTimeouts ... -type BranchTimeouts struct { - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` -} - -// ActionDataFilter ... -type ActionDataFilter struct { - // Workflow expression that selects state data that the state action can use - FromStateData string `json:"fromStateData,omitempty"` - // Workflow expression that filters the actions' data results - Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element - ToStateData string `json:"toStateData,omitempty"` -} +type FlowDirectiveType string -// DataInputSchema ... -type DataInputSchema struct { - Schema string `json:"schema" validate:"required"` - FailOnValidationErrors *bool `json:"failOnValidationErrors" validate:"required"` -} - -// UnmarshalJSON ... 
-func (d *DataInputSchema) UnmarshalJSON(data []byte) error { - dataInSchema := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &dataInSchema); err != nil { - d.Schema, err = unmarshalString(data) - if err != nil { - return err - } - d.FailOnValidationErrors = &TRUE - return nil - } - if err := unmarshalKey("schema", dataInSchema, &d.Schema); err != nil { - return err - } - if err := unmarshalKey("failOnValidationErrors", dataInSchema, &d.FailOnValidationErrors); err != nil { - return err - } +const ( + FlowDirectiveContinue FlowDirectiveType = "continue" + FlowDirectiveExit FlowDirectiveType = "exit" + FlowDirectiveEnd FlowDirectiveType = "end" +) - return nil +// Enumerated values for FlowDirective. +var validFlowDirectives = map[string]struct{}{ + "continue": {}, + "exit": {}, + "end": {}, } -// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. -type Secrets []string - -// UnmarshalJSON ... -func (s *Secrets) UnmarshalJSON(data []byte) error { - var secretArray []string - if err := json.Unmarshal(data, &secretArray); err != nil { - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &secretArray); err != nil { - return err - } - } - *s = secretArray - return nil +// IsEnum checks if the FlowDirective matches one of the enumerated values. +func (f *FlowDirective) IsEnum() bool { + _, exists := validFlowDirectives[f.Value] + return exists } -// Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. -type Constants struct { - // Data represents the generic structure of the constants value - Data map[string]json.RawMessage `json:",omitempty"` +// IsTermination checks if the FlowDirective matches FlowDirectiveExit or FlowDirectiveEnd. +func (f *FlowDirective) IsTermination() bool { + return f.Value == string(FlowDirectiveExit) || f.Value == string(FlowDirectiveEnd) } -// UnmarshalJSON ... -func (c *Constants) UnmarshalJSON(data []byte) error { - constantData := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &constantData); err != nil { - // assumes it's a reference to a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &constantData); err != nil { - return err - } +func (f *FlowDirective) UnmarshalJSON(data []byte) error { + var value string + if err := json.Unmarshal(data, &value); err != nil { + return err } - c.Data = constantData + f.Value = value return nil } -// Sleep ... -type Sleep struct { - // Before Amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. - Before string `json:"before,omitempty"` - // After Amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. - After string `json:"after,omitempty"` +func (f *FlowDirective) MarshalJSON() ([]byte, error) { + return json.Marshal(f.Value) } diff --git a/model/workflow_test.go b/model/workflow_test.go new file mode 100644 index 0000000..c88de64 --- /dev/null +++ b/model/workflow_test.go @@ -0,0 +1,569 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
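FlowDirective, defined above, deliberately accepts any non-empty string, with IsEnum and IsTermination distinguishing the reserved continue/exit/end values from free-form directives. A minimal round-trip sketch using only the helpers shown above (module path assumed as in parser.go below):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	var then model.FlowDirective
	// "exit" is one of the reserved directives; any other non-empty
	// string is kept as a free-form directive.
	if err := json.Unmarshal([]byte(`"exit"`), &then); err != nil {
		panic(err)
	}

	fmt.Println(then.IsEnum())        // true
	fmt.Println(then.IsTermination()) // true: "exit" and "end" both terminate

	out, _ := json.Marshal(&then)
	fmt.Println(string(out)) // "exit"
}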
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "testing" + + validator "github.com/go-playground/validator/v10" + + "github.com/stretchr/testify/assert" +) + +func TestDocument_JSONMarshal(t *testing.T) { + doc := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", + }, + } + + data, err := json.Marshal(doc) + assert.NoError(t, err) + + expectedJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` + + // Use JSON comparison to avoid formatting mismatches + var expected, actual map[string]interface{} + assert.NoError(t, json.Unmarshal([]byte(expectedJSON), &expected)) + assert.NoError(t, json.Unmarshal(data, &actual)) + assert.Equal(t, expected, actual) +} + +func TestDocument_JSONUnmarshal(t *testing.T) { + inputJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) + + expected := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", + }, + } + + assert.Equal(t, expected, doc) +} + +func TestDocument_JSONUnmarshal_InvalidJSON(t *testing.T) { + invalidJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "tags": { + "env": "prod", + "team": "workflow" + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` // Missing closing brace for "tags" + + var doc Document + err := json.Unmarshal([]byte(invalidJSON), &doc) + assert.Error(t, err) +} + +func TestDocument_Validation_MissingRequiredField(t *testing.T) { + inputJSON := `{ + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0" + }` // Missing "dsl" + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) // JSON is valid for unmarshalling + + // Validate the struct + err = validate.Struct(doc) + assert.Error(t, err) + + // Assert that the error is specifically about the missing "dsl" field + assert.Contains(t, err.Error(), "Key: 'Document.DSL' Error:Field validation for 'DSL' 
failed on the 'required' tag") +} + +func TestSchemaValidation(t *testing.T) { + + tests := []struct { + name string + jsonInput string + valid bool + }{ + // Valid Cases + { + name: "Valid Inline Schema", + jsonInput: `{ + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid External Schema Without Name", + jsonInput: `{ + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid Inline Schema with Format", + jsonInput: `{ + "format": "yaml", + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema with Format", + jsonInput: `{ + "format": "xml", + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + // Invalid Cases + { + name: "Invalid Both Document and Resource", + jsonInput: `{ + "document": "{\"key\":\"value\"}", + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: false, + }, + { + name: "Invalid Missing Both Document and Resource", + jsonInput: `{ + "format": "json" + }`, + valid: false, + }, + { + name: "Invalid Resource Without Endpoint", + jsonInput: `{ + "resource": { + "name": "external-schema" + } + }`, + valid: false, + }, + { + name: "Invalid Resource with Invalid URL", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "not-a-valid-url" + } + } + }`, + valid: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var schema Schema + err := json.Unmarshal([]byte(tt.jsonInput), &schema) + if tt.valid { + // Assert no unmarshalling error + assert.NoError(t, err) + + // Validate the struct + err = validate.Struct(schema) + assert.NoError(t, err, "Expected valid schema but got validation error: %v", err) + } else { + // Assert unmarshalling or validation error + if err == nil { + err = validate.Struct(schema) + } + assert.Error(t, err, "Expected validation error but got none") + } + }) + } +} + +type InputTestCase struct { + Name string + Input Input + ShouldErr bool +} + +func TestInputValidation(t *testing.T) { + cases := []InputTestCase{ + { + Name: "Valid input with Schema and From (object)", + Input: Input{ + Schema: &Schema{ + Format: "json", + Document: func() *string { + doc := "example schema" + return &doc + }(), + }, + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{ + "key": "value", + }, + }, + }, + ShouldErr: false, + }, + { + Name: "Invalid input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "example input", + }, + }, + ShouldErr: true, + }, + { + Name: "Valid input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "${ expression }", + }, + }, + ShouldErr: true, + }, + { + Name: "Invalid input with Empty From (expr)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: "", + }, + }, + ShouldErr: true, + }, + { + Name: "Invalid input with Empty From (object)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{}, + }, + }, + ShouldErr: true, + }, + { + Name: "Invalid input with Unsupported From Type", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: 123, + }, + }, + 
ShouldErr: true, + }, + { + Name: "Valid input with Schema Only", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + }, + ShouldErr: false, + }, + { + Name: "input with Neither Schema Nor From", + Input: Input{}, + ShouldErr: false, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Struct(tc.Input) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} + +func TestFlowDirectiveValidation(t *testing.T) { + cases := []struct { + Name string + Input FlowDirective + IsEnum bool // Expected result for IsEnum method. + ShouldErr bool // Expected result for validation. + }{ + { + Name: "Valid Enum: continue", + Input: FlowDirective{Value: "continue"}, + IsEnum: true, + ShouldErr: false, + }, + { + Name: "Valid Enum: exit", + Input: FlowDirective{Value: "exit"}, + IsEnum: true, + ShouldErr: false, + }, + { + Name: "Valid Enum: end", + Input: FlowDirective{Value: "end"}, + IsEnum: true, + ShouldErr: false, + }, + { + Name: "Valid Free-form String", + Input: FlowDirective{Value: "custom-directive"}, + IsEnum: false, + ShouldErr: false, + }, + { + Name: "Invalid Empty String", + Input: FlowDirective{Value: ""}, + IsEnum: false, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + // Validate the struct + err := validate.Var(tc.Input.Value, "required") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + + // Check IsEnum result + assert.Equal(t, tc.IsEnum, tc.Input.IsEnum(), "unexpected IsEnum result") + }) + } +} + +func TestUse_MarshalJSON(t *testing.T) { + use := Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), + }, + Errors: map[string]*Error{ + "error1": {Type: NewUriTemplate("http://example.com/errors"), Status: 404}, + }, + Extensions: ExtensionList{ + {Key: "ext1", Extension: &Extension{Extend: "call"}}, + {Key: "ext2", Extension: &Extension{Extend: "emit"}}, + {Key: "ext3", Extension: &Extension{Extend: "for"}}, + }, + Functions: NamedTaskMap{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com/"), Method: "GET"}}, + }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, + }, + }, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, + }, + } + + data, err := json.Marshal(use) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "authentications": {"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [ + {"ext1": {"extend": "call"}}, + {"ext2": {"extend": "emit"}}, + {"ext3": {"extend": "for"}} + ], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com/", "method": "GET"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }`, string(data)) +} + +func TestUse_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "authentications": 
{"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [{"ext1": {"extend": "call"}}], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }` + + var use Use + err := json.Unmarshal([]byte(jsonData), &use) + assert.NoError(t, err) + + assert.NotNil(t, use.Authentications["auth1"]) + assert.Equal(t, "alice", use.Authentications["auth1"].Basic.Username) + assert.Equal(t, "secret", use.Authentications["auth1"].Basic.Password) + + assert.NotNil(t, use.Errors["error1"]) + assert.Equal(t, "http://example.com/errors", use.Errors["error1"].Type.String()) + assert.Equal(t, 404, use.Errors["error1"].Status) + + assert.NotNil(t, use.Extensions.Key("ext1")) + assert.Equal(t, "call", use.Extensions.Key("ext1").Extend) + + assert.NotNil(t, use.Functions["func1"]) + assert.IsType(t, &CallHTTP{With: HTTPArguments{Endpoint: NewEndpoint("http://example.com")}}, use.Functions["func1"]) + + assert.NotNil(t, use.Retries["retry1"]) + assert.Equal(t, "PT5S", use.Retries["retry1"].Delay.AsExpression()) + assert.Equal(t, 3, use.Retries["retry1"].Limit.Attempt.Count) + + assert.Equal(t, []string{"secret1", "secret2"}, use.Secrets) + + assert.NotNil(t, use.Timeouts["timeout1"]) + assert.Equal(t, "PT1M", use.Timeouts["timeout1"].After.AsExpression()) + + assert.NotNil(t, use.Catalogs["catalog1"]) + assert.Equal(t, "http://example.com", use.Catalogs["catalog1"].Endpoint.URITemplate.String()) +} + +func TestUse_Validation(t *testing.T) { + use := &Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), + }, + Errors: map[string]*Error{ + "error1": {Type: &URITemplateOrRuntimeExpr{&LiteralUri{"http://example.com/errors"}}, Status: 404}, + }, + Extensions: ExtensionList{}, + Functions: map[string]Task{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com"), Method: "GET"}}, + }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, + }, + }, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, + }, + } + + err := validate.Struct(use) + assert.NoError(t, err) + + // Test with missing required fields + use.Catalogs["catalog1"].Endpoint = nil + err = validate.Struct(use) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s'", validationErr.Namespace(), validationErr.Tag()) + } + + assert.Contains(t, validationErrors.Error(), "Catalogs[catalog1].Endpoint") + assert.Contains(t, validationErrors.Error(), "required") + } +} diff --git a/parser/cmd/main.go b/parser/cmd/main.go new file mode 100644 index 0000000..b90b902 --- /dev/null +++ b/parser/cmd/main.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance 
with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/serverlessworkflow/sdk-go/v3/parser" +) + +func main() { + if len(os.Args) < 2 { + fmt.Println("Usage: go run main.go ") + os.Exit(1) + } + + baseDir := os.Args[1] + supportedExt := []string{".json", ".yaml", ".yml"} + errCount := 0 + + err := filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + for _, ext := range supportedExt { + if filepath.Ext(path) == ext { + fmt.Printf("Validating: %s\n", path) + _, err := parser.FromFile(path) + if err != nil { + fmt.Printf("Validation failed for %s: %v\n", path, err) + errCount++ + } else { + fmt.Printf("Validation succeeded for %s\n", path) + } + break + } + } + } + return nil + }) + + if err != nil { + fmt.Printf("Error walking the path %s: %v\n", baseDir, err) + os.Exit(1) + } + + if errCount > 0 { + fmt.Printf("Validation failed for %d file(s).\n", errCount) + os.Exit(1) + } + + fmt.Println("All workflows validated successfully.") +} diff --git a/parser/parser.go b/parser/parser.go index fe9972d..3707132 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -21,9 +21,8 @@ import ( "path/filepath" "strings" - "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/serverlessworkflow/sdk-go/v3/model" - "github.com/serverlessworkflow/sdk-go/v2/model" "sigs.k8s.io/yaml" ) @@ -50,7 +49,9 @@ func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { if err := json.Unmarshal(source, workflow); err != nil { return nil, err } - if err := validator.GetValidator().Struct(workflow); err != nil { + + err = model.GetValidator().Struct(workflow) + if err != nil { return nil, err } return workflow, nil diff --git a/parser/parser_test.go b/parser/parser_test.go index 2bdbfa6..9852d5f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -15,244 +15,131 @@ package parser import ( - "os" - "path/filepath" "testing" - "github.com/serverlessworkflow/sdk-go/v2/model" "github.com/stretchr/testify/assert" ) -func TestBasicValidation(t *testing.T) { - rootPath := "./testdata/workflows" - files, err := os.ReadDir(rootPath) +func TestFromYAMLSource(t *testing.T) { + source := []byte(` +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + endpoint: http://example.com +`) + workflow, err := FromYAMLSource(source) assert.NoError(t, err) - for _, file := range files { - if !file.IsDir() { - workflow, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.NoError(t, err) - assert.NotEmpty(t, workflow.Name) - assert.NotEmpty(t, workflow.ID) - assert.NotEmpty(t, workflow.States) - } - } + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) } -func TestCustomValidators(t *testing.T) { - rootPath := "./testdata/workflows/witherrors" - files, err := os.ReadDir(rootPath) - assert.NoError(t, err) - for _, file := range files { - if !file.IsDir() { - _, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.Error(t, 
err) +func TestFromJSONSource(t *testing.T) { + source := []byte(`{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } } - } + ] +}`) + workflow, err := FromJSONSource(source) + assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) } func TestFromFile(t *testing.T) { - files := map[string]func(*testing.T, *model.Workflow){ - "./testdata/workflows/greetings.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - }, - "./testdata/workflows/greetings.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) - assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - }, - "./testdata/workflows/eventbaseddataandswitch.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Start", w.States[0].GetName()) - assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) - }, - "./testdata/workflows/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) + tests := []struct { + name string + filePath string + expectError bool + }{ + { + name: "Valid YAML File", + filePath: "testdata/valid_workflow.yaml", + expectError: false, + }, + { + name: "Invalid YAML File", + filePath: "testdata/invalid_workflow.yaml", + expectError: true, + }, + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Non-existent File", + filePath: "testdata/nonexistent_workflow.yaml", + expectError: true, }, - "./testdata/workflows/eventbasedgreetingexclusive.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - 
assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) - assert.Equal(t, false, eventState.Exclusive) - }, - "./testdata/workflows/eventbasedgreeting.sw.p.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - }, - "./testdata/workflows/eventbasedswitch.sw.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.EventBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.EventBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.EventConditions) - assert.NotEmpty(t, eventState.Name) - assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) - }, - "./testdata/workflows/applicationrequest.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 1) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token - assert.Equal(t, "test_token", bearerProperties) - }, - "./testdata/workflows/applicationrequest.multiauth.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 2) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token - assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", w.Auth.Defs[1].Name) - assert.Equal(t, model.AuthTypeBasic, w.Auth.Defs[1].Scheme) - basicProperties := w.Auth.Defs[1].Properties.(*model.BasicAuthProperties) - assert.Equal(t, "test_user", basicProperties.Username) - assert.Equal(t, 
"test_pwd", basicProperties.Password) + } - }, - "./testdata/workflows/applicationrequest.rp.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - "./testdata/workflows/applicationrequest.url.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - "./testdata/workflows/checkinbox.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.OperationState{}, w.States[0]) - operationState := w.States[0].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, w.States, 2) - }, - // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/workflows/provisionorders.sw.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.OperationState{}, w.States[0]) - operationState := w.States[0].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, operationState.OnErrors, 3) - assert.Equal(t, "Missing order id", operationState.OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", operationState.OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", operationState.OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) - }, "./testdata/workflows/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) - assert.False(t, w.States[1].GetEnd().Terminate) - }, "./testdata/workflows/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - dataBaseSwitchState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, dataBaseSwitchState) - assert.NotEmpty(t, dataBaseSwitchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].GetName()) - }, - // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/workflows/patientonboarding.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, w.Retries) - assert.Len(t, w.Retries, 1) - assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) - assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) - }, - "./testdata/workflows/greetings-secret.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Len(t, w.Secrets, 1) - }, - 
"./testdata/workflows/greetings-secret-file.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Len(t, w.Secrets, 3) - }, - "./testdata/workflows/greetings-constants-file.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.NotEmpty(t, w.Constants) - assert.NotEmpty(t, w.Constants.Data["Translations"]) - }, - "./testdata/workflows/roomreadings.timeouts.sw.json": func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - "./testdata/workflows/roomreadings.timeouts.file.sw.json": func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - "./testdata/workflows/purchaseorderworkflow.sw.json": func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + workflow, err := FromFile(tt.filePath) + if tt.expectError { + assert.Error(t, err) + assert.Nil(t, workflow) + } else { + assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) + } + }) + } +} + +func TestCheckFilePath(t *testing.T) { + tests := []struct { + name string + filePath string + expectError bool + }{ + { + name: "Valid YAML File Path", + filePath: "testdata/valid_workflow.yaml", + expectError: false, + }, + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Directory Path", + filePath: "testdata", + expectError: true, }, } - for file, f := range files { - workflow, err := FromFile(file) - assert.NoError(t, err, "Test File", file) - assert.NotNil(t, workflow, "Test File", file) - f(t, workflow) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := checkFilePath(tt.filePath) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) } } diff --git a/parser/testdata/applicationrequestfunctions.json b/parser/testdata/applicationrequestfunctions.json deleted file mode 100644 index 9416a78..0000000 --- a/parser/testdata/applicationrequestfunctions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/application.json#emailRejection" - } - ] - } \ No newline at end of file diff --git a/parser/testdata/applicationrequestretries.json b/parser/testdata/applicationrequestretries.json deleted file mode 100644 index 510e49a..0000000 --- a/parser/testdata/applicationrequestretries.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ] -} diff --git a/parser/testdata/constantsDogs.json b/parser/testdata/constantsDogs.json deleted file mode 100644 index cd3f101..0000000 --- a/parser/testdata/constantsDogs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Translations": { - "Dog": { - "Serbian": "pas", - "Spanish": "perro", - "French": "chien" - } - } -} \ No newline at end of file diff --git a/parser/testdata/errors.json b/parser/testdata/errors.json deleted file mode 100644 index 099e14d..0000000 --- a/parser/testdata/errors.json +++ /dev/null @@ -1,13 +0,0 
@@ -{ - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventbasedgreetingevents.json b/parser/testdata/eventbasedgreetingevents.json deleted file mode 100644 index b63f2bf..0000000 --- a/parser/testdata/eventbasedgreetingevents.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/invalid_workflow.yaml b/parser/testdata/invalid_workflow.yaml new file mode 100644 index 0000000..32e25a9 --- /dev/null +++ b/parser/testdata/invalid_workflow.yaml @@ -0,0 +1,25 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + # Missing "endpoint" field, making it invalid \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json deleted file mode 100644 index d5b17c7..0000000 --- a/parser/testdata/secrets.json +++ /dev/null @@ -1,5 +0,0 @@ -[ - "SECRET1", - "SECRET2", - "SECRET3" -] \ No newline at end of file diff --git a/parser/testdata/timeouts.json b/parser/testdata/timeouts.json deleted file mode 100644 index c3586bd..0000000 --- a/parser/testdata/timeouts.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } -} \ No newline at end of file diff --git a/parser/testdata/valid_workflow.json b/parser/testdata/valid_workflow.json new file mode 100644 index 0000000..204e917 --- /dev/null +++ b/parser/testdata/valid_workflow.json @@ -0,0 +1,19 @@ +{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/valid_workflow.yaml b/parser/testdata/valid_workflow.yaml new file mode 100644 index 0000000..19df6c4 --- /dev/null +++ b/parser/testdata/valid_workflow.yaml @@ -0,0 +1,25 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
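The valid_workflow.json fixture above is the workflow the new parser tests load. Parsing it through the v3 API looks roughly like the sketch below; FromFile dispatches on the .json/.yaml/.yml extension and runs the model validator before returning, as parser.go above shows. The same check can be run across a whole directory of definitions with the new command, e.g. go run ./parser/cmd ./parser/testdata.

package main

import (
	"fmt"
	"log"

	"github.com/serverlessworkflow/sdk-go/v3/parser"
)

func main() {
	// FromFile picks the decoder from the file extension and validates
	// the resulting workflow with model.GetValidator() before returning it.
	wf, err := parser.FromFile("parser/testdata/valid_workflow.json")
	if err != nil {
		log.Fatalf("failed to parse workflow: %v", err)
	}
	fmt.Println(wf.Document.Name)    // example-workflow
	fmt.Println(wf.Document.Version) // 1.0.0
}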
+ +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + endpoint: http://example.com \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml deleted file mode 100644 index a8e77ff..0000000 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: applicantrequest -version: '1.0' -name: Applicant Request Decision Workflow -description: Determine if applicant request is valid -start: CheckApplication -specVersion: "0.7" -functions: - - name: sendRejectionEmailFunction - operation: http://myapis.org/applicationapi.json#emailRejection -states: - - name: CheckApplication - type: switch - dataConditions: - - condition: "{{ $.applicants[?(@.age >= 18)] }}" - transition: StartApplication - - condition: "{{ $.applicants[?(@.age < 18)] }}" - transition: RejectApplication - defaultCondition: - transition: RejectApplication - - name: StartApplication - type: operation - actions: - - subFlowRef: - workflowId: startApplicationWorkflowId - end: true - - name: RejectApplication - type: operation - actionMode: sequential - actions: - - functionRef: - refName: sendRejectionEmailFunction - arguments: - applicant: "{{ $.applicant }}" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json deleted file mode 100644 index b5fc7da..0000000 --- a/parser/testdata/workflows/applicationrequest.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "{{ $.applicants[?(@.age < 18)] }}", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - 
{ - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "{{ $.applicant }}" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json deleted file mode 100644 index b1bf69c..0000000 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "{{ $.applicants[?(@.age < 18)] }}", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "{{ $.applicant }}" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json deleted file mode 100644 index ad2acce..0000000 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.7", - "start": { - "stateName": "CheckApplication" - }, - "functions": "testdata/applicationrequestfunctions.json", - "retries": "testdata/applicationrequestretries.json", - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "{{ $.applicants[?(@.age < 18)] }}", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "{{ $.applicant }}" - } 
- } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json deleted file mode 100644 index 64e62c6..0000000 --- a/parser/testdata/workflows/applicationrequest.url.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.7", - "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestfunctions.json", - "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json", - "start": { - "stateName": "CheckApplication" - }, - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "{{ $.applicants[?(@.age < 18)] }}", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "{{ $.applicant }}" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml deleted file mode 100644 index cd548fe..0000000 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.7" -start: - stateName: CheckInbox - schedule: - cron: 0 0/15 * * * ? 
-functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: checkInboxFunction - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "{{ $.messages }}" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "{{ $.singlemessage }}" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml deleted file mode 100644 index cdcec1e..0000000 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.7" -start: - stateName: CheckInbox - schedule: - cron: - expression: 0 0/15 * * * ? -functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: - refName: checkInboxFunction - transition: - nextState: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "{{ $.messages }}" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "{{ $.singlemessage }}" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json deleted file mode 100644 index 58482be..0000000 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "id": "eventbaseddataandswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions with Event Database Condition", - "specVersion": "0.7", - "start": { - "stateName": "Start" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "Start", - "type": "switch", - "dataConditions": [ - { - "condition": "${ true }", - "transition": "CheckVisaStatus" - } - ] - }, - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "eventTimeout": "PT1H", - "defaultCondition": { - "transition": { - 
"nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json deleted file mode 100644 index a62a865..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "{{ $.data.greet }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "{{ $.greet.name }}" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "{{ $.payload.greeting }}" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json deleted file mode 100644 index f9beefa..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "events": "testdata/eventbasedgreetingevents.json", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "{{ $.data.greet }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "{{ $.greet.name }}" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "{{ $.payload.greeting }}" - }, - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json deleted file mode 100644 index 660e3ef..0000000 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "eventbasedgreetingexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - 
"type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "{{ $.data.greet }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "{{ $.greet.name }}" - } - } - } - ] - }, - { - "eventRefs": [ - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "{{ $.data.greet2 }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction2", - "arguments": { - "name": "{{ $.greet.name }}" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "{{ $.payload.greeting }}" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json deleted file mode 100644 index 30f6354..0000000 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "eventbasedgreetingnonexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent", - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "{{ $.data.greet }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "{{ $.greet.name }}" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "{{ $.payload.greeting }}" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json deleted file mode 100644 index 03563d9..0000000 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "id": "eventbasedswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "specVersion": "0.7", - "start": { - "stateName": "CheckVisaStatus" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "eventTimeout": "PT1H", - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - 
"type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml deleted file mode 100644 index 060ace9..0000000 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.7" -start: - stateName: Greet -constants: "testdata/constantsDogs.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ $SECRETS.SECRET1 }" - actionDataFilter: - dataResultsPath: "$.payload.greeting" - stateDataFilter: - dataOutputPath: "$.greeting" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml deleted file mode 100644 index a7afbe9..0000000 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.7" -start: - stateName: Greet -secrets: "testdata/secrets.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ $SECRETS.SECRET1 }" - actionDataFilter: - dataResultsPath: "$.payload.greeting" - stateDataFilter: - dataOutputPath: "$.greeting" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml deleted file mode 100644 index 1e6b71f..0000000 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.7" -start: - stateName: Greet -secrets: - - NAME -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ $SECRETS.NAME }" - actionDataFilter: - dataResultsPath: "$.payload.greeting" - stateDataFilter: - dataOutputPath: "$.greeting" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json deleted file mode 100644 index ba56384..0000000 --- a/parser/testdata/workflows/greetings.sw.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "name": "Greeting Workflow", - "description": "Greet Someone", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "parameters": { - "name": "{{ $.person.name }}" - } - }, - "actionDataFilter": { - "dataResultsPath": "{{ $.greeting }}" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml deleted file mode 100644 index de92c77..0000000 --- a/parser/testdata/workflows/greetings.sw.yaml +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.7" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "$.greet.name" - actionDataFilter: - dataResultsPath: "$.payload.greeting" - stateDataFilter: - dataOutputPath: "$.greeting" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml deleted file mode 100644 index 0841351..0000000 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2021 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: patientonboarding -name: Patient Onboarding Workflow -version: '1.0' -start: Onboard -specVersion: "0.7" -states: - - name: Onboard - type: event - onEvents: - - eventRefs: - - NewPatientEvent - actions: - - functionRef: StorePatient - - functionRef: AssignDoctor - - functionRef: ScheduleAppt - onErrors: - - error: ServiceNotAvailable - code: '503' - retryRef: ServicesNotAvailableRetryStrategy - end: true - end: true -events: - - name: StorePatient - type: new.patients.event - source: newpatient/+ -functions: - - name: StoreNewPatientInfo - operation: api/services.json#addPatient - - name: AssignDoctor - operation: api/services.json#assignDoctor - - name: ScheduleAppt - operation: api/services.json#scheduleAppointment -retries: - - name: ServicesNotAvailableRetryStrategy - delay: PT3S - maxAttempts: 10 - jitter: 0.0 - multiplier: 1.1 \ No newline at end of file diff --git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json deleted file mode 100644 index 21119a2..0000000 --- a/parser/testdata/workflows/provisionorders.sw.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "id": "provisionorders", - "version": "1.0", - "specVersion": "0.7", - "name": "Provision Orders", - "description": "Provision Orders and handle errors thrown", - "start": "ProvisionOrder", - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ], - "states":[ - { - "name":"ProvisionOrder", - "type":"operation", - "actionMode":"sequential", - "actions":[ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .order }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .exceptions }" - }, - "transition": "ApplyOrder", - "onErrors": [ - { - "errorRef": "Missing order id", - "transition": "MissingId" - }, - { - "errorRef": "Missing order item", - "transition": "MissingItem" - }, - { - "errorRef": "Missing order quantity", - "transition": "MissingQuantity" - } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingIdExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingItemExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingQuantityExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ - { - "subFlowRef": "applyOrderWorkflowId" - } - ], - "end": true - } - ] -} diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json deleted file mode 100644 index 998b974..0000000 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "id": "order", - "name": "Purchase Order Workflow", - "version": "1.0", - "specVersion": "0.7", - "start": "StartNewOrder", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT30D", - "runBefore": "CancelOrder" - } - }, - "states": [ - { - "name": "StartNewOrder", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderCreatedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogNewOrderCreated" - } - } - ] - } - ], - "transition": { - "nextState": 
"WaitForOrderConfirmation" - } - }, - { - "name": "WaitForOrderConfirmation", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderConfirmedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderConfirmed" - } - } - ] - } - ], - "transition": { - "nextState": "WaitOrderShipped" - } - }, - { - "name": "WaitOrderShipped", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "ShipmentSentEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderShipped" - } - } - ] - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderFinishedEvent" - } - ] - } - }, - { - "name": "CancelOrder", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "CancelOrder" - } - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderCancelledEvent" - } - ] - } - } - ], - "events": [ - { - "name": "OrderCreatedEvent", - "type": "my.company.orders", - "source": "/orders/new", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderConfirmedEvent", - "type": "my.company.orders", - "source": "/orders/confirmed", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "ShipmentSentEvent", - "type": "my.company.orders", - "source": "/orders/shipped", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderFinishedEvent", - "type": "my.company.orders", - "kind": "produced" - }, - { - "name": "OrderCancelledEvent", - "type": "my.company.orders", - "kind": "produced" - } - ], - "functions": [ - { - "name": "LogNewOrderCreated", - "operation": "http.myorg.io/ordersservices.json#logcreated" - }, - { - "name": "LogOrderConfirmed", - "operation": "http.myorg.io/ordersservices.json#logconfirmed" - }, - { - "name": "LogOrderShipped", - "operation": "http.myorg.io/ordersservices.json#logshipped" - }, - { - "name": "CancelOrder", - "operation": "http.myorg.io/ordersservices.json#calcelorder" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json deleted file mode 100644 index c875863..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.7", - "start": "ConsumeReading", - "timeouts": "testdata/timeouts.json", - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - 
"operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json deleted file mode 100644 index b322f8c..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.sw.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.7", - "start": "ConsumeReading", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } - }, - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} diff --git a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json deleted file mode 100644 index 30f75cd..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "{{ $.applicants[?(@.age < 18)] }}", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "{{ $.applicant }}" - } - } - 
} - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/test/utils.go b/test/utils.go new file mode 100644 index 0000000..d478edc --- /dev/null +++ b/test/utils.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "sigs.k8s.io/yaml" +) + +func AssertYAMLEq(t *testing.T, expected, actual string) { + var expectedMap, actualMap map[string]interface{} + + // Unmarshal the expected YAML + err := yaml.Unmarshal([]byte(expected), &expectedMap) + assert.NoError(t, err, "failed to unmarshal expected YAML") + + // Unmarshal the actual YAML + err = yaml.Unmarshal([]byte(actual), &actualMap) + assert.NoError(t, err, "failed to unmarshal actual YAML") + + // Assert equality of the two maps + assert.Equal(t, expectedMap, actualMap, "YAML structures do not match") +} diff --git a/tools.mod b/tools.mod index a5c10f5..203ee14 100644 --- a/tools.mod +++ b/tools.mod @@ -1,6 +1,6 @@ -module github.com/serverlessworkflow/sdk-go +module github.com/serverlessworkflow/sdk-go/v3 -go 1.14 +go 1.22 require ( github.com/google/addlicense v0.0.0-20210428195630-6d92264d7170 // indirect diff --git a/util/floatstr/floatstr.go b/util/floatstr/floatstr.go deleted file mode 100644 index 3261fdd..0000000 --- a/util/floatstr/floatstr.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "fmt" - "strconv" -) - -// Float32OrString is a type that can hold a float32 or a string. -// implementation borrowed from apimachinary intstr package: https://github.com/kubernetes/apimachinery/blob/master/pkg/util/intstr/intstr.go -type Float32OrString struct { - Type Type `json:"type,omitempty"` - FloatVal float32 `json:"floatVal,omitempty"` - StrVal string `json:"strVal,omitempty"` -} - -// Type represents the stored type of Float32OrString. -type Type int64 - -const ( - // Float ... - Float Type = iota // The Float32OrString holds a float. - // String ... - String // The Float32OrString holds a string. -) - -// FromFloat creates an Float32OrString object with a float32 value. It is -// your responsibility not to call this method with a value greater -// than float32. 
-func FromFloat(val float32) Float32OrString { - return Float32OrString{Type: Float, FloatVal: val} -} - -// FromString creates a Float32OrString object with a string value. -func FromString(val string) Float32OrString { - return Float32OrString{Type: String, StrVal: val} -} - -// Parse the given string and try to convert it to a float32 before -// setting it as a string value. -func Parse(val string) Float32OrString { - f, err := strconv.ParseFloat(val, 32) - if err != nil { - return FromString(val) - } - return FromFloat(float32(f)) -} - -// UnmarshalJSON implements the json.Unmarshaller interface. -func (floatstr *Float32OrString) UnmarshalJSON(value []byte) error { - if value[0] == '"' { - floatstr.Type = String - return json.Unmarshal(value, &floatstr.StrVal) - } - floatstr.Type = Float - return json.Unmarshal(value, &floatstr.FloatVal) -} - -// MarshalJSON implements the json.Marshaller interface. -func (floatstr Float32OrString) MarshalJSON() ([]byte, error) { - switch floatstr.Type { - case Float: - return json.Marshal(floatstr.FloatVal) - case String: - return json.Marshal(floatstr.StrVal) - default: - return []byte{}, fmt.Errorf("impossible Float32OrString.Type") - } -} - -// String returns the string value, or the float value. -func (floatstr *Float32OrString) String() string { - if floatstr == nil { - return "" - } - if floatstr.Type == String { - return floatstr.StrVal - } - return strconv.FormatFloat(float64(floatstr.FloatValue()), 'E', -1, 32) -} - -// FloatValue returns the FloatVal if type float32, or if -// it is a String, will attempt a conversion to float32, -// returning 0 if a parsing error occurs. -func (floatstr *Float32OrString) FloatValue() float32 { - if floatstr.Type == String { - f, _ := strconv.ParseFloat(floatstr.StrVal, 32) - return float32(f) - } - return floatstr.FloatVal -} diff --git a/util/floatstr/floatstr_test.go b/util/floatstr/floatstr_test.go deleted file mode 100644 index c01f2b2..0000000 --- a/util/floatstr/floatstr_test.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package floatstr - -import ( - "encoding/json" - "k8s.io/apimachinery/pkg/util/yaml" - "reflect" - "testing" -) - -func TestFromFloat(t *testing.T) { - i := FromFloat(93.93) - if i.Type != Float || i.FloatVal != 93.93 { - t.Errorf("Expected FloatVal=93.93, got %+v", i) - } -} - -func TestFromString(t *testing.T) { - i := FromString("76.76") - if i.Type != String || i.StrVal != "76.76" { - t.Errorf("Expected StrVal=\"76.76\", got %+v", i) - } -} - -type FloatOrStringHolder struct { - FOrS Float32OrString `json:"val"` -} - -func TestIntOrStringUnmarshalJSON(t *testing.T) { - cases := []struct { - input string - result Float32OrString - }{ - {"{\"val\": 123.123}", FromFloat(123.123)}, - {"{\"val\": \"123.123\"}", FromString("123.123")}, - } - - for _, c := range cases { - var result FloatOrStringHolder - if err := json.Unmarshal([]byte(c.input), &result); err != nil { - t.Errorf("Failed to unmarshal input '%v': %v", c.input, err) - } - if result.FOrS != c.result { - t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result) - } - } -} - -func TestIntOrStringMarshalJSON(t *testing.T) { - cases := []struct { - input Float32OrString - result string - }{ - {FromFloat(123.123), "{\"val\":123.123}"}, - {FromString("123.123"), "{\"val\":\"123.123\"}"}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - result, err := json.Marshal(&input) - if err != nil { - t.Errorf("Failed to marshal input '%v': %v", input, err) - } - if string(result) != c.result { - t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result)) - } - } -} - -func TestIntOrStringMarshalJSONUnmarshalYAML(t *testing.T) { - cases := []struct { - input Float32OrString - }{ - {FromFloat(123.123)}, - {FromString("123.123")}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - jsonMarshalled, err := json.Marshal(&input) - if err != nil { - t.Errorf("1: Failed to marshal input: '%v': %v", input, err) - } - - var result FloatOrStringHolder - err = yaml.Unmarshal(jsonMarshalled, &result) - if err != nil { - t.Errorf("2: Failed to unmarshal '%+v': %v", string(jsonMarshalled), err) - } - - if !reflect.DeepEqual(input, result) { - t.Errorf("3: Failed to marshal input '%+v': got %+v", input, result) - } - } -} diff --git a/validator/validator.go b/validator/validator.go deleted file mode 100644 index fbff15f..0000000 --- a/validator/validator.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - validator "github.com/go-playground/validator/v10" -) - -// TODO: expose a better validation message. 
See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation - -var validate *validator.Validate - -func init() { - validate = validator.New() -} - -// GetValidator gets the default validator.Validate reference -func GetValidator() *validator.Validate { - return validate -}