diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml b/.github/workflows/Go-SDK-Check-k8s-integration.yaml deleted file mode 100644 index 6d97acb..0000000 --- a/.github/workflows/Go-SDK-Check-k8s-integration.yaml +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Go SDK Check k8s integration -on: - pull_request: - paths-ignore: - - "**.md" - - "hack/**" - - "LICENSE" - - "Makefile" - branches: - - main - -permissions: - contents: read - -env: - GO_VERSION: 1.22 -jobs: - basic_checks: - name: Basic Checks - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v4 - - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v5 - with: - go-version: ${{ env.GO_VERSION }} - id: go - - name: Cache dependencies - uses: actions/cache@v4 - with: - path: | - ~/.cache/go-build - ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - name: Cache tools - uses: actions/cache@v4 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} - restore-keys: | - ${{ runner.os }}-go-tools- - - name: Check K8s Integration - run: | - export GOPATH=$(go env GOPATH) - make kube-integration \ No newline at end of file diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 7b8a8a3..8d4da2f 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -1,10 +1,10 @@ -# Copyright 2020 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ # limitations under the License. 
name: Go SDK PR Checks + on: pull_request: paths-ignore: @@ -23,11 +24,13 @@ on: branches: - main + permissions: contents: read env: GO_VERSION: 1.22 + jobs: basic_checks: name: Basic Checks @@ -35,12 +38,14 @@ jobs: steps: - name: Checkout Code uses: actions/checkout@v4 - - name: Setup Go ${{ env.GO_VERSION }} + + - name: Setup Go uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} id: go - - name: Cache dependencies + + - name: Cache Go Modules uses: actions/cache@v4 with: path: | @@ -49,33 +54,73 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - name: Cache tools + + - name: Cache Tools uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} restore-keys: | ${{ runner.os }}-go-tools- + - name: Check Headers run: | make addheaders changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are missing the headers: \n$changed_files\n Did you run 'make lint' before sending the PR" && exit 1) - - name: Check DeepCopy Generation - run: | - export GOPATH=$(go env GOPATH) - make deepcopy + if [[ -n "$changed_files" ]]; then + echo -e "❌ Some files are missing headers:\n$changed_files" + exit 1 + fi + - name: Check Formatting run: | make fmt changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are not formatted properly: \n$changed_files\n Did you run 'make test' before sending the PR?" && exit 1) - - name: Check lint - uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues + if [[ -n "$changed_files" ]]; then + echo -e "❌ Some files are not formatted correctly:\n$changed_files" + exit 1 + fi + + - name: Run Linter + uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues with: version: latest - - name: Install cover - run: go get -modfile=tools.mod golang.org/x/tools/cmd/cover + + - name: Install Cover Tool + run: go install golang.org/x/tools/cmd/cover@latest + - name: Run Unit Tests + run: go test ./... -coverprofile=test_coverage.out -covermode=atomic + + - name: Upload Coverage Report + uses: actions/upload-artifact@v3 + with: + name: Test Coverage Report + path: test_coverage.out + + integration_tests: + name: Integration Tests + runs-on: ubuntu-latest + needs: basic_checks + steps: + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + id: go + + - name: Run Integration Tests run: | - go test ./... -coverprofile test_coverage.out -covermode=atomic + chmod +x ./hack/integration-test.sh + ./hack/integration-test.sh + continue-on-error: true + + - name: Upload JUnit Report + if: always() + uses: actions/upload-artifact@v3 + with: + name: Integration Test JUnit Report + path: ./integration-test-junit.xml diff --git a/.gitignore b/.gitignore index 55109f1..914d9c4 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ bin *.out .vscode +integration-test-junit.xml diff --git a/Makefile b/Makefile index a8f36db..767d158 100644 --- a/Makefile +++ b/Makefile @@ -22,44 +22,14 @@ lint: .PHONY: test coverage="false" -test: deepcopy buildergen +test: @echo "🧪 Running tests..." @go test ./... @echo "✅ Tests completed!"
-deepcopy: $(DEEPCOPY_GEN) ## Download deepcopy-gen locally if necessary. - @echo "๐Ÿ“ฆ Running deepcopy-gen..." - @./hack/deepcopy-gen.sh deepcopy > /dev/null - @make lint - @echo "โœ… Deepcopy generation and linting completed!" - -buildergen: $(BUILDER_GEN) ## Download builder-gen locally if necessary. - @echo "๐Ÿ“ฆ Running builder-gen..." - @./hack/builder-gen.sh buildergen > /dev/null - @make lint - @echo "โœ… Builder generation and linting completed!" - -.PHONY: kube-integration -kube-integration: controller-gen - @echo "๐Ÿ“ฆ Generating Kubernetes objects..." - @$(CONTROLLER_GEN) object:headerFile="./hack/boilerplate.txt" paths="./kubernetes/api/..." - @echo "๐Ÿ“ฆ Generating Kubernetes CRDs..." - @$(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./kubernetes/..." output:crd:artifacts:config=config/crd/bases - @make lint - @echo "โœ… Kubernetes integration completed!" - - -#################################### -# install controller-gen tool -## Location to install dependencies to -LOCALBIN ?= $(shell pwd)/bin -$(LOCALBIN): - mkdir -p $(LOCALBIN) - -CONTROLLER_TOOLS_VERSION ?= v0.16.3 -CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen -.PHONY: controller-gen -controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. -$(CONTROLLER_GEN): $(LOCALBIN) - test -s $(LOCALBIN)/controller-gen || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-tools/cmd/controller-gen@$(CONTROLLER_TOOLS_VERSION) +.PHONY: integration-test +integration-test: + @echo "๐Ÿ”„ Running integration tests..." + @./hack/integration-test.sh + @echo "โœ… Integration tests completed!" \ No newline at end of file diff --git a/README.md b/README.md index e4d87c6..786333e 100644 --- a/README.md +++ b/README.md @@ -1,107 +1,154 @@ # Go SDK for Serverless Workflow -Here you will find all the [specification types](https://github.com/serverlessworkflow/specification/blob/main/schema/workflow.json) defined by our Json Schemas, in Go. -Table of Contents -================= +The Go SDK for Serverless Workflow provides the [specification types](https://github.com/serverlessworkflow/specification/blob/v1.0.0-alpha5/schema/workflow.yaml) defined by the Serverless Workflow DSL in Go, making it easy to parse, validate, and interact with workflows. 
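For a quick first look at the API, here is a minimal sketch that parses a definition from a local file and re-validates it, using the v3 `parser` and `builder` packages described later in this README (the `workflow.yaml` path is illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/serverlessworkflow/sdk-go/v3/builder"
	"github.com/serverlessworkflow/sdk-go/v3/parser"
)

func main() {
	// Parse a workflow definition from a local YAML (or JSON) file.
	workflow, err := parser.FromFile("workflow.yaml")
	if err != nil {
		log.Fatalf("failed to parse workflow: %v", err)
	}

	// Re-validate the parsed definition with the builder package helper.
	if err := builder.Validate(workflow); err != nil {
		log.Fatalf("workflow is not valid: %v", err)
	}

	fmt.Println("loaded workflow:", workflow.Document.Name)
}
```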
+ +--- + +## Table of Contents - [Status](#status) - [Releases](#releases) -- [How to Use](#how-to-use) - - [Parsing Serverless Workflow files](#parsing-serverless-workflow-files) -- [Slack Channel](#slack-channel) -- [Contributors Guide](#contributors-guide) +- [Getting Started](#getting-started) + - [Installation](#installation) + - [Parsing Workflow Files](#parsing-workflow-files) + - [Programmatic Workflow Creation](#programmatic-workflow-creation) +- [Slack Community](#slack-community) +- [Contributing](#contributing) - [Code Style](#code-style) - [EditorConfig](#editorconfig) - [Known Issues](#known-issues) +--- ## Status -Current status of features implemented in the SDK is listed in the table below: + +The current status of features implemented in the SDK is listed below: | Feature | Status | |-------------------------------------------- | ------------------ | -| Parse workflow JSON and YAML definitions | :heavy_check_mark: | -| Programmatically build workflow definitions | :no_entry_sign: | +| Parse workflow JSON and YAML definitions | :heavy_check_mark: | +| Programmatically build workflow definitions | :heavy_check_mark: | | Validate workflow definitions (Schema) | :heavy_check_mark: | -| Validate workflow definitions (Integrity) | :heavy_check_mark: | +| Validate workflow definitions (Integrity) | :no_entry_sign: | | Generate workflow diagram (SVG) | :no_entry_sign: | +--- ## Releases -| Latest Releases | Conformance to spec version | -|:--------------------------------------------------------------------------:| :---: | -| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | -| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | -| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.4.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | -## How to use +| Latest Releases | Conformance to Spec Version | +|:--------------------------------------------------------------------------:|:------------------------------------------------------------------------:| +| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | +| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.4.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0-alpha5) | + +--- -Run the following command in the root of your Go's project: +## Getting Started -```shell script -$ go get github.com/serverlessworkflow/sdk-go/v2 +### Installation + +To use the SDK in your Go project, run the following command: + +```shell +$ go get github.com/serverlessworkflow/sdk-go/v3 ``` -Your `go.mod` file should be updated to add a dependency from the 
Serverless Workflow specification. +This will update your `go.mod` file to include the Serverless Workflow SDK as a dependency. -To use the generated types, import the package in your go file like this: +Import the SDK in your Go file: ```go -import "github.com/serverlessworkflow/sdk-go/v2/model" +import "github.com/serverlessworkflow/sdk-go/v3/model" ``` -Then just reference the package in your Go file like `myfunction := model.Function{}`. +You can now use the SDK types and functions, for example: -### Parsing Serverless Workflow files +```go +package main -Serverless Workflow Specification supports YAML and JSON files for Workflow definitions. -To transform such files into a Go data structure, use: +import ( + "github.com/serverlessworkflow/sdk-go/v3/builder" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func main() { + workflowBuilder := builder.New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + workflow, _ := builder.Object(workflowBuilder) + _ = workflow // use your models +} + +``` + +### Parsing Workflow Files + +The Serverless Workflow Specification supports YAML and JSON files. Use the following example to parse a workflow file into a Go data structure: ```go -package sw +package main import ( - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/parser" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/parser" ) func ParseWorkflow(filePath string) (*model.Workflow, error) { workflow, err := parser.FromFile(filePath) if err != nil { return nil, err - } + } return workflow, nil -} +} ``` -The `Workflow` structure then can be used in your application. +This `Workflow` structure can then be used programmatically in your application. -## Slack Channel +### Programmatic Workflow Creation -Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello 🙋. +Workflows can also be created programmatically with the `builder` package, as shown in the [Getting Started](#getting-started) example above. -## Contributors Guide +--- -This guide aims to guide newcomers to getting started with the project standards. +## Slack Community +Join the conversation and connect with other contributors on the [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf). Find us in the `#serverless-workflow-sdk` channel and say hello! 🙋 -### Code Style +--- -For this project we use basically the default configuration for most used IDEs. -For the configurations below, make sure to properly configure your IDE: +## Contributing -- **imports**: goimports +We welcome contributions to improve this SDK. Please refer to the sections below for guidance on maintaining project standards. -This should be enough to get you started. +### Code Style -If you are unsure that your IDE is not correctly configured, you can run the lint checks: +- Use `goimports` for import organization.
+- Lint your code with: ```bash make lint ``` -If something goes wrong, the error will be printed, e.g.: +To automatically fix lint issues, use: + +```bash +make lint params=--fix +``` + +Example lint error: + ```bash $ make lint make addheaders @@ -112,26 +159,23 @@ util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) make: *** [lint] Error 1 ``` -Lint issues can be fixed with the `--fix` flag, this command can be used: -```bash -make lint params=--fix -``` - - ### EditorConfig -For IntelliJ you can find an example `editorconfig` file [here](contrib/intellij.editorconfig). To use it please visit -the Jetbrains [documentation](https://www.jetbrains.com/help/idea/editorconfig.html). +For IntelliJ users, an example `.editorconfig` file is available [here](contrib/intellij.editorconfig). See the [Jetbrains documentation](https://www.jetbrains.com/help/idea/editorconfig.html) for usage details. ### Known Issues -On MacOSX/darwin you might get this issue: +#### MacOS Issue: + +On MacOS, you might encounter the following error: + ``` - goimports: can't extract issues from gofmt diff output +goimports: can't extract issues from gofmt diff output ``` -To solve install the `diffutils` package: + +To resolve this, install `diffutils`: ```bash - brew install diffutils +brew install diffutils ``` diff --git a/builder/builder.go b/builder/builder.go index 97ef3b2..45ccc2e 100644 --- a/builder/builder.go +++ b/builder/builder.go @@ -16,46 +16,52 @@ package builder import ( "encoding/json" + "fmt" - "sigs.k8s.io/yaml" + "github.com/serverlessworkflow/sdk-go/v3/model" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" ) +// New initializes a new WorkflowBuilder instance. func New() *model.WorkflowBuilder { return model.NewWorkflowBuilder() } +// Yaml generates YAML output from the WorkflowBuilder using custom MarshalYAML implementations. func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { - data, err := Json(builder) + workflow, err := Object(builder) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to build workflow object: %w", err) } - return yaml.JSONToYAML(data) + return yaml.Marshal(workflow) } +// Json generates JSON output from the WorkflowBuilder. func Json(builder *model.WorkflowBuilder) ([]byte, error) { workflow, err := Object(builder) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to build workflow object: %w", err) } - return json.Marshal(workflow) + return json.MarshalIndent(workflow, "", " ") } +// Object builds and validates the Workflow object from the builder. func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { workflow := builder.Build() - ctx := model.NewValidatorContext(&workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, err + + // Validate the workflow object + if err := model.GetValidator().Struct(workflow); err != nil { + return nil, fmt.Errorf("workflow validation failed: %w", err) } - return &workflow, nil + + return workflow, nil } +// Validate validates any given object using the Workflow model validator. 
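// For example, a minimal sketch (assumes wf is a *model.Workflow obtained from
// the parser or from a WorkflowBuilder):
//
//	if err := Validate(wf); err != nil {
//		// handle the validation error
//	}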
func Validate(object interface{}) error { - ctx := model.NewValidatorContext(object) - if err := val.GetValidator().StructCtx(ctx, object); err != nil { - return val.WorkflowError(err) + if err := model.GetValidator().Struct(object); err != nil { + return fmt.Errorf("validation failed: %w", err) } return nil } diff --git a/builder/builder_test.go b/builder/builder_test.go index 97b8c84..cbec324 100644 --- a/builder/builder_test.go +++ b/builder/builder_test.go @@ -15,106 +15,163 @@ package builder import ( + "errors" "testing" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" + "github.com/go-playground/validator/v10" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/test" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" ) -func prepareBuilder() *model.WorkflowBuilder { - builder := New().Key("key test").ID("id test") - - builder.AddFunctions().Name("function name").Operation("http://test") - builder.AddFunctions().Name("function name2").Operation("http://test") - - function3 := builder.AddFunctions().Name("function name2").Operation("http://test") - builder.RemoveFunctions(function3) - - state1 := builder.AddStates(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) +func TestBuilder_Yaml(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + // Generate YAML from the builder + yamlData, err := Yaml(builder) + assert.NoError(t, err) - inject := state1.InjectState() - inject.Data(map[string]model.Object{ - "test": model.FromMap(map[string]any{}), - }) + // Define the expected YAML structure + expectedYAML := `document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: +- task1: + call: http + if: ${condition} + with: + method: GET + endpoint: http://example.com +` - return builder + // Use assertYAMLEq to compare YAML structures + test.AssertYAMLEq(t, expectedYAML, string(yamlData)) } -func TestValidate(t *testing.T) { - state1 := model.NewStateBuilder(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) - err := Validate(state1) +func TestBuilder_Json(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + jsonData, err := Json(builder) assert.NoError(t, err) - state2 := model.NewStateBuilder(). 
- Type(model.StateTypeInject) - state2.End().Terminate(true) - err = Validate(state2.Build()) - if assert.Error(t, err) { - var workflowErrors val.WorkflowErrors - if errors.As(err, &workflowErrors) { - assert.Equal(t, "state.name is required", workflowErrors[0].Error()) - } else { - // Handle other error types if necessary - t.Errorf("Unexpected error: %v", err) - } - } + expectedJSON := `{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "if": "${condition}", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +}` + assert.JSONEq(t, expectedJSON, string(jsonData)) } -func TestObject(t *testing.T) { - workflow, err := Object(prepareBuilder()) - if assert.NoError(t, err) { - assert.Equal(t, "key test", workflow.Key) - assert.Equal(t, "id test", workflow.ID) - assert.Equal(t, "0.8", workflow.SpecVersion) - assert.Equal(t, "jq", workflow.ExpressionLang.String()) - assert.Equal(t, 2, len(workflow.Functions)) - - assert.Equal(t, "function name", workflow.Functions[0].Name) - assert.Equal(t, "function name2", workflow.Functions[1].Name) - } +func TestBuilder_Object(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + workflow, err := Object(builder) + assert.NoError(t, err) + assert.NotNil(t, workflow) + + assert.Equal(t, "1.0.0", workflow.Document.DSL) + assert.Equal(t, "examples", workflow.Document.Namespace) + assert.Equal(t, "example-workflow", workflow.Document.Name) + assert.Equal(t, "1.0.0", workflow.Document.Version) + assert.Len(t, *workflow.Do, 1) + assert.Equal(t, "http", (*workflow.Do)[0].Task.(*model.CallHTTP).Call) } -func TestJson(t *testing.T) { - data, err := Json(prepareBuilder()) - if assert.NoError(t, err) { - d := `{"id":"id test","key":"key test","version":"","specVersion":"0.8","expressionLang":"jq","states":[{"name":"state","type":"inject","end":{"terminate":true},"data":{"test":{}}}],"functions":[{"name":"function name","operation":"http://test","type":"rest"},{"name":"function name2","operation":"http://test","type":"rest"}]}` - assert.Equal(t, d, string(data)) +func TestBuilder_Validate(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{ + DSL: "1.0.0", + Namespace: "examples", + Name: "example-workflow", + Version: "1.0.0", + }, + Do: &model.TaskList{ + { + Key: "task1", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }, + }, + }, } -} -func TestYaml(t *testing.T) { - data, err := Yaml(prepareBuilder()) - if assert.NoError(t, err) { - d := `expressionLang: jq -functions: -- name: function name - operation: http://test - type: rest -- name: function name2 - operation: http://test - type: rest -id: id test -key: key test -specVersion: "0.8" -states: -- data: - test: {} - end: - terminate: true - name: state - type: inject -version: "" -` + err := Validate(workflow) + assert.NoError(t, err) - assert.Equal(t, d, string(data)) + // Test validation failure + workflow.Do = &model.TaskList{ + { + Key: "task2", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", // Missing Endpoint + 
}, + }, + }, + } + err = Validate(workflow) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + t.Logf("Validation errors: %v", validationErrors) + assert.Contains(t, validationErrors.Error(), "Do[0].Task.With.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") } } diff --git a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml deleted file mode 100644 index 1dfc265..0000000 --- a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml +++ /dev/null @@ -1,1967 +0,0 @@ -# Copyright 2024 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - annotations: - controller-gen.kubebuilder.io/version: v0.16.3 - name: serverlessworkflows.serverlessworkflow.io -spec: - group: serverlessworkflow.io - names: - kind: ServerlessWorkflow - listKind: ServerlessWorkflowList - plural: serverlessworkflows - singular: serverlessworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - description: ServerlessWorkflow ... - properties: - apiVersion: - description: |- - APIVersion defines the versioned schema of this representation of an object. - Servers should convert recognized schemas to the latest internal value, and - may reject unrecognized values. - More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources - type: string - kind: - description: |- - Kind is a string value representing the REST resource this object represents. - Servers may infer this from the endpoint the client submits requests to. - Cannot be updated. - In CamelCase. - More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds - type: string - metadata: - type: object - spec: - description: ServerlessWorkflowSpec defines a base API for integration - test with operator-sdk - properties: - annotations: - description: |- - Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important - qualities. - items: - type: string - type: array - auth: - description: |- - Auth definitions can be used to define authentication information that should be applied to resources defined - in the operation property of function definitions. It is not used as authentication information for the - function invocation, but just to access the resource containing the function invocation information. - x-kubernetes-preserve-unknown-fields: true - autoRetries: - description: AutoRetries If set to true, actions should automatically - be retried on unchecked errors. Default is false - type: boolean - constants: - additionalProperties: - description: |- - RawMessage is a raw encoded JSON value. 
- It implements [Marshaler] and [Unmarshaler] and can - be used to delay JSON decoding or precompute a JSON encoding. - format: byte - type: string - description: |- - Constants Workflow constants are used to define static, and immutable, data which is available to - Workflow Expressions. - type: object - dataInputSchema: - description: DataInputSchema URI or Object of the JSON Schema used - to validate the workflow data input - properties: - failOnValidationErrors: - type: boolean - schema: - x-kubernetes-preserve-unknown-fields: true - required: - - failOnValidationErrors - - schema - type: object - description: - description: Workflow description. - type: string - errors: - description: Defines checked errors that can be explicitly handled - during workflow execution. - items: - description: Error declaration for workflow definitions - properties: - code: - description: |- - Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. - Should not be defined if error is set to '*'. - type: string - description: - description: OnError description. - type: string - name: - description: Name Domain-specific error name. - type: string - required: - - name - type: object - type: array - events: - items: - description: Event used to define events and their correlations - properties: - correlation: - description: Define event correlation rules for this event. - Only used for consumed events. - items: - description: Correlation define event correlation rules for - an event. Only used for `consumed` events - properties: - contextAttributeName: - description: CloudEvent Extension Context Attribute name - type: string - contextAttributeValue: - description: CloudEvent Extension Context Attribute value - type: string - required: - - contextAttributeName - type: object - type: array - dataOnly: - default: true - description: |- - If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload - and context attributes should be accessible. Defaults to true. - type: boolean - kind: - default: consumed - description: Defines the CloudEvent as either 'consumed' or - 'produced' by the workflow. Defaults to `consumed`. - enum: - - consumed - - produced - type: string - metadata: - description: Metadata information - x-kubernetes-preserve-unknown-fields: true - name: - description: Unique event name. - type: string - source: - description: CloudEvent source. - type: string - type: - description: CloudEvent type. - type: string - required: - - name - - type - type: object - type: array - expressionLang: - default: jq - description: Identifies the expression language used for workflow - expressions. Default is 'jq'. - enum: - - jq - - jsonpath - - cel - type: string - functions: - items: - description: Function ... - properties: - authRef: - description: References an auth definition name to be used to - access to resource defined in the operation parameter. - type: string - metadata: - description: Metadata information - x-kubernetes-preserve-unknown-fields: true - name: - description: Unique function name - type: string - operation: - description: |- - If type is `rest`, #. - If type is `rpc`, ##. - If type is `expression`, defines the workflow expression. If the type is `custom`, - #. - type: string - type: - default: rest - description: |- - Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. - Default is `rest`. 
- enum: - - rest - - rpc - - expression - - graphql - - odata - - asyncapi - - custom - type: string - required: - - name - - operation - type: object - type: array - id: - description: Workflow unique identifier - type: string - keepActive: - description: |- - If "true", workflow instances is not terminated when there are no active execution paths. - Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" - type: boolean - key: - description: Key Domain-specific workflow identifier - type: string - metadata: - description: Metadata custom information shared with the runtime. - x-kubernetes-preserve-unknown-fields: true - name: - description: Workflow name - type: string - retries: - items: - description: Retry ... - properties: - delay: - description: Time delay between retry attempts (ISO 8601 duration - format) - type: string - increment: - description: Static value by which the delay increases during - each attempt (ISO 8601 time format) - type: string - jitter: - description: If float type, maximum amount of random time added - or subtracted from the delay between each retry relative to - total delay (between 0 and 1). If string type, absolute maximum - amount of random time added or subtracted from the delay between - each retry (ISO 8601 duration format) - properties: - floatVal: - type: number - strVal: - type: string - type: - description: Type represents the stored type of Float32OrString. - format: int64 - type: integer - type: object - maxAttempts: - anyOf: - - type: integer - - type: string - description: Maximum number of retry attempts. - x-kubernetes-int-or-string: true - maxDelay: - description: Maximum time delay between retry attempts (ISO - 8601 duration format) - type: string - multiplier: - description: Numeric value, if specified the delay between retries - is multiplied by this value. - properties: - floatVal: - type: number - strVal: - type: string - type: - description: Type represents the stored type of Float32OrString. - format: int64 - type: integer - type: object - name: - description: Unique retry strategy name - type: string - required: - - maxAttempts - - name - type: object - type: array - secrets: - description: |- - Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, - inside your Workflow Expressions. - items: - type: string - type: array - specVersion: - default: "0.8" - description: Serverless Workflow schema version - type: string - start: - description: Workflow start definition. - x-kubernetes-preserve-unknown-fields: true - states: - description: States ... - items: - properties: - callbackState: - description: callbackState executes a function and waits for - callback event that indicates completion of the task. - properties: - action: - description: Defines the action to be executed. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters state - data that can be used by the action. - type: string - results: - description: Workflow expression that filters the - actions data results. 
- type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate to - true for this action to be performed. If false, action - is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context attributes - to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should be - invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a 'consumed' - event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a 'produced' - event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should be - invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. String - containing a valid GraphQL selection set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. 
- type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - eventDataFilter: - description: Event data filter definition. - properties: - data: - description: Workflow expression that filters of the - event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRef: - description: References a unique callback event name in - the defined workflow events. - type: string - timeouts: - description: Time period to wait for incoming events (ISO - 8601 format) - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - eventTimeout: - description: Default timeout for consuming defined events - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - action - - eventRef - type: object - compensatedBy: - description: Unique Name of a workflow state which is responsible - for compensation of this state. - type: string - delayState: - description: delayState Causes the workflow execution to delay - for a specified duration. - properties: - timeDelay: - description: Amount of time (ISO 8601 format) to delay - type: string - required: - - timeDelay - type: object - end: - description: State end definition. - x-kubernetes-preserve-unknown-fields: true - eventState: - description: |- - event states await one or more events and perform actions when they are received. If defined as the - workflow starting state, the event state definition controls when the workflow instances should be created. - properties: - exclusive: - default: true - description: |- - If true consuming one of the defined events causes its associated actions to be performed. If false all - the defined events must be consumed in order for actions to be performed. Defaults to true. - type: boolean - onEvents: - description: Define the events to be consumed and optional - actions to be performed. 
- items: - description: OnEvents define which actions are be performed - for the one or more events. - properties: - actionMode: - default: sequential - description: Should actions be performed sequentially - or in parallel. Default is sequential. - enum: - - sequential - - parallel - type: string - actions: - description: Actions to be performed if expression - matches - items: - description: Action specify invocations of services - or other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If - false, action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' - reusable event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name - of a 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name - of a 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function - definition. - properties: - arguments: - description: Arguments (inputs) to be passed - to the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. 
- String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry - definition. If not defined uses the default - runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - type: array - eventDataFilter: - description: eventDataFilter defines the callback - event data filter definition - properties: - data: - description: Workflow expression that filters - of the event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRefs: - description: References one or more unique event names - in the defined workflow events. - items: - type: string - minItems: 1 - type: array - required: - - eventRefs - type: object - minItems: 1 - type: array - timeouts: - description: State specific timeouts. 
- properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - eventTimeout: - description: Default timeout for consuming defined events - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - onEvents - type: object - forEachState: - description: forEachState used to execute actions for each element - of a data set. - properties: - actions: - description: Actions to be executed for each of the elements - of inputCollection. - items: - description: Action specify invocations of services or - other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If false, - action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a - 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. 
If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a - 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 0 - type: array - batchSize: - anyOf: - - type: integer - - type: string - description: |- - Specifies how many iterations may run in parallel at the same time. Used if mode property is set to - parallel (default). If not specified, its value should be the size of the inputCollection. - x-kubernetes-int-or-string: true - inputCollection: - description: Workflow expression selecting an array element - of the states' data. - type: string - iterationParam: - description: |- - Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, - this param should contain a unique element of the inputCollection array. 
- type: string - mode: - default: parallel - description: Specifies how iterations are to be performed - (sequential or in parallel), defaults to parallel. - enum: - - sequential - - parallel - type: string - outputCollection: - description: Workflow expression specifying an array element - of the states data to add the results of each iteration. - type: string - timeouts: - description: State specific timeout. - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - inputCollection - type: object - id: - description: Unique State id. - type: string - injectState: - description: injectState used to inject static data into state - data input. - properties: - data: - description: JSON object which can be set as state's data - input and can be manipulated via filter - x-kubernetes-preserve-unknown-fields: true - timeouts: - description: State specific timeouts - properties: - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - data - type: object - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: State name. - type: string - onErrors: - description: States error handling and retries definitions. - items: - description: OnError ... - properties: - end: - description: |- - End workflow execution in case of this error. If retryRef is defined, this ends workflow only if - retries were unsuccessful. - x-kubernetes-preserve-unknown-fields: true - errorRef: - description: ErrorRef Reference to a unique workflow error - definition. Used of errorRefs is not used - type: string - errorRefs: - description: ErrorRefs References one or more workflow - error definitions. Used if errorRef is not used - items: - type: string - type: array - transition: - description: |- - Transition to next state to handle the error. If retryRef is defined, this transition is taken only if - retries were unsuccessful. - x-kubernetes-preserve-unknown-fields: true - type: object - type: array - operationState: - description: operationState defines a set of actions to be performed - in sequence or in parallel. - properties: - actionMode: - default: sequential - description: Specifies whether actions are performed in - sequence or in parallel, defaults to sequential. - enum: - - sequential - - parallel - type: string - actions: - description: Actions to be performed - items: - description: Action specify invocations of services or - other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. 
Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If false, - action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a - 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a - 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. 
- type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 0 - type: array - timeouts: - description: State specific timeouts - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Defines workflow state execution timeout. - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - actions - type: object - parallelState: - description: parallelState Consists of a number of states that - are executed in parallel. - properties: - branches: - description: List of branches for this parallel state. - items: - description: Branch Definition - properties: - actions: - description: Actions to be executed in this branch - items: - description: Action specify invocations of services - or other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. 
In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If - false, action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' - reusable event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name - of a 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name - of a 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function - definition. - properties: - arguments: - description: Arguments (inputs) to be passed - to the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry - definition. If not defined uses the default - runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. 
- Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 1 - type: array - name: - description: Branch name - type: string - timeouts: - description: Branch specific timeout settings - properties: - actionExecTimeout: - description: Single actions definition execution - timeout duration (ISO 8601 duration format) - type: string - branchExecTimeout: - description: Single branch execution timeout duration - (ISO 8601 duration format) - type: string - type: object - required: - - actions - - name - type: object - minItems: 1 - type: array - completionType: - default: allOf - description: Option types on how to complete branch execution. - Defaults to `allOf`. - enum: - - allOf - - atLeast - type: string - numCompleted: - anyOf: - - type: integer - - type: string - description: |- - Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete - in order for the state to transition/end. - x-kubernetes-int-or-string: true - timeouts: - description: State specific timeouts - properties: - branchExecTimeout: - description: Default single branch execution timeout - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - branches - type: object - sleepState: - description: sleepState suspends workflow execution for a given - time duration. - properties: - duration: - description: Duration (ISO 8601 duration format) to sleep - type: string - timeouts: - description: Timeouts State specific timeouts - properties: - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - duration - type: object - stateDataFilter: - description: State data filter. - properties: - input: - description: Workflow expression to filter the state data - input - type: string - output: - description: Workflow expression that filters the state - data output - type: string - type: object - switchState: - description: 'switchState is workflow''s gateways: direct transitions - onf a workflow based on certain conditions.' - properties: - dataConditions: - description: Defines conditions evaluated against data - items: - description: |- - DataCondition specify a data-based condition statement which causes a transition to another workflow state - if evaluated to true. - properties: - condition: - description: Workflow expression evaluated against - state data. 
Must evaluate to true or false. - type: string - end: - description: Explicit transition to end - properties: - compensate: - description: If set to true, triggers workflow - compensation before workflow execution completes. - Default is false. - type: boolean - continueAs: - description: |- - Defines that current workflow execution should stop, and execution should continue as a new workflow - instance of the provided id - properties: - data: - description: |- - If string type, an expression which selects parts of the states data output to become the workflow data input of - continued execution. If object type, a custom object to become the workflow data input of the continued execution - x-kubernetes-preserve-unknown-fields: true - version: - description: Version of the workflow to continue - execution as. - type: string - workflowExecTimeout: - description: |- - WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. - Overwrites any specific settings set by that workflow - properties: - duration: - default: unlimited - description: Workflow execution timeout - duration (ISO 8601 duration format). - If not specified should be 'unlimited'. - type: string - interrupt: - description: |- - If false, workflow instance is allowed to finish current execution. If true, current workflow execution - is stopped immediately. Default is false. - type: boolean - runBefore: - description: Name of a workflow state - to be executed before workflow instance - is terminated. - type: string - required: - - duration - type: object - workflowId: - description: Unique id of the workflow to - continue execution as. - type: string - required: - - workflowId - type: object - produceEvents: - description: Array of producedEvent definitions. - Defines events that should be produced. - items: - description: |- - ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a - workflow transitions. The eventRef property must match the name of one of the defined produced events in the - events definition. - properties: - contextAttributes: - additionalProperties: - type: string - description: Add additional event extension - context attributes. - type: object - data: - description: |- - If String, expression which selects parts of the states data output to become the data of the produced event. - If object a custom object to become the data of produced event. - x-kubernetes-preserve-unknown-fields: true - eventRef: - description: Reference to a defined unique - event name in the events definition - type: string - required: - - eventRef - type: object - type: array - terminate: - description: If true, completes all execution - flows in the given workflow instance. - type: boolean - type: object - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: Data condition name. - type: string - transition: - description: Workflow transition if condition is evaluated - to true - properties: - compensate: - default: false - description: If set to true, triggers workflow - compensation before this transition is taken. - Default is false. - type: boolean - nextState: - description: Name of the state to transition to - next. - type: string - produceEvents: - description: Array of producedEvent definitions. - Events to be produced before the transition - takes place. 
- items: - description: |- - ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a - workflow transitions. The eventRef property must match the name of one of the defined produced events in the - events definition. - properties: - contextAttributes: - additionalProperties: - type: string - description: Add additional event extension - context attributes. - type: object - data: - description: |- - If String, expression which selects parts of the states data output to become the data of the produced event. - If object a custom object to become the data of produced event. - x-kubernetes-preserve-unknown-fields: true - eventRef: - description: Reference to a defined unique - event name in the events definition - type: string - required: - - eventRef - type: object - type: array - required: - - nextState - type: object - required: - - condition - - end - type: object - type: array - defaultCondition: - description: |- - Default transition of the workflow if there is no matching data conditions. Can include a transition or - end definition. - properties: - end: - description: "\tIf this state an end state" - x-kubernetes-preserve-unknown-fields: true - transition: - description: |- - Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - Each state can define a transition definition that is used to determine which state to transition to next. - x-kubernetes-preserve-unknown-fields: true - type: object - eventConditions: - description: Defines conditions evaluated against events. - items: - description: EventCondition specify events which the switch - state must wait for. - properties: - end: - description: Explicit transition to end - x-kubernetes-preserve-unknown-fields: true - eventDataFilter: - description: Event data filter definition. - properties: - data: - description: Workflow expression that filters - of the event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRef: - description: References a unique event name in the - defined workflow events. - type: string - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: Event condition name. - type: string - transition: - description: Workflow transition if condition is evaluated - to true - x-kubernetes-preserve-unknown-fields: true - required: - - eventRef - type: object - type: array - timeouts: - description: SwitchState specific timeouts - properties: - eventTimeout: - description: |- - Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. 
- NOTE: this is only available for EventConditions - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - defaultCondition - type: object - transition: - description: Next transition of the workflow after the time - delay. - x-kubernetes-preserve-unknown-fields: true - type: - description: stateType can be any of delay, callback, event, - foreach, inject, operation, parallel, sleep, switch - enum: - - delay - - callback - - event - - foreach - - inject - - operation - - parallel - - sleep - - switch - type: string - usedForCompensation: - description: If true, this state is used to compensate another - state. Default is false. - type: boolean - required: - - name - - type - type: object - minItems: 1 - type: array - x-kubernetes-preserve-unknown-fields: true - timeouts: - description: Defines the workflow default timeout settings. - properties: - actionExecTimeout: - description: ActionExecTimeout Single actions definition execution - timeout duration (ISO 8601 duration format). - type: string - branchExecTimeout: - description: BranchExecTimeout Single branch execution timeout - duration (ISO 8601 duration format). - type: string - eventTimeout: - description: EventTimeout Timeout duration to wait for consuming - defined events (ISO 8601 duration format). - type: string - stateExecTimeout: - description: StateExecTimeout Total state execution timeout (including - retries) (ISO 8601 duration format). - properties: - single: - description: Single state execution timeout, not including - retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including retries - (ISO 8601 duration format) - type: string - required: - - total - type: object - workflowExecTimeout: - description: |- - WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should - be 'unlimited'. - properties: - duration: - default: unlimited - description: Workflow execution timeout duration (ISO 8601 - duration format). If not specified should be 'unlimited'. - type: string - interrupt: - description: |- - If false, workflow instance is allowed to finish current execution. If true, current workflow execution - is stopped immediately. Default is false. - type: boolean - runBefore: - description: Name of a workflow state to be executed before - workflow instance is terminated. - type: string - required: - - duration - type: object - type: object - version: - description: Workflow version. - type: string - required: - - specVersion - - states - type: object - status: - description: ServerlessWorkflowStatus ... 
- properties: - observedGeneration: - format: int64 - type: integer - type: object - type: object - served: true - storage: true - subresources: - status: {} diff --git a/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml b/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml deleted file mode 100644 index 5faa2c0..0000000 --- a/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This is an example on how the CR would look like when importing the sdk go types into your operator's spec -# the cr instantiation would be validated by this issue: https://github.com/serverlessworkflow/sdk-go/issues/152 - -apiVersion: io.serverlessworkflow/v1alpha1 -kind: ServerlessWorkflow -metadata: - name: custom.greeting -spec: - functions: - - name: greetFunction - type: custom - operation: sysout - states: - - name: ChooseOnLanguage - type: switch - dataConditions: - - condition: "${ .language == \"English\" }" - transition: GreetInEnglish - - condition: "${ .language == \"Spanish\" }" - transition: GreetInSpanish - - condition: "${ .language == \"Italian\" }" - transition: GreetInItalian - defaultCondition: GreetInEnglish - - name: GreetInEnglish - type: inject - data: - greeting: "Hello from JSON Workflow, " - transition: GreetPerson - - name: GreetInSpanish - type: inject - data: - greeting: "Saludos desde JSON Workflow, " - transition: GreetPerson - - name: GreetInItalian - type: inject - data: - greeting: "Saluti dal JSON Workflow, " - transition: GreetPerson - - name: GreetPerson - type: operation - actions: - - name: greetAction - functionRef: - refName: greetFunction - arguments: - message: ".greeting+.name" - end: true \ No newline at end of file diff --git a/go.mod b/go.mod index d2e3b7e..fc847fa 100644 --- a/go.mod +++ b/go.mod @@ -1,44 +1,29 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v3 -go 1.22.0 - -toolchain go1.23.1 +go 1.22 require ( - github.com/go-playground/validator/v10 v10.22.1 - github.com/pkg/errors v0.9.1 - github.com/relvacode/iso8601 v1.4.0 - github.com/sosodev/duration v1.3.1 - github.com/stretchr/testify v1.9.0 - gopkg.in/yaml.v3 v3.0.1 - k8s.io/apimachinery v0.31.1 - sigs.k8s.io/controller-runtime v0.19.0 + github.com/go-playground/validator/v10 v10.24.0 + github.com/itchyny/gojq v0.12.17 + github.com/stretchr/testify v1.10.0 + github.com/tidwall/gjson v1.18.0 sigs.k8s.io/yaml v1.4.0 ) require ( github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/fxamacker/cbor/v2 v2.7.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/gogo/protobuf v1.3.2 // 
indirect - github.com/google/gofuzz v1.2.0 // indirect - github.com/json-iterator/go v1.1.12 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/leodido/go-urn v1.4.0 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/x448/float16 v0.8.4 // indirect - golang.org/x/crypto v0.31.0 // indirect - golang.org/x/net v0.33.0 // indirect - golang.org/x/sys v0.28.0 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + golang.org/x/crypto v0.32.0 // indirect + golang.org/x/net v0.34.0 // indirect + golang.org/x/sys v0.29.0 // indirect golang.org/x/text v0.21.0 // indirect - gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - k8s.io/klog/v2 v2.130.1 // indirect - k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 393de63..257234a 100644 --- a/go.sum +++ b/go.sum @@ -1,124 +1,46 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= -github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= +github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= -github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= -github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= -github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg= +github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af h1:kmjWCqn2qkEml422C2Rrd27c3VGxi6a/6HNq8QmHRKM= -github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af/go.mod h1:K1liHPHnj73Fdn/EKuT8nrFqBihUSKXoLYU0BuatOYo= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= +github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= +github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= +github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA= -github.com/onsi/ginkgo/v2 v2.19.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To= -github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= -github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/relvacode/iso8601 v1.4.0 h1:GsInVSEJfkYuirYFxa80nMLbH2aydgZpIf52gYZXUJs= -github.com/relvacode/iso8601 v1.4.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= -github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4= -github.com/sosodev/duration v1.3.1/go.mod 
h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= -github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= -golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= -golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= -golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= 
+github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= +golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= +golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= +golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/api v0.31.0 h1:b9LiSjR2ym/SzTOlfMHm1tr7/21aD7fSkqgD/CVJBCo= -k8s.io/api v0.31.0/go.mod h1:0YiFF+JfFxMM6+1hQei8FY8M7s1Mth+z/q7eF1aJkTE= -k8s.io/apimachinery v0.31.1 h1:mhcUBbj7KUjaVhyXILglcVjuS4nYXiwC+KKFBgIVy7U= -k8s.io/apimachinery v0.31.1/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo= -k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= -k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= -k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 h1:pUdcCO1Lk/tbT5ztQWOBi5HBgbBP1J8+AsQnQCKsi8A= -k8s.io/utils v0.0.0-20240711033017-18e509b52bc8/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -sigs.k8s.io/controller-runtime v0.19.0 
h1:nWVM7aq+Il2ABxwiCizrVDSlmDcshi9llbaFbC0ji/Q= -sigs.k8s.io/controller-runtime v0.19.0/go.mod h1:iRmWllt8IlaLjvTTDLhRBXIEtkCK6hwVBJJsYS9Ajf4= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/hack/builder-gen.sh b/hack/builder-gen.sh deleted file mode 100755 index 083b187..0000000 --- a/hack/builder-gen.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh -# and adapted to only install and run the deepcopy-gen - -set -o errexit -set -o nounset -set -o pipefail - -SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. -echo "Script root is $SCRIPT_ROOT" - -GENS="$1" -shift 1 - -( - # To support running this script from anywhere, first cd into this directory, - # and then install with forced module mode on and fully qualified name. - # make sure your GOPATH env is properly set. - # it will go under $GOPATH/bin - cd "$(dirname "${0}")" - GO111MODULE=on go install github.com/galgotech/builder-gen@latest -) - -function codegen::join() { local IFS="$1"; shift; echo "$*"; } - -if [ "${GENS}" = "all" ] || grep -qw "buildergen" <<<"${GENS}"; then - echo "Generating buildergen funcs" - export GO111MODULE=on - # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/builder-gen" -v 1 \ - --input-dirs ./model -O zz_generated.buildergen \ - --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" - "$@" -fi diff --git a/hack/conv/main.go b/hack/conv/main.go deleted file mode 100644 index e70e738..0000000 --- a/hack/conv/main.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package main - -import ( - "encoding/json" - "errors" - "log" - "os" - "path" - "path/filepath" - "strings" - - "gopkg.in/yaml.v3" - - "github.com/serverlessworkflow/sdk-go/v2/test" -) - -func convert(i interface{}) interface{} { - switch x := i.(type) { - case map[interface{}]interface{}: - m2 := map[string]interface{}{} - for k, v := range x { - m2[k.(string)] = convert(v) - } - return m2 - case []interface{}: - for i, v := range x { - x[i] = convert(v) - } - } - return i -} - -func transform( - files []string, - srcFormat string, - destFormat string, - unmarshal func(data []byte, out interface{}) error, - marshal func(in interface{}) ([]byte, error), -) { - for _, srcFile := range files { - if !strings.HasSuffix(srcFile, srcFormat) { - log.Printf("%s is not %s format, skip it", srcFile, srcFormat) - continue - } - - destFile := srcFile[0:len(srcFile)-len(srcFormat)] + destFormat - if _, err := os.Stat(destFile); err == nil { - log.Printf("ERR: the target file %v exists, skip it", destFile) - continue - } else if !errors.Is(err, os.ErrNotExist) { - log.Printf("ERR: stat target file %v, %v, skip it", destFile, err) - continue - } - - srcData, err := os.ReadFile(filepath.Clean(srcFile)) - if err != nil { - log.Printf("ERR: cannot read file %v, %v, skip it", srcFile, err) - continue - } - - var srcObj interface{} - err = unmarshal(srcData, &srcObj) - if err != nil { - log.Printf("ERR: cannot unmarshal file %v to %s, %v, skip it", srcFile, srcFormat, err) - continue - } - - destObj := convert(srcObj) - destData, err := marshal(destObj) - if err != nil { - log.Printf("ERR: cannot marshal fild %v data to %v, %v, skip it", srcFile, destFormat, err) - continue - } - - err = os.WriteFile(destFile, destData, 0600) - if err != nil { - log.Printf("ERR: cannot write to file %v, %v, skip it", destFile, err) - continue - } - - log.Printf("convert %v to %v done", srcFile, destFile) - } -} - -func main() { - // TODO: make this as argument - dir := path.Join(test.CurrentProjectPath(), "parser", "testdata", "workflows", "urifiles") - dirEntries, err := os.ReadDir(dir) - if err != nil { - panic(err) - } - - files := make([]string, 0, len(dirEntries)) - for _, entry := range dirEntries { - if entry.IsDir() { - log.Printf("%s is directory, skip it", entry.Name()) - continue - } - - files = append(files, path.Join(dir, entry.Name())) - } - - log.Printf("found %v files", len(files)) - - // First, convert all json format files to yaml - log.Printf("start to convert all json format files to yaml format") - transform(files, ".json", ".yaml", json.Unmarshal, yaml.Marshal) - - // Second, convert all yaml format files to json - log.Printf("start to convert all yaml format files to json format") - transform(files, ".yaml", ".json", yaml.Unmarshal, func(in interface{}) ([]byte, error) { - return json.MarshalIndent(in, "", " ") - }) -} diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh deleted file mode 100755 index 9c7fe0f..0000000 --- a/hack/deepcopy-gen.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh -# and adapted to only install and run the deepcopy-gen - -set -o errexit -set -o nounset -set -o pipefail - -SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. -echo "Script root is $SCRIPT_ROOT" - -GENS="$1" -shift 1 - -( - # To support running this script from anywhere, first cd into this directory, - # and then install with forced module mode on and fully qualified name. - # make sure your GOPATH env is properly set. - # it will go under $GOPATH/bin - cd "$(dirname "${0}")" - DEEPCOPY_VERSION="v0.29.4" - GO111MODULE=on go install k8s.io/code-generator/cmd/deepcopy-gen@${DEEPCOPY_VERSION} -) - -function codegen::join() { local IFS="$1"; shift; echo "$*"; } - -if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then - echo "Generating deepcopy funcs" - export GO111MODULE=on - # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/deepcopy-gen" -v 1 \ - --input-dirs ./model -O zz_generated.deepcopy \ - --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" - "$@" -fi diff --git a/hack/integration-test.sh b/hack/integration-test.sh new file mode 100755 index 0000000..52f6889 --- /dev/null +++ b/hack/integration-test.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Script to fetch workflow examples, parse, and validate them using the Go parser. + +# Variables +SPEC_REPO="https://github.com/serverlessworkflow/specification" +EXAMPLES_DIR="examples" +PARSER_BINARY="./parser/cmd/main.go" +JUNIT_FILE="./integration-test-junit.xml" + +# Create a temporary directory +TEMP_DIR=$(mktemp -d) + +# Ensure temporary directory was created +if [ ! -d "$TEMP_DIR" ]; then + echo "โŒ Failed to create a temporary directory." + exit 1 +fi + +# shellcheck disable=SC2317 +# Clean up the temporary directory on script exit +cleanup() { + echo "๐Ÿงน Cleaning up temporary directory..." + rm -rf "$TEMP_DIR" +} +trap cleanup EXIT + +# Fetch the examples directory +echo "๐Ÿ“ฅ Fetching workflow examples from ${SPEC_REPO}/${EXAMPLES_DIR}..." +if ! git clone --depth=1 --filter=blob:none --sparse "$SPEC_REPO" "$TEMP_DIR" &> /dev/null; then + echo "โŒ Failed to clone specification repository." + exit 1 +fi + +cd "$TEMP_DIR" || exit +if ! git sparse-checkout set "$EXAMPLES_DIR" &> /dev/null; then + echo "โŒ Failed to checkout examples directory." 
+ exit 1 +fi + +cd - || exit + +# Prepare JUnit XML output +echo '&lt;?xml version="1.0" encoding="UTF-8"?&gt;' > "$JUNIT_FILE" +echo '&lt;testsuite name="integration-tests"&gt;' >> "$JUNIT_FILE" + +# Initialize test summary +total_tests=0 +failed_tests=0 + +# Walk through files and validate +echo "โš™๏ธ Running parser on fetched examples..." +while IFS= read -r file; do + filename=$(basename "$file") + echo "๐Ÿ” Validating: $filename" + + # Run the parser for the file + if go run "$PARSER_BINARY" "$file" > "$TEMP_DIR/validation.log" 2>&1; then + echo "โœ… Validation succeeded for $filename" + echo " &lt;testcase name=\"$filename\" /&gt;" >> "$JUNIT_FILE" + else + echo "โŒ Validation failed for $filename" + failure_message=$(cat "$TEMP_DIR/validation.log" | sed 's/&/\&amp;/g; s/&lt;/\&lt;/g; s/&gt;/\&gt;/g') + echo " &lt;testcase name=\"$filename\"&gt;" >> "$JUNIT_FILE" + echo " &lt;failure message=\"Validation failed\"&gt;$failure_message&lt;/failure&gt;" >> "$JUNIT_FILE" + echo " &lt;/testcase&gt;" >> "$JUNIT_FILE" + ((failed_tests++)) + fi + + ((total_tests++)) +done < <(find "$TEMP_DIR/$EXAMPLES_DIR" -type f \( -name "*.yaml" -o -name "*.yml" -o -name "*.json" \)) + +# Finalize JUnit XML output +echo '&lt;/testsuite&gt;' >> "$JUNIT_FILE" + +# Display test summary +if [ $failed_tests -ne 0 ]; then + echo "โŒ Validation failed for $failed_tests out of $total_tests workflows." + exit 1 +else + echo "โœ… All $total_tests workflows validated successfully." +fi + +exit 0 diff --git a/kubernetes/api/v1alpha1/groupversion_info.go b/kubernetes/api/v1alpha1/groupversion_info.go deleted file mode 100644 index 135263e..0000000 --- a/kubernetes/api/v1alpha1/groupversion_info.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package v1alpha1 contains API Schema definitions for the serverless v1alpha1 API group -// +kubebuilder:object:generate=true -// +groupName=serverlessworkflow.io -package v1alpha1 - -import ( - "k8s.io/apimachinery/pkg/runtime/schema" - "sigs.k8s.io/controller-runtime/pkg/scheme" -) - -var ( - // GroupVersion is group version used to register these objects - GroupVersion = schema.GroupVersion{Group: "io.serverlessworkflow", Version: "v1alpha1"} - - // SchemeBuilder is used to add go types to the GroupVersionKind scheme - SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} - - // AddToScheme adds the types in this group-version to the given scheme. - AddToScheme = SchemeBuilder.AddToScheme -) - -// Resource takes an unqualified resource and returns a Group qualified GroupResource. -func Resource(resource string) schema.GroupResource { - return GroupVersion.WithResource(resource).GroupResource() -} diff --git a/kubernetes/api/v1alpha1/serverlessworkflow_types.go b/kubernetes/api/v1alpha1/serverlessworkflow_types.go deleted file mode 100644 index 7144062..0000000 --- a/kubernetes/api/v1alpha1/serverlessworkflow_types.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package v1alpha1 - -import ( - "github.com/serverlessworkflow/sdk-go/v2/model" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" -) - -// This package provides a very simple api for kubernetes operator to test the integration -// of the Serverless SDK-Go with operator-sdk controller-gen and deepcopy-gen tools. -// The purpose of this integration is to stop issues like below beforehand: -// github.com/serverlessworkflow/sdk-go/model/event.go:51:2: encountered struct field "" without JSON tag in type "Event" -// github.com/serverlessworkflow/sdk-go/model/states.go:66:12: unsupported AST kind *ast.InterfaceType - -// States should be objects that will be in the same array even if it belongs to -// different types. An issue similar to the below will happen when trying to deploy your custom CR: -// strict decoding error: unknown field "spec.states[0].dataConditions" -// To make the CRD is compliant to the specs there are two options, -// a flat struct with all states fields at the same level, -// or use the // +kubebuilder:pruning:PreserveUnknownFields -// kubebuilder validator and delegate the validation to the sdk-go validator using the admission webhook. -// TODO add a webhook example - -// ServerlessWorkflowSpec defines a base API for integration test with operator-sdk -// +k8s:openapi-gen=true -type ServerlessWorkflowSpec struct { - model.Workflow `json:",inline"` -} - -// ServerlessWorkflow ... -// +kubebuilder:object:root=true -// +kubebuilder:object:generate=true -// +kubebuilder:subresource:status -// +k8s:openapi-gen=true -type ServerlessWorkflow struct { - metav1.TypeMeta `json:",inline"` - metav1.ObjectMeta `json:"metadata,omitempty"` - - Spec ServerlessWorkflowSpec `json:"spec,omitempty"` - Status ServerlessWorkflowStatus `json:"status,omitempty"` -} - -// ServerlessWorkflowStatus ... -// +k8s:openapi-gen=true -type ServerlessWorkflowStatus struct { - // add your conditions struct here ... - - // +optional - ObservedGeneration int64 `json:"observedGeneration,omitempty"` -} - -// ServerlessWorkflowList contains a list of SDKServerlessWorkflow -// +kubebuilder:object:root=true -// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object -type ServerlessWorkflowList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []ServerlessWorkflow `json:"items"` -} - -func init() { - SchemeBuilder.Register(&ServerlessWorkflow{}, &ServerlessWorkflowList{}) -} diff --git a/kubernetes/api/v1alpha1/zz_generated.deepcopy.go b/kubernetes/api/v1alpha1/zz_generated.deepcopy.go deleted file mode 100644 index 453a82c..0000000 --- a/kubernetes/api/v1alpha1/zz_generated.deepcopy.go +++ /dev/null @@ -1,113 +0,0 @@ -//go:build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by controller-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - runtime "k8s.io/apimachinery/pkg/runtime" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflow) DeepCopyInto(out *ServerlessWorkflow) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - out.Status = in.Status -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflow. -func (in *ServerlessWorkflow) DeepCopy() *ServerlessWorkflow { - if in == nil { - return nil - } - out := new(ServerlessWorkflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *ServerlessWorkflow) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowList) DeepCopyInto(out *ServerlessWorkflowList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]ServerlessWorkflow, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowList. -func (in *ServerlessWorkflowList) DeepCopy() *ServerlessWorkflowList { - if in == nil { - return nil - } - out := new(ServerlessWorkflowList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *ServerlessWorkflowList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowSpec) DeepCopyInto(out *ServerlessWorkflowSpec) { - *out = *in - in.Workflow.DeepCopyInto(&out.Workflow) -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowSpec. -func (in *ServerlessWorkflowSpec) DeepCopy() *ServerlessWorkflowSpec { - if in == nil { - return nil - } - out := new(ServerlessWorkflowSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowStatus) DeepCopyInto(out *ServerlessWorkflowStatus) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowStatus. 
-func (in *ServerlessWorkflowStatus) DeepCopy() *ServerlessWorkflowStatus { - if in == nil { - return nil - } - out := new(ServerlessWorkflowStatus) - in.DeepCopyInto(out) - return out -} diff --git a/model/action.go b/model/action.go deleted file mode 100644 index 2635849..0000000 --- a/model/action.go +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// Action specify invocations of services or other workflows during workflow execution. -// +builder-gen:new-call=ApplyDefault -type Action struct { - // Defines Unique action identifier. - // +optional - ID string `json:"id,omitempty"` - // Defines Unique action name. - // +optional - Name string `json:"name,omitempty"` - // References a reusable function definition. - // +optional - FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions. - // +optional - EventRef *EventRef `json:"eventRef,omitempty"` - // References a workflow to be invoked. - // +optional - SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` - // Defines time period workflow execution should sleep before / after function execution. - // +optional - Sleep *Sleep `json:"sleep,omitempty"` - // References a defined workflow retry definition. If not defined uses the default runtime retry definition. - // +optional - RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. - // Used only when `autoRetries` is set to `true` - // +optional - NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. - // Used only when `autoRetries` is set to `false` - // +optional - RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Filter the state data to select only the data that can be used within function definition arguments - // using its fromStateData property. Filter the action results to select only the result data that should - // be added/merged back into the state data using its results property. Select the part of state data which - // the action data results should be added/merged to using the toStateData property. - // +optional - ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` - // Expression, if defined, must evaluate to true for this action to be performed. If false, action is disregarded. 
- // +optional - Condition string `json:"condition,omitempty"` -} - -type actionUnmarshal Action - -// UnmarshalJSON implements json.Unmarshaler -func (a *Action) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("action", data, (*actionUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action -func (a *Action) ApplyDefault() { - a.ActionDataFilter.ApplyDefault() -} - -// FunctionRef defines the reference to a reusable function definition -// +builder-gen:new-call=ApplyDefault -type FunctionRef struct { - // Name of the referenced function. - // +kubebuilder:validation:Required - RefName string `json:"refName" validate:"required"` - // Arguments (inputs) to be passed to the referenced function - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // TODO: validate it as required if function type is graphql - Arguments map[string]Object `json:"arguments,omitempty"` - // Used if function type is graphql. String containing a valid GraphQL selection set. - // TODO: validate it as required if function type is graphql - // +optional - SelectionSet string `json:"selectionSet,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type functionRefUnmarshal FunctionRef - -// UnmarshalJSON implements json.Unmarshaler -func (f *FunctionRef) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function Ref -func (f *FunctionRef) ApplyDefault() { - f.Invoke = InvokeKindSync -} - -// Sleep defines time periods workflow execution should sleep before & after function execution -type Sleep struct { - // Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - // Does not apply if 'eventRef' is defined. - // +optional - Before string `json:"before,omitempty" validate:"omitempty,iso8601duration"` - // Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - // Does not apply if 'eventRef' is defined. - // +optional - After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` -} - -type sleepUnmarshal Sleep - -// UnmarshalJSON implements json.Unmarshaler -func (s *Sleep) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("sleep", data, (*sleepUnmarshal)(s)) -} diff --git a/model/action_data_filter.go b/model/action_data_filter.go deleted file mode 100644 index e929f6b..0000000 --- a/model/action_data_filter.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
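For contrast with the new DSL types added later in this PR: the removed v2 `FunctionRef` accepted either a plain string or an object (via `UnmarshalPrimitiveOrObject`), with `invoke` defaulting to `sync`. A minimal sketch of that v2 behavior against the published v2 module:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Both forms decode to the same FunctionRef; the short form only sets refName.
	short := []byte(`{"functionRef": "greet"}`)
	long := []byte(`{"functionRef": {"refName": "greet"}}`)

	var a, b model.Action
	if err := json.Unmarshal(short, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(long, &b); err != nil {
		panic(err)
	}

	// invoke is defaulted to "sync" in both cases by ApplyDefault.
	fmt.Println(a.FunctionRef.RefName, a.FunctionRef.Invoke)
	fmt.Println(b.FunctionRef.RefName, b.FunctionRef.Invoke)
}
```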
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// ActionDataFilter used to filter action data results. -// +optional -// +builder-gen:new-call=ApplyDefault -type ActionDataFilter struct { - // Workflow expression that filters state data that can be used by the action. - // +optional - FromStateData string `json:"fromStateData,omitempty"` - // If set to false, action data results are not added/merged to state data. In this case 'results' - // and 'toStateData' should be ignored. Default is true. - // +optional - UseResults bool `json:"useResults,omitempty"` - // Workflow expression that filters the actions data results. - // +optional - Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be - // added/merged into. If not specified denotes the top-level state data element. - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type actionDataFilterUnmarshal ActionDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (a *ActionDataFilter) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action Data Filter -func (a *ActionDataFilter) ApplyDefault() { - a.UseResults = true -} diff --git a/model/action_data_filter_test.go b/model/action_data_filter_test.go deleted file mode 100644 index cae511a..0000000 --- a/model/action_data_filter_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestActionDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ActionDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: true, - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3", "useResults": false}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: false, - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: ActionDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"fromStateData": 1, "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{}, - err: `actionDataFilter.fromStateData must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ActionDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_test.go b/model/action_test.go deleted file mode 100644 index 55c399d..0000000 --- a/model/action_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFunctionRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect FunctionRef - err string - } - - testCases := []testCase{ - { - desp: "invalid object refName", - data: `{"refName": 1}`, - expect: FunctionRef{}, - err: "functionRef.refName must be string", - }, - { - desp: "object with refName", - data: `{"refName": "function name"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "object with refName and Invoke", - data: `{"refName": "function name", "invoke": "async"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "refName string", - data: `"function name"`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - } - - for _, tc := range testCases[:1] { - t.Run(tc.desp, func(t *testing.T) { - var v FunctionRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_validator.go b/model/action_validator.go deleted file mode 100644 index 3fac375..0000000 --- a/model/action_validator.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(actionStructLevelValidationCtx), Action{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(functionRefStructLevelValidation), FunctionRef{}) -} - -func actionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - action := structLevel.Current().Interface().(Action) - - if action.FunctionRef == nil && action.EventRef == nil && action.SubFlowRef == nil { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", "required_without", "") - return - } - - values := []bool{ - action.FunctionRef != nil, - action.EventRef != nil, - action.SubFlowRef != nil, - } - - if validationNotExclusiveParameters(values) { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", val.TagExclusive, "") - structLevel.ReportError(action.EventRef, "EventRef", "EventRef", val.TagExclusive, "") - structLevel.ReportError(action.SubFlowRef, "SubFlowRef", "SubFlowRef", val.TagExclusive, "") - } - - if action.RetryRef != "" && !ctx.ExistRetry(action.RetryRef) { - structLevel.ReportError(action.RetryRef, "RetryRef", "RetryRef", val.TagExists, "") - } -} - -func functionRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - functionRef := structLevel.Current().Interface().(FunctionRef) - if !ctx.ExistFunction(functionRef.RefName) { - structLevel.ReportError(functionRef.RefName, "RefName", "RefName", val.TagExists, functionRef.RefName) - } -} diff --git a/model/action_validator_test.go b/model/action_validator_test.go deleted file mode 100644 index 84424b5..0000000 --- a/model/action_validator_test.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildActionByOperationState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.OperationState.Actions = append(state.OperationState.Actions, action) - return &state.OperationState.Actions[len(state.OperationState.Actions)-1] -} - -func buildActionByForEachState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.ForEachState.Actions = append(state.ForEachState.Actions, action) - return &state.ForEachState.Actions[len(state.ForEachState.Actions)-1] -} - -func buildActionByBranch(branch *Branch, name string) *Action { - action := Action{ - Name: name, - } - - branch.Actions = append(branch.Actions, action) - return &branch.Actions[len(branch.Actions)-1] -} - -func buildFunctionRef(workflow *Workflow, action *Action, name string) (*FunctionRef, *Function) { - function := Function{ - Name: name, - Operation: "http://function/function_name", - Type: FunctionTypeREST, - } - - functionRef := FunctionRef{ - RefName: name, - Invoke: InvokeKindSync, - } - action.FunctionRef = &functionRef - - workflow.Functions = append(workflow.Functions, function) - return &functionRef, &function -} - -func buildRetryRef(workflow *Workflow, action *Action, name string) { - retry := Retry{ - Name: name, - MaxAttempts: intstr.FromInt32(1), - } - - workflow.Retries = append(workflow.Retries, retry) - action.RetryRef = name -} - -func buildSleep(action *Action) *Sleep { - action.Sleep = &Sleep{ - Before: "PT5S", - After: "PT5S", - } - return action.Sleep -} - -func TestActionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "require_without", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef = nil - return *model - }, - Err: `workflow.states[0].actions[0].functionRef required when "eventRef" or "subFlowRef" is not defined`, - }, - { - Desp: "exclude", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventRef(model, &model.States[0].OperationState.Actions[0], "event 1", "event2") - return *model - }, - Err: `workflow.states[0].actions[0].functionRef exclusive -workflow.states[0].actions[0].eventRef exclusive -workflow.states[0].actions[0].subFlowRef exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestFunctionRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return 
*model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.RefName = "invalid function" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.refName don't exist "invalid function"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildSleep(action1) - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "" - model.States[0].OperationState.Actions[0].Sleep.After = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "P5S" - model.States[0].OperationState.Actions[0].Sleep.After = "P5S" - return *model - }, - Err: `workflow.states[0].actions[0].sleep.before invalid iso8601 duration "P5S" -workflow.states[0].actions[0].sleep.after invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} diff --git a/model/auth.go b/model/auth.go deleted file mode 100644 index 6632265..0000000 --- a/model/auth.go +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// AuthType can be "basic", "bearer", or "oauth2". Default is "basic" -type AuthType string - -func (i AuthType) KindValues() []string { - return []string{ - string(AuthTypeBasic), - string(AuthTypeBearer), - string(AuthTypeOAuth2), - } -} - -func (i AuthType) String() string { - return string(i) -} - -const ( - // AuthTypeBasic ... - AuthTypeBasic AuthType = "basic" - // AuthTypeBearer ... - AuthTypeBearer AuthType = "bearer" - // AuthTypeOAuth2 ... - AuthTypeOAuth2 AuthType = "oauth2" -) - -// GrantType ... -type GrantType string - -func (i GrantType) KindValues() []string { - return []string{ - string(GrantTypePassword), - string(GrantTypeClientCredentials), - string(GrantTypeTokenExchange), - } -} - -func (i GrantType) String() string { - return string(i) -} - -const ( - // GrantTypePassword ... - GrantTypePassword GrantType = "password" - // GrantTypeClientCredentials ... - GrantTypeClientCredentials GrantType = "clientCredentials" - // GrantTypeTokenExchange ... 
- GrantTypeTokenExchange GrantType = "tokenExchange" -) - -// Auth definitions can be used to define authentication information that should be applied to resources -// defined in the operation property of function definitions. It is not used as authentication information -// for the function invocation, but just to access the resource containing the function invocation information. -type Auth struct { - // Unique auth definition name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Auth scheme, can be "basic", "bearer", or "oauth2". Default is "basic" - // +kubebuilder:validation:Enum=basic;bearer;oauth2 - // +kubebuilder:default=basic - // +kubebuilder:validation:Required - Scheme AuthType `json:"scheme" validate:"required,oneofkind"` - // Auth scheme properties. Can be one of "Basic properties definition", "Bearer properties definition", - // or "OAuth2 properties definition" - // +kubebuilder:validation:Required - Properties AuthProperties `json:"properties" validate:"required"` -} - -type authUnmarshal Auth - -// UnmarshalJSON Auth definition -func (a *Auth) UnmarshalJSON(data []byte) error { - authTmp := struct { - authUnmarshal - PropertiesRaw json.RawMessage `json:"properties"` - }{} - - err := util.UnmarshalObjectOrFile("auth", data, &authTmp) - if err != nil { - return err - } - - *a = Auth(authTmp.authUnmarshal) - if len(a.Scheme) == 0 { - a.Scheme = AuthTypeBasic - } - - switch a.Scheme { - case AuthTypeBasic: - a.Properties.Basic = &BasicAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) - case AuthTypeBearer: - a.Properties.Bearer = &BearerAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) - case AuthTypeOAuth2: - a.Properties.OAuth2 = &OAuth2AuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) - default: - return fmt.Errorf("failed to parse auth properties") - } -} - -func (a *Auth) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - Name string `json:"name" validate:"required"` - Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` - Properties AuthProperties `json:"properties" validate:"required"` - }{ - Name: a.Name, - Scheme: a.Scheme, - Properties: a.Properties, - }) - if err != nil { - fmt.Println(err) - } - st := strings.Replace(string(custom), "null,", "", 1) - st = strings.Replace(st, "\"Basic\":", "", 1) - st = strings.Replace(st, "\"Oauth2\":", "", 1) - st = strings.Replace(st, "\"Bearer\":", "", 1) - st = strings.Replace(st, "{{", "{", 1) - st = strings.TrimSuffix(st, "}") - return []byte(st), nil -} - -// AuthProperties ... -type AuthProperties struct { - Basic *BasicAuthProperties `json:",omitempty"` - Bearer *BearerAuthProperties `json:",omitempty"` - OAuth2 *OAuth2AuthProperties `json:",omitempty"` -} - -// BasicAuthProperties Basic Auth Info -type BasicAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Username String or a workflow expression. Contains the username - // +kubebuilder:validation:Required - Username string `json:"username" validate:"required"` - // Password String or a workflow expression. 
Contains the user password - // +kubebuilder:validation:Required - Password string `json:"password" validate:"required"` -} - -// BearerAuthProperties Bearer auth information -type BearerAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Token String or a workflow expression. Contains the token - // +kubebuilder:validation:Required - Token string `json:"token" validate:"required"` -} - -// OAuth2AuthProperties OAuth2 information -type OAuth2AuthProperties struct { - Common `json:",inline"` - // Expression referencing a workflow secret that contains all needed auth info. - // +optional - Secret string `json:"secret,omitempty"` - // String or a workflow expression. Contains the authority information. - // +optional - Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` - // Defines the grant type. Can be "password", "clientCredentials", or "tokenExchange" - // +kubebuilder:validation:Enum=password;clientCredentials;tokenExchange - // +kubebuilder:validation:Required - GrantType GrantType `json:"grantType" validate:"required,oneofkind"` - // String or a workflow expression. Contains the client identifier. - // +kubebuilder:validation:Required - ClientID string `json:"clientId" validate:"required"` - // Workflow secret or a workflow expression. Contains the client secret. - // +optional - ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 scopes. - // +optional - Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner'. - // +optional - Username string `json:"username,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner'. - // +optional - Password string `json:"password,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 audiences. - // +optional - Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the subject token. - // +optional - SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested subject. - // +optional - RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested issuer. - // +optional - RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` -} - -// TODO: use reflection to unmarshal the keys and think on a generic approach to handle them diff --git a/model/auth_test.go b/model/auth_test.go deleted file mode 100644 index 60602a2..0000000 --- a/model/auth_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
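The removed v2 `Auth.MarshalJSON` above flattens the typed `AuthProperties` union by post-processing the JSON string with `strings.Replace`; the `AuthenticationPolicy` type introduced later in this PR replaces that with one field per scheme. A small sketch of the v2 shape, using the published v2 module, for comparison (names and credentials are placeholders):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// v2 keeps one Auth struct plus a scheme discriminator and a properties union.
	data := []byte(`{
		"name": "backend",
		"scheme": "basic",
		"properties": {"username": "john", "password": "12345"}
	}`)

	var auth model.Auth
	if err := json.Unmarshal(data, &auth); err != nil {
		panic(err)
	}
	fmt.Println(auth.Scheme, auth.Properties.Basic.Username)

	// Round-tripping relies on the string-replacement MarshalJSON shown above.
	out, err := json.Marshal(&auth)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```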
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { - t.Run("BearerAuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "bearer", - "properties": { - "token": "token1" - } - }` - a2JSON := `{ - "name": "a2", - "scheme": "bearer", - "properties": { - "token": "token2" - } - }` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.Bearer - a2Properties := a2.Properties.Bearer - - assert.Equal(t, "token1", a1Properties.Token) - assert.Equal(t, "token2", a2Properties.Token) - assert.NotEqual(t, a1Properties, a2Properties) - }) - - t.Run("OAuth2AuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret1" - } -}` - - a2JSON := `{ - "name": "a2", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret2" - } -}` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.OAuth2 - a2Properties := a2.Properties.OAuth2 - - assert.Equal(t, "secret1", a1Properties.ClientSecret) - assert.Equal(t, "secret2", a2Properties.ClientSecret) - assert.NotEqual(t, a1Properties, a2Properties) - }) -} diff --git a/model/auth_validator_test.go b/model/auth_validator_test.go deleted file mode 100644 index e2ce55d..0000000 --- a/model/auth_validator_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildAuth(workflow *Workflow, name string) *Auth { - auth := Auth{ - Name: name, - Scheme: AuthTypeBasic, - } - workflow.Auth = append(workflow.Auth, auth) - return &workflow.Auth[len(workflow.Auth)-1] -} - -func buildBasicAuthProperties(auth *Auth) *BasicAuthProperties { - auth.Scheme = AuthTypeBasic - auth.Properties = AuthProperties{ - Basic: &BasicAuthProperties{ - Username: "username", - Password: "password", - }, - } - - return auth.Properties.Basic -} - -func buildOAuth2AuthProperties(auth *Auth) *OAuth2AuthProperties { - auth.Scheme = AuthTypeOAuth2 - auth.Properties = AuthProperties{ - OAuth2: &OAuth2AuthProperties{ - ClientID: "clientId", - GrantType: GrantTypePassword, - }, - } - - return auth.Properties.OAuth2 -} - -func buildBearerAuthProperties(auth *Auth) *BearerAuthProperties { - auth.Scheme = AuthTypeBearer - auth.Properties = AuthProperties{ - Bearer: &BearerAuthProperties{ - Token: "token", - }, - } - - return auth.Properties.Bearer -} - -func TestAuthStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Name = "" - return *model - }, - Err: `workflow.auth[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth = append(model.Auth, model.Auth[0]) - return *model - }, - Err: `workflow.auth has duplicate "name"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestBasicAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Basic.Username = "" - model.Auth[0].Properties.Basic.Password = "" - return *model - }, - Err: `workflow.auth[0].properties.basic.username is required -workflow.auth[0].properties.basic.password is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBearerAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBearerAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Bearer.Token = "" - return *model - }, - Err: `workflow.auth[0].properties.bearer.token is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOAuth2AuthPropertiesPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildOAuth2AuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = "" - model.Auth[0].Properties.OAuth2.ClientID = "" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType is required -workflow.auth[0].properties.oAuth2.clientID is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = GrantTypePassword + "invalid" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType need by one of [password clientCredentials tokenExchange]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/authentication.go b/model/authentication.go new file mode 100644 index 0000000..35f06a4 --- /dev/null +++ b/model/authentication.go @@ -0,0 +1,187 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// AuthenticationPolicy Defines an authentication policy. +type AuthenticationPolicy struct { + Basic *BasicAuthenticationPolicy `json:"basic,omitempty"` + Bearer *BearerAuthenticationPolicy `json:"bearer,omitempty"` + Digest *DigestAuthenticationPolicy `json:"digest,omitempty"` + OAuth2 *OAuth2AuthenticationPolicy `json:"oauth2,omitempty"` + OIDC *OpenIdConnectAuthenticationPolicy `json:"oidc,omitempty"` +} + +// UnmarshalJSON for AuthenticationPolicy to enforce "oneOf" behavior. 
+func (ap *AuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Create temporary maps to detect which field is populated + temp := struct { + Basic json.RawMessage `json:"basic"` + Bearer json.RawMessage `json:"bearer"` + Digest json.RawMessage `json:"digest"` + OAuth2 json.RawMessage `json:"oauth2"` + OIDC json.RawMessage `json:"oidc"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if len(temp.Basic) > 0 { + count++ + ap.Basic = &BasicAuthenticationPolicy{} + if err := json.Unmarshal(temp.Basic, ap.Basic); err != nil { + return err + } + } + if len(temp.Bearer) > 0 { + count++ + ap.Bearer = &BearerAuthenticationPolicy{} + if err := json.Unmarshal(temp.Bearer, ap.Bearer); err != nil { + return err + } + } + if len(temp.Digest) > 0 { + count++ + ap.Digest = &DigestAuthenticationPolicy{} + if err := json.Unmarshal(temp.Digest, ap.Digest); err != nil { + return err + } + } + if len(temp.OAuth2) > 0 { + count++ + ap.OAuth2 = &OAuth2AuthenticationPolicy{} + if err := json.Unmarshal(temp.OAuth2, ap.OAuth2); err != nil { + return err + } + } + if len(temp.OIDC) > 0 { + count++ + ap.OIDC = &OpenIdConnectAuthenticationPolicy{} + if err := json.Unmarshal(temp.OIDC, ap.OIDC); err != nil { + return err + } + } + + // Ensure only one field is set + if count != 1 { + return errors.New("invalid AuthenticationPolicy: only one authentication type must be specified") + } + return nil +} + +// MarshalJSON for AuthenticationPolicy. +func (ap *AuthenticationPolicy) MarshalJSON() ([]byte, error) { + if ap.Basic != nil { + return json.Marshal(map[string]interface{}{"basic": ap.Basic}) + } + if ap.Bearer != nil { + return json.Marshal(map[string]interface{}{"bearer": ap.Bearer}) + } + if ap.Digest != nil { + return json.Marshal(map[string]interface{}{"digest": ap.Digest}) + } + if ap.OAuth2 != nil { + return json.Marshal(map[string]interface{}{"oauth2": ap.OAuth2}) + } + if ap.OIDC != nil { + return json.Marshal(map[string]interface{}{"oidc": ap.OIDC}) + } + // Add logic for other fields... + return nil, errors.New("invalid AuthenticationPolicy: no valid configuration to marshal") +} + +// ReferenceableAuthenticationPolicy represents a referenceable authentication policy. +type ReferenceableAuthenticationPolicy struct { + Use *string `json:"use,omitempty"` + AuthenticationPolicy *AuthenticationPolicy `json:",inline"` +} + +// UnmarshalJSON for ReferenceableAuthenticationPolicy enforces the "oneOf" behavior. +func (rap *ReferenceableAuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Temporary structure to detect which field is populated + temp := struct { + Use *string `json:"use"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Check if `use` is set + if temp.Use != nil { + rap.Use = temp.Use + return nil + } + + // If `use` is not set, try unmarshalling inline AuthenticationPolicy + var ap AuthenticationPolicy + if err := json.Unmarshal(data, &ap); err != nil { + return err + } + + rap.AuthenticationPolicy = &ap + return nil +} + +// MarshalJSON for ReferenceableAuthenticationPolicy. 
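Taken together, the `UnmarshalJSON` and `MarshalJSON` methods above give `AuthenticationPolicy` strict oneOf semantics: exactly one scheme may be present, and only the populated scheme is re-emitted. A usage sketch, assuming the import path is the sdk-go v3 module:

```go
package main

import (
	"encoding/json"
	"fmt"

	// Import path assumed; adjust to the module's actual path.
	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// Exactly one scheme may be present; the custom unmarshaler enforces it.
	var policy model.AuthenticationPolicy
	if err := json.Unmarshal([]byte(`{"basic": {"username": "john", "password": "12345"}}`), &policy); err != nil {
		panic(err)
	}
	fmt.Println(policy.Basic.Username) // john

	// Two schemes at once are rejected by the oneOf check.
	var bad model.AuthenticationPolicy
	err := json.Unmarshal([]byte(`{"basic": {"username": "u"}, "bearer": {"token": "t"}}`), &bad)
	fmt.Println(err) // invalid AuthenticationPolicy: only one authentication type must be specified

	// Marshaling re-emits only the populated scheme.
	out, _ := json.Marshal(&policy)
	fmt.Println(string(out)) // {"basic":{"username":"john","password":"12345"}}
}
```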
+func (rap *ReferenceableAuthenticationPolicy) MarshalJSON() ([]byte, error) { + if rap.Use != nil { + return json.Marshal(map[string]interface{}{"use": rap.Use}) + } + if rap.AuthenticationPolicy != nil { + return json.Marshal(rap.AuthenticationPolicy) + } + return nil, errors.New("invalid ReferenceableAuthenticationPolicy: no valid configuration to marshal") +} + +func NewBasicAuth(username, password string) *AuthenticationPolicy { + return &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: username, + Password: password, + }} +} + +// BasicAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type BasicAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,basic_policy"` +} + +// BearerAuthenticationPolicy supports either an inline token or a secret reference (use). +type BearerAuthenticationPolicy struct { + Token string `json:"token,omitempty" validate:"required_without=Use,bearer_policy"` + Use string `json:"use,omitempty" validate:"required_without=Token"` +} + +// DigestAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type DigestAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,digest_policy"` +} + +// OpenIdConnectAuthenticationPolicy Use OpenIdConnect authentication. +type OpenIdConnectAuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"omitempty,required_without=Use"` + Use string `json:"use,omitempty" validate:"omitempty,required_without=Properties"` +} diff --git a/model/authentication_oauth.go b/model/authentication_oauth.go new file mode 100644 index 0000000..e6e5f54 --- /dev/null +++ b/model/authentication_oauth.go @@ -0,0 +1,212 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Endpoints are composed here and not on a separate wrapper object to avoid too many nested objects and inline marshaling. 
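`ReferenceableAuthenticationPolicy` lets a caller either point at a named policy with `use` or define the policy inline; the unmarshaler above picks whichever form is present. A sketch under the same module-path assumption (the policy name is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"

	// Import path assumed; adjust to the module's actual path.
	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// Named reference: only "use" is set, pointing at a policy declared elsewhere
	// in the workflow ("petStoreAuth" is just an example name).
	var byRef model.ReferenceableAuthenticationPolicy
	if err := json.Unmarshal([]byte(`{"use": "petStoreAuth"}`), &byRef); err != nil {
		panic(err)
	}
	fmt.Println(*byRef.Use) // petStoreAuth

	// Inline definition: the object is decoded as a full AuthenticationPolicy.
	var inline model.ReferenceableAuthenticationPolicy
	if err := json.Unmarshal([]byte(`{"bearer": {"token": "abc"}}`), &inline); err != nil {
		panic(err)
	}
	fmt.Println(inline.AuthenticationPolicy.Bearer.Token) // abc

	// NewBasicAuth builds an inline basic policy programmatically.
	out, _ := json.Marshal(model.NewBasicAuth("john", "12345"))
	fmt.Println(string(out)) // {"basic":{"username":"john","password":"12345"}}
}
```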
+// This allows us to reuse OAuth2AuthenticationProperties also on OpenIdConnectAuthenticationPolicy + +type OAuth2AuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"required_without=Use"` + Endpoints *OAuth2Endpoints `json:"endpoints,omitempty"` + Use string `json:"use,omitempty" validate:"oauth2_policy"` +} + +func (o *OAuth2AuthenticationPolicy) ApplyDefaults() { + if o.Endpoints == nil { + return + } + + // Apply defaults if the respective fields are empty + if o.Endpoints.Token == "" { + o.Endpoints.Token = OAuth2DefaultTokenURI + } + if o.Endpoints.Revocation == "" { + o.Endpoints.Revocation = OAuth2DefaultRevokeURI + } + if o.Endpoints.Introspection == "" { + o.Endpoints.Introspection = OAuth2DefaultIntrospectionURI + } +} + +func (o *OAuth2AuthenticationPolicy) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationPolicy + aux := &struct { + *Alias + }{ + Alias: (*Alias)(o), + } + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Initialize Properties if any field for it is set + if o.Properties == nil && containsOAuth2Properties(data) { + o.Properties = &OAuth2AuthenticationProperties{} + if err := json.Unmarshal(data, o.Properties); err != nil { + return err + } + } + + return nil +} + +func containsOAuth2Properties(data []byte) bool { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return false + } + for key := range raw { + if key != "use" { + return true + } + } + return false +} + +// MarshalJSON customizes the JSON output for OAuth2AuthenticationPolicy +func (o *OAuth2AuthenticationPolicy) MarshalJSON() ([]byte, error) { + o.ApplyDefaults() + // Create a map to hold the resulting JSON + result := make(map[string]interface{}) + + // Inline Properties fields if present + if o.Properties != nil { + propertiesJSON, err := json.Marshal(o.Properties) + if err != nil { + return nil, err + } + + var propertiesMap map[string]interface{} + if err := json.Unmarshal(propertiesJSON, &propertiesMap); err != nil { + return nil, err + } + + for key, value := range propertiesMap { + result[key] = value + } + } + + // Add the Use field if present + if o.Use != "" { + result["use"] = o.Use + } + + return json.Marshal(result) +} + +type OAuth2AuthenticationProperties struct { + Authority URITemplate `json:"authority,omitempty"` + Grant OAuth2AuthenticationDataGrant `json:"grant,omitempty" validate:"oneof='authorization_code' 'client_credentials' 'password' 'refresh_token' 'urn:ietf:params:oauth:grant-type:token-exchange'"` + Client *OAuth2AutenthicationDataClient `json:"client,omitempty"` + Request *OAuth2TokenRequest `json:"request,omitempty"` + Issuers []string `json:"issuers,omitempty"` + Scopes []string `json:"scopes,omitempty"` + Audiences []string `json:"audiences,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + Subject *OAuth2Token `json:"subject,omitempty"` + Actor *OAuth2Token `json:"actor,omitempty"` +} + +func (o *OAuth2AuthenticationProperties) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationProperties + aux := &struct { + Authority json.RawMessage `json:"authority"` + *Alias + }{ + Alias: (*Alias)(o), + } + + if err := json.Unmarshal(data, &aux); err != nil { + return fmt.Errorf("failed to unmarshal OAuth2AuthenticationProperties: %w", err) + } + + // Unmarshal the Authority field + if aux.Authority != nil { + uri, err := UnmarshalURITemplate(aux.Authority) + if err != nil { + 
return fmt.Errorf("invalid authority URI: %w", err) + } + o.Authority = uri + } + + return nil +} + +// OAuth2AuthenticationDataGrant represents the grant type to use in OAuth2 authentication. +type OAuth2AuthenticationDataGrant string + +// Valid grant types +const ( + AuthorizationCodeGrant OAuth2AuthenticationDataGrant = "authorization_code" + ClientCredentialsGrant OAuth2AuthenticationDataGrant = "client_credentials" + PasswordGrant OAuth2AuthenticationDataGrant = "password" + RefreshTokenGrant OAuth2AuthenticationDataGrant = "refresh_token" + TokenExchangeGrant OAuth2AuthenticationDataGrant = "urn:ietf:params:oauth:grant-type:token-exchange" // #nosec G101 +) + +type OAuthClientAuthenticationType string + +const ( + OAuthClientAuthClientSecretBasic OAuthClientAuthenticationType = "client_secret_basic" + OAuthClientAuthClientSecretPost OAuthClientAuthenticationType = "client_secret_post" + OAuthClientAuthClientSecretJWT OAuthClientAuthenticationType = "client_secret_jwt" + OAuthClientAuthPrivateKeyJWT OAuthClientAuthenticationType = "private_key_jwt" + OAuthClientAuthNone OAuthClientAuthenticationType = "none" +) + +type OAuth2TokenRequestEncodingType string + +const ( + EncodingTypeFormUrlEncoded OAuth2TokenRequestEncodingType = "application/x-www-form-urlencoded" + EncodingTypeApplicationJson OAuth2TokenRequestEncodingType = "application/json" +) + +// OAuth2AutenthicationDataClient The definition of an OAuth2 client. +type OAuth2AutenthicationDataClient struct { + ID string `json:"id,omitempty"` + Secret string `json:"secret,omitempty"` + Assertion string `json:"assertion,omitempty"` + Authentication OAuthClientAuthenticationType `json:"authentication,omitempty" validate:"client_auth_type"` +} + +type OAuth2TokenRequest struct { + Encoding OAuth2TokenRequestEncodingType `json:"encoding" validate:"encoding_type"` +} + +// OAuth2Token Represents an OAuth2 token. +type OAuth2Token struct { + // Token The security token to use + Token string `json:"token,omitempty"` + // Type The type of the security token to use. + Type string `json:"type,omitempty"` +} + +type OAuth2Endpoints struct { + Token string `json:"token,omitempty"` + Revocation string `json:"revocation,omitempty"` + Introspection string `json:"introspection,omitempty"` +} + +const ( + OAuth2DefaultTokenURI = "/oauth2/token" // #nosec G101 + OAuth2DefaultRevokeURI = "/oauth2/revoke" + OAuth2DefaultIntrospectionURI = "/oauth2/introspect" +) diff --git a/model/authentication_oauth_test.go b/model/authentication_oauth_test.go new file mode 100644 index 0000000..820dac1 --- /dev/null +++ b/model/authentication_oauth_test.go @@ -0,0 +1,164 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestOAuth2AuthenticationPolicyValidation(t *testing.T) { + testCases := []struct { + name string + policy OAuth2AuthenticationPolicy + shouldPass bool + }{ + { + name: "Valid: Use set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + }, + shouldPass: true, + }, + { + name: "Valid: Properties set", + policy: OAuth2AuthenticationPolicy{ + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: true, + }, + { + name: "Invalid: Both Use and Properties set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: false, + }, + { + name: "Invalid: Neither Use nor Properties set", + policy: OAuth2AuthenticationPolicy{}, + shouldPass: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := validate.Struct(tc.policy) + if tc.shouldPass { + if err != nil { + t.Errorf("Expected validation to pass, but got error: %v", err) + } + } else { + if err == nil { + t.Errorf("Expected validation to fail, but it passed") + } + } + }) + } +} + +func TestAuthenticationOAuth2Policy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid OAuth2 Authentication Inline", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "scopes": ["scope1", "scope2"] + } + }`, + expected: `{"oauth2":{"authority":"https://auth.example.com","grant":"client_credentials","scopes":["scope1","scope2"]}}`, + expectsErr: false, + }, + { + name: "Valid OAuth2 Authentication Use", + input: `{ + "oauth2": { + "use": "mysecret" + } + }`, + expected: `{"oauth2":{"use":"mysecret"}}`, + expectsErr: false, + }, + { + name: "Invalid OAuth2: Both properties and use set", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "use": "mysecret" + } + }`, + expectsErr: true, + }, + { + name: "Invalid OAuth2: Missing required fields", + input: `{ + "oauth2": {} + }`, + expectsErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + err = validate.Struct(authPolicy) + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/authentication_test.go b/model/authentication_test.go new file mode 100644 index 0000000..af0f687 --- /dev/null +++ b/model/authentication_test.go @@ -0,0 +1,98 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestAuthenticationPolicy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid Basic Authentication Inline", + input: `{ + "basic": { + "username": "john", + "password": "12345" + } + }`, + expected: `{"basic":{"username":"john","password":"12345"}}`, + expectsErr: false, + }, + { + name: "Valid Digest Authentication Inline", + input: `{ + "digest": { + "username": "digestUser", + "password": "digestPass" + } + }`, + expected: `{"digest":{"username":"digestUser","password":"digestPass"}}`, + expectsErr: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + if authPolicy.Basic != nil { + err = validate.Struct(authPolicy.Basic) + } + if authPolicy.Bearer != nil { + err = validate.Struct(authPolicy.Bearer) + } + if authPolicy.Digest != nil { + err = validate.Struct(authPolicy.Digest) + } + if authPolicy.OAuth2 != nil { + err = validate.Struct(authPolicy.OAuth2) + } + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/builder.go b/model/builder.go new file mode 100644 index 0000000..81a51c6 --- /dev/null +++ b/model/builder.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + + "sigs.k8s.io/yaml" +) + +// WorkflowBuilder helps construct and serialize a Workflow object. +type WorkflowBuilder struct { + workflow *Workflow +} + +// NewWorkflowBuilder initializes a new WorkflowBuilder. +func NewWorkflowBuilder() *WorkflowBuilder { + return &WorkflowBuilder{ + workflow: &Workflow{ + Document: Document{}, + Do: &TaskList{}, + }, + } +} + +// SetDocument sets the Document fields in the Workflow. 
+func (wb *WorkflowBuilder) SetDocument(dsl, namespace, name, version string) *WorkflowBuilder { + wb.workflow.Document.DSL = dsl + wb.workflow.Document.Namespace = namespace + wb.workflow.Document.Name = name + wb.workflow.Document.Version = version + return wb +} + +// AddTask adds a TaskItem to the Workflow's Do list. +func (wb *WorkflowBuilder) AddTask(key string, task Task) *WorkflowBuilder { + *wb.workflow.Do = append(*wb.workflow.Do, &TaskItem{ + Key: key, + Task: task, + }) + return wb +} + +// SetInput sets the Input for the Workflow. +func (wb *WorkflowBuilder) SetInput(input *Input) *WorkflowBuilder { + wb.workflow.Input = input + return wb +} + +// SetOutput sets the Output for the Workflow. +func (wb *WorkflowBuilder) SetOutput(output *Output) *WorkflowBuilder { + wb.workflow.Output = output + return wb +} + +// SetTimeout sets the Timeout for the Workflow. +func (wb *WorkflowBuilder) SetTimeout(timeout *TimeoutOrReference) *WorkflowBuilder { + wb.workflow.Timeout = timeout + return wb +} + +// SetUse sets the Use section for the Workflow. +func (wb *WorkflowBuilder) SetUse(use *Use) *WorkflowBuilder { + wb.workflow.Use = use + return wb +} + +// SetSchedule sets the Schedule for the Workflow. +func (wb *WorkflowBuilder) SetSchedule(schedule *Schedule) *WorkflowBuilder { + wb.workflow.Schedule = schedule + return wb +} + +// Build returns the constructed Workflow object. +func (wb *WorkflowBuilder) Build() *Workflow { + return wb.workflow +} + +// ToYAML serializes the Workflow to YAML format. +func (wb *WorkflowBuilder) ToYAML() ([]byte, error) { + return yaml.Marshal(wb.workflow) +} + +// ToJSON serializes the Workflow to JSON format. +func (wb *WorkflowBuilder) ToJSON() ([]byte, error) { + return json.MarshalIndent(wb.workflow, "", " ") +} diff --git a/model/callback_state.go b/model/callback_state.go deleted file mode 100644 index 1dadcb6..0000000 --- a/model/callback_state.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// CallbackState executes a function and waits for callback event that indicates completion of the task. -type CallbackState struct { - // Defines the action to be executed. - // +kubebuilder:validation:Required - Action Action `json:"action"` - // References a unique callback event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Time period to wait for incoming events (ISO 8601 format) - // +optional - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - // Event data filter definition. 
- // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` -} - -func (c *CallbackState) MarshalJSON() ([]byte, error) { - type Alias CallbackState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(c), - Timeouts: c.Timeouts, - }) - return custom, err -} - -// CallbackStateTimeout defines timeout settings for callback state -type CallbackStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/callback_state_validator_test.go b/model/callback_state_validator_test.go deleted file mode 100644 index a89cea9..0000000 --- a/model/callback_state_validator_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildCallbackState(workflow *Workflow, name, eventRef string) *State { - consumeEvent := Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, consumeEvent) - - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeCallback, - }, - CallbackState: &CallbackState{ - EventRef: eventRef, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func buildCallbackStateTimeout(callbackState *CallbackState) *CallbackStateTimeout { - callbackState.Timeouts = &CallbackStateTimeout{} - return callbackState.Timeouts -} - -func TestCallbackStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.EventRef = "" - return *model - }, - Err: `workflow.states[0].callbackState.eventRef is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestCallbackStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: `success`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: `omitempty`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "" - model.States[0].CallbackState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].CallbackState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].callbackState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].callbackState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/common.go b/model/common.go deleted file mode 100644 index 3d4f000..0000000 --- a/model/common.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -// Common schema for Serverless Workflow specification -type Common struct { - // Metadata information - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` -} - -// Metadata information -// +kubebuilder:pruning:PreserveUnknownFields -// +kubebuilder:validation:Schemaless -type Metadata map[string]Object diff --git a/model/delay_state.go b/model/delay_state.go deleted file mode 100644 index 3227e74..0000000 --- a/model/delay_state.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "encoding/json" - -// DelayState Causes the workflow execution to delay for a specified duration -type DelayState struct { - // Amount of time (ISO 8601 format) to delay - // +kubebuilder:validation:Required - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` -} - -func (a *DelayState) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` - }{ - TimeDelay: a.TimeDelay, - }) - return custom, err -} diff --git a/model/delay_state_test.go b/model/delay_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/delay_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/delay_state_validator_test.go b/model/delay_state_validator_test.go deleted file mode 100644 index aed36c5..0000000 --- a/model/delay_state_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildDelayState(workflow *Workflow, name, timeDelay string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeDelay, - }, - DelayState: &DelayState{ - TimeDelay: timeDelay, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func TestDelayStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - delayState := buildDelayState(baseWorkflow, "start state", "PT5S") - buildEndByState(delayState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "P5S" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/doc.go b/model/doc.go deleted file mode 100644 index 1508354..0000000 --- a/model/doc.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -// +k8s:deepcopy-gen=package -// +k8s:deepcopy-gen:nonpointer-interfaces=true diff --git a/model/endpoint.go b/model/endpoint.go new file mode 100644 index 0000000..9c59fb5 --- /dev/null +++ b/model/endpoint.go @@ -0,0 +1,184 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" + + "github.com/tidwall/gjson" +) + +// LiteralUriPattern matches standard URIs without placeholders. +var LiteralUriPattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://[^{}\s]+$`) + +// LiteralUriTemplatePattern matches URIs with placeholders. +var LiteralUriTemplatePattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://.*\{.*}.*$`) + +// URITemplate represents a URI that can be a literal URI or a URI template. +type URITemplate interface { + IsURITemplate() bool + String() string +} + +// UnmarshalURITemplate is a shared function for unmarshalling URITemplate fields. 
+func UnmarshalURITemplate(data []byte) (URITemplate, error) { + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("failed to unmarshal URITemplate: %w", err) + } + + if LiteralUriTemplatePattern.MatchString(raw) { + return &LiteralUriTemplate{Value: raw}, nil + } + + if LiteralUriPattern.MatchString(raw) { + return &LiteralUri{Value: raw}, nil + } + + return nil, fmt.Errorf("invalid URI or URI template format: %s", raw) +} + +type LiteralUriTemplate struct { + Value string `json:"-" validate:"required,uri_template_pattern"` // Validate pattern for URI template. +} + +func (t *LiteralUriTemplate) IsURITemplate() bool { + return true +} + +func (t *LiteralUriTemplate) MarshalJSON() ([]byte, error) { + return json.Marshal(t.Value) +} + +func (t *LiteralUriTemplate) String() string { + return t.Value +} + +type LiteralUri struct { + Value string `json:"-" validate:"required,uri_pattern"` // Validate pattern for URI. +} + +func (u *LiteralUri) IsURITemplate() bool { + return true +} + +func (u *LiteralUri) MarshalJSON() ([]byte, error) { + return json.Marshal(u.Value) +} + +func (u *LiteralUri) String() string { + return u.Value +} + +type EndpointConfiguration struct { + URI URITemplate `json:"uri" validate:"required"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +// UnmarshalJSON implements custom unmarshalling for EndpointConfiguration. +func (e *EndpointConfiguration) UnmarshalJSON(data []byte) error { + // Use a temporary structure to unmarshal the JSON + type Alias EndpointConfiguration + temp := &struct { + URI json.RawMessage `json:"uri"` + *Alias + }{ + Alias: (*Alias)(e), + } + + if err := json.Unmarshal(data, &temp); err != nil { + return fmt.Errorf("failed to unmarshal EndpointConfiguration: %w", err) + } + + // Unmarshal the URI field into the appropriate URITemplate implementation + uri, err := UnmarshalURITemplate(temp.URI) + if err != nil { + return fmt.Errorf("invalid URI in EndpointConfiguration: %w", err) + } + e.URI = uri + + return nil +} + +type Endpoint struct { + RuntimeExpression *RuntimeExpression `json:"-"` + URITemplate URITemplate `json:"-"` + EndpointConfig *EndpointConfiguration `json:"-"` +} + +func NewEndpoint(uri string) *Endpoint { + return &Endpoint{URITemplate: &LiteralUri{Value: uri}} +} + +func (e *Endpoint) String() string { + if e.RuntimeExpression != nil { + return e.RuntimeExpression.String() + } + if e.URITemplate != nil { + return e.URITemplate.String() + } + if e.EndpointConfig != nil { + return e.EndpointConfig.URI.String() + } + return "" +} + +// UnmarshalJSON implements custom unmarshalling for Endpoint. 
+func (e *Endpoint) UnmarshalJSON(data []byte) error {
+	if gjson.ValidBytes(data) && gjson.ParseBytes(data).IsObject() && len(gjson.ParseBytes(data).Map()) == 0 {
+		// Leave the Endpoint fields unset (nil)
+		return nil
+	}
+
+	// First, try to unmarshal as a URITemplate
+	if uriTemplate, err := UnmarshalURITemplate(data); err == nil {
+		e.URITemplate = uriTemplate
+		return nil
+	}
+
+	// Next, try to unmarshal as a RuntimeExpression
+	var runtimeExpr RuntimeExpression
+	if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() {
+		e.RuntimeExpression = &runtimeExpr
+		return nil
+	}
+
+	// Finally, try to unmarshal as EndpointConfiguration
+	var endpointConfig EndpointConfiguration
+	if err := json.Unmarshal(data, &endpointConfig); err == nil {
+		e.EndpointConfig = &endpointConfig
+		return nil
+	}
+
+	return errors.New("failed to unmarshal Endpoint: data does not match any known schema")
+}
+
+// MarshalJSON implements custom marshalling for Endpoint.
+func (e *Endpoint) MarshalJSON() ([]byte, error) {
+	if e.RuntimeExpression != nil {
+		return json.Marshal(e.RuntimeExpression)
+	}
+	if e.URITemplate != nil {
+		return json.Marshal(e.URITemplate)
+	}
+	if e.EndpointConfig != nil {
+		return json.Marshal(e.EndpointConfig)
+	}
+	// Return an empty JSON object when no fields are set
+	return []byte("{}"), nil
+}
diff --git a/model/endpoint_test.go b/model/endpoint_test.go
new file mode 100644
index 0000000..59ddd45
--- /dev/null
+++ b/model/endpoint_test.go
@@ -0,0 +1,144 @@
+// Copyright 2025 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
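Endpoint.UnmarshalJSON above accepts three JSON shapes: a runtime-expression string, a literal URI or URI-template string, and an EndpointConfiguration object, each covered by the tests whose header begins here. A minimal editorial sketch (not part of the diff) of how calling code might exercise those branches, assuming package model; sketchEndpointForms is a hypothetical helper and the inputs mirror the test fixtures:

```go
package model

import (
	"encoding/json"
	"fmt"
)

// sketchEndpointForms decodes the three accepted JSON shapes for Endpoint and
// prints the normalized String() form of whichever branch matched.
func sketchEndpointForms() {
	inputs := []string{
		`"${example}"`,                       // runtime expression (as in the tests below)
		`"http://example.com/{id}"`,          // literal URI template
		`{"uri": "http://example.com/{id}"}`, // endpoint configuration object
	}

	for _, in := range inputs {
		var e Endpoint
		if err := json.Unmarshal([]byte(in), &e); err != nil {
			fmt.Println("unmarshal failed:", err)
			continue
		}
		fmt.Println(e.String())
	}
}
```

Because UnmarshalJSON falls through its branches in order, a plain string that is neither a URI form nor a valid expression is rejected outright, which is what the "Invalid RuntimeExpression" test below asserts.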
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEndpoint_UnmarshalJSON(t *testing.T) { + t.Run("Valid RuntimeExpression", func(t *testing.T) { + input := `"${example}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.RuntimeExpression, "RuntimeExpression should be set") + assert.Equal(t, "${example}", endpoint.RuntimeExpression.Value, "RuntimeExpression value should match") + }) + + t.Run("Invalid RuntimeExpression", func(t *testing.T) { + input := `"123invalid-expression"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid runtime expression") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + }) + + t.Run("Invalid LiteralUriTemplate", func(t *testing.T) { + uriTemplate := &LiteralUriTemplate{Value: "example.com/{id}"} + assert.False(t, LiteralUriPattern.MatchString(uriTemplate.Value), "LiteralUriTemplate should not match URI pattern") + }) + + t.Run("Valid URITemplate", func(t *testing.T) { + input := `"http://example.com/{id}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.URITemplate, "URITemplate should be set") + }) + + t.Run("Valid EndpointConfiguration", func(t *testing.T) { + input := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "admin", "password": "admin" } + } + }` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Username, "Authentication Username should match") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Password, "Authentication Password should match") + }) + + t.Run("Invalid JSON Structure", func(t *testing.T) { + input := `{"invalid": "data"}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid JSON structure") + }) + + t.Run("Empty Input", func(t *testing.T) { + input := `{}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error for empty input") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + assert.Nil(t, endpoint.URITemplate, "URITemplate should not be set") + assert.Nil(t, endpoint.EndpointConfig, "EndpointConfig should not be set") + }) +} + +func TestEndpoint_MarshalJSON(t *testing.T) { + t.Run("Marshal RuntimeExpression", func(t *testing.T) { + endpoint := &Endpoint{ + RuntimeExpression: &RuntimeExpression{Value: "${example}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `"${example}"`, string(data), "Output JSON should match") + }) + + t.Run("Marshal URITemplate", func(t *testing.T) { + endpoint := &Endpoint{ + URITemplate: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + 
assert.JSONEq(t, `"http://example.com/{id}"`, string(data), "Output JSON should match") + }) + + t.Run("Marshal EndpointConfiguration", func(t *testing.T) { + endpoint := &Endpoint{ + EndpointConfig: &EndpointConfiguration{ + URI: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + Authentication: &ReferenceableAuthenticationPolicy{AuthenticationPolicy: &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: "john", + Password: "secret", + }}}, + }, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + expected := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "john", "password": "secret" } + } + }` + assert.JSONEq(t, expected, string(data), "Output JSON should match") + }) + + t.Run("Marshal Empty Endpoint", func(t *testing.T) { + endpoint := Endpoint{} + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `{}`, string(data), "Output JSON should be empty") + }) +} diff --git a/model/event.go b/model/event.go deleted file mode 100644 index bad1ce4..0000000 --- a/model/event.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventKind defines this event as either `consumed` or `produced` -type EventKind string - -func (i EventKind) KindValues() []string { - return []string{ - string(EventKindConsumed), - string(EventKindProduced), - } -} - -func (i EventKind) String() string { - return string(i) -} - -const ( - // EventKindConsumed means the event continuation of workflow instance execution - EventKindConsumed EventKind = "consumed" - - // EventKindProduced means the event was created during workflow instance execution - EventKindProduced EventKind = "produced" -) - -// Event used to define events and their correlations -// +builder-gen:new-call=ApplyDefault -type Event struct { - Common `json:",inline"` - // Unique event name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // CloudEvent source. - // +optional - Source string `json:"source,omitempty"` - // CloudEvent type. - // +kubebuilder:validation:Required - Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Defaults to `consumed`. - // +kubebuilder:validation:Enum=consumed;produced - // +kubebuilder:default=consumed - Kind EventKind `json:"kind,omitempty" validate:"required,oneofkind"` - // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload - // and context attributes should be accessible. Defaults to true. - // +kubebuilder:default=true - // +optional - DataOnly bool `json:"dataOnly,omitempty"` - // Define event correlation rules for this event. Only used for consumed events. 
- // +optional - Correlation []Correlation `json:"correlation,omitempty" validate:"dive"` -} - -type eventUnmarshal Event - -// UnmarshalJSON unmarshal Event object from json bytes -func (e *Event) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("event", data, (*eventUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event -func (e *Event) ApplyDefault() { - e.DataOnly = true - e.Kind = EventKindConsumed -} - -// Correlation define event correlation rules for an event. Only used for `consumed` events -type Correlation struct { - // CloudEvent Extension Context Attribute name - // +kubebuilder:validation:Required - ContextAttributeName string `json:"contextAttributeName" validate:"required"` - // CloudEvent Extension Context Attribute value - // +optional - ContextAttributeValue string `json:"contextAttributeValue,omitempty"` -} - -// EventRef defining invocation of a function via event -// +builder-gen:new-call=ApplyDefault -type EventRef struct { - // Reference to the unique name of a 'produced' event definition, - // +kubebuilder:validation:Required - TriggerEventRef string `json:"triggerEventRef" validate:"required"` - // Reference to the unique name of a 'consumed' event definition - // +kubebuilder:validation:Required - ResultEventRef string `json:"resultEventRef" validate:"required"` - // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - // actionExecutionTimeout - // +optional - ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // If string type, an expression which selects parts of the states data output to become the data (payload) - // of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - // of the event referenced by triggerEventRef. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data *Object `json:"data,omitempty"` - // Add additional extension context attributes to the produced event. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type eventRefUnmarshal EventRef - -// UnmarshalJSON implements json.Unmarshaler -func (e *EventRef) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event Ref -func (e *EventRef) ApplyDefault() { - e.Invoke = InvokeKindSync -} diff --git a/model/event_data_filter.go b/model/event_data_filter.go deleted file mode 100644 index 1db5bbf..0000000 --- a/model/event_data_filter.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventDataFilter used to filter consumed event payloads. -// +builder-gen:new-call=ApplyDefault -type EventDataFilter struct { - // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - // should be ignored. Default is true. - // +optional - UseData bool `json:"useData,omitempty"` - // Workflow expression that filters of the event data (payload). - // +optional - Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. - // If not specified denotes the top-level state data element - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type eventDataFilterUnmarshal EventDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (f *EventDataFilter) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) -} - -// ApplyDefault set the default values for Event Data Filter -func (f *EventDataFilter) ApplyDefault() { - f.UseData = true -} diff --git a/model/event_data_filter_test.go b/model/event_data_filter_test.go deleted file mode 100644 index e4bf979..0000000 --- a/model/event_data_filter_test.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: true, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"UseData": false, "data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: false, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: EventDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"data": 1, "toStateData": "2"}`, - expect: EventDataFilter{}, - err: `eventDataFilter.data must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_data_filter_validator_test.go b/model/event_data_filter_validator_test.go deleted file mode 100644 index 1bbbac9..0000000 --- a/model/event_data_filter_validator_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestEventDataFilterStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event_state.go b/model/event_state.go deleted file mode 100644 index 39bd590..0000000 --- a/model/event_state.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// EventState await one or more events and perform actions when they are received. If defined as the -// workflow starting state, the event state definition controls when the workflow instances should be created. -// +builder-gen:new-call=ApplyDefault -type EventState struct { - // TODO: EventState doesn't have usedForCompensation field. 
- - // If true consuming one of the defined events causes its associated actions to be performed. If false all - // the defined events must be consumed in order for actions to be performed. Defaults to true. - // +kubebuilder:default=true - // +optional - Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed. - // +kubebuilder:validation:MinItems=1 - OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts. - // +optional - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` -} - -func (e *EventState) MarshalJSON() ([]byte, error) { - type Alias EventState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(e), - Timeouts: e.Timeouts, - }) - return custom, err -} - -type eventStateUnmarshal EventState - -// UnmarshalJSON unmarshal EventState object from json bytes -func (e *EventState) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event State -func (e *EventState) ApplyDefault() { - e.Exclusive = true -} - -// OnEvents define which actions are be performed for the one or more events. -// +builder-gen:new-call=ApplyDefault -type OnEvents struct { - // References one or more unique event names in the defined workflow events. - // +kubebuilder:validation:MinItems=1 - EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Should actions be performed sequentially or in parallel. Default is sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed if expression matches - // +optional - Actions []Action `json:"actions,omitempty" validate:"dive"` - // eventDataFilter defines the callback event data filter definition - // +optional - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -type onEventsUnmarshal OnEvents - -// UnmarshalJSON unmarshal OnEvents object from json bytes -func (o *OnEvents) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) -} - -// ApplyDefault set the default values for On Events -func (o *OnEvents) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// EventStateTimeout defines timeout settings for event state -type EventStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/event_state_test.go b/model/event_state_test.go deleted file mode 100644 index 348aaea..0000000 --- a/model/event_state_test.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect State - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"name": "1", "type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: false, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - { - desp: "default exclusive", - data: `{"name": "1", "type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: true, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := State{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestOnEventsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OnEvents - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "default action mode", - data: `{"eventRefs": ["E1", "E2"]}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{}, - err: `invalid character ':' after top-level value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OnEvents{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_state_validator.go b/model/event_state_validator.go deleted file mode 100644 index d4f2f40..0000000 --- a/model/event_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The 
Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStateStructLevelValidationCtx), EventState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onEventsStructLevelValidationCtx), OnEvents{}) -} - -func eventStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - // EventRefs -} - -func onEventsStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - onEvent := structLevel.Current().Interface().(OnEvents) - for _, eventRef := range onEvent.EventRefs { - if eventRef != "" && !ctx.ExistEvent(eventRef) { - structLevel.ReportError(eventRef, "eventRefs", "EventRefs", val.TagExists, "") - } - } -} diff --git a/model/event_state_validator_test.go b/model/event_state_validator_test.go deleted file mode 100644 index ea7d319..0000000 --- a/model/event_state_validator_test.go +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildEventState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeEvent, - }, - EventState: &EventState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOnEvents(workflow *Workflow, state *State, name string) *OnEvents { - event := Event{ - Name: name, - Type: "type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, event) - - state.EventState.OnEvents = append(state.EventState.OnEvents, OnEvents{ - EventRefs: []string{event.Name}, - ActionMode: ActionModeParallel, - }) - - return &state.EventState.OnEvents[len(state.EventState.OnEvents)-1] -} - -func buildEventStateTimeout(state *State) *EventStateTimeout { - state.EventState.Timeouts = &EventStateTimeout{ - ActionExecTimeout: "PT5S", - EventTimeout: "PT5S", - } - return state.EventState.Timeouts -} - -func TestEventStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = nil - return *model - }, - Err: `workflow.states[0].eventState.onEvents is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = []OnEvents{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents must have the minimum 1`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestOnEventsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{"event not found"} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = nil - model.States[0].EventState.OnEvents[0].ActionMode = "" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs is required -workflow.states[0].eventState.onEvents[0].actionMode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs must have the minimum 1`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].actionMode need by one of [sequential parallel]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func 
TestEventStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildEventStateTimeout(eventState) - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "" - model.States[0].EventState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].EventState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].eventState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].eventState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event_test.go b/model/event_test.go deleted file mode 100644 index f557c61..0000000 --- a/model/event_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventRef - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"invoke": "async"}`, - expect: EventRef{ - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "invoke unset", - data: `{}`, - expect: EventRef{ - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"invoke": 1}`, - expect: EventRef{}, - err: `eventRef.invoke must be sync or async`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestEventUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Event - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"dataOnly": false, "kind": "produced"}`, - expect: Event{ - DataOnly: false, - Kind: EventKindProduced, - }, - err: ``, - }, - { - desp: "optional field dataOnly & kind unset", - data: `{}`, - expect: Event{ - DataOnly: true, - Kind: EventKindConsumed, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"dataOnly": "false", "kind": "produced"}`, - expect: Event{}, - err: `event.dataOnly must be bool`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Event - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_validator.go b/model/event_validator.go deleted file mode 100644 index 7b4daa9..0000000 --- a/model/event_validator.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStructLevelValidation), Event{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventRefStructLevelValidation), EventRef{}) -} - -// eventStructLevelValidation custom validator for event kind consumed -func eventStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { -} - -func eventRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - model := structLevel.Current().Interface().(EventRef) - if model.TriggerEventRef != "" && !ctx.ExistEvent(model.TriggerEventRef) { - structLevel.ReportError(model.TriggerEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } - if model.ResultEventRef != "" && !ctx.ExistEvent(model.ResultEventRef) { - structLevel.ReportError(model.ResultEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } -} diff --git a/model/event_validator_test.go b/model/event_validator_test.go deleted file mode 100644 index 80340b0..0000000 --- a/model/event_validator_test.go +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildEventRef(workflow *Workflow, action *Action, triggerEvent, resultEvent string) *EventRef { - produceEvent := Event{ - Name: triggerEvent, - Type: "event type", - Kind: EventKindProduced, - } - - consumeEvent := Event{ - Name: resultEvent, - Type: "event type", - Kind: EventKindProduced, - } - - workflow.Events = append(workflow.Events, produceEvent) - workflow.Events = append(workflow.Events, consumeEvent) - - eventRef := &EventRef{ - TriggerEventRef: triggerEvent, - ResultEventRef: resultEvent, - Invoke: InvokeKindSync, - } - - action.EventRef = eventRef - return action.EventRef -} - -func buildCorrelation(event *Event) *Correlation { - event.Correlation = append(event.Correlation, Correlation{ - ContextAttributeName: "attribute name", - }) - - return &event.Correlation[len(event.Correlation)-1] -} - -func TestEventStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events = append(model.Events, model.Events[0]) - return *model - }, - Err: `workflow.events has duplicate "name"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Name = "" - model.Events[0].Type = "" - model.Events[0].Kind = "" - return *model - }, - Err: `workflow.events[0].name is required -workflow.events[0].type is required -workflow.events[0].kind is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Kind = EventKindConsumed + "invalid" - return *model - }, - Err: `workflow.events[0].kind need by one of [consumed produced]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestCorrelationStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - buildCorrelation(&baseWorkflow.Events[0]) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "empty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation = nil - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation[0].ContextAttributeName = "" - return *model - }, - Err: `workflow.events[0].correlation[0].contextAttributeName is required`, - }, - //TODO: Add test: correlation only used for `consumed` events - } - - StructLevelValidationCtx(t, testCases) -} - -func TestEventRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := 
buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - eventRef := buildEventRef(baseWorkflow, action1, "event 1", "event 2") - eventRef.ResultEventTimeout = "PT1H" - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef is required -workflow.states[0].actions[0].eventRef.resultEventRef is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "invalid event" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "invalid event 2" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event" -workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event 2"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.ResultEventTimeout = "10hs" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.resultEventTimeout invalid iso8601 duration "10hs"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/extension.go b/model/extension.go new file mode 100644 index 0000000..b7b49ec --- /dev/null +++ b/model/extension.go @@ -0,0 +1,120 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Extension represents the definition of an extension. +type Extension struct { + Extend string `json:"extend" validate:"required,oneof=call composite emit for listen raise run set switch try wait all"` + When *RuntimeExpression `json:"when,omitempty"` + Before *TaskList `json:"before,omitempty" validate:"omitempty,dive"` + After *TaskList `json:"after,omitempty" validate:"omitempty,dive"` +} + +// ExtensionItem represents a named extension and its associated definition. +type ExtensionItem struct { + Key string `json:"-" validate:"required"` + Extension *Extension `json:"-" validate:"required"` +} + +// MarshalJSON for ExtensionItem to serialize as a single-key object. 
+func (ei *ExtensionItem) MarshalJSON() ([]byte, error) { + if ei == nil { + return nil, fmt.Errorf("cannot marshal a nil ExtensionItem") + } + + extensionJSON, err := json.Marshal(ei.Extension) + if err != nil { + return nil, fmt.Errorf("failed to marshal extension: %w", err) + } + + return json.Marshal(map[string]json.RawMessage{ + ei.Key: extensionJSON, + }) +} + +// UnmarshalJSON for ExtensionItem to deserialize from a single-key object. +func (ei *ExtensionItem) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionItem: %w", err) + } + + if len(raw) != 1 { + return fmt.Errorf("each ExtensionItem must have exactly one key") + } + + for key, extensionData := range raw { + var ext Extension + if err := json.Unmarshal(extensionData, &ext); err != nil { + return fmt.Errorf("failed to unmarshal extension %q: %w", key, err) + } + ei.Key = key + ei.Extension = &ext + break + } + + return nil +} + +// ExtensionList represents a list of extensions. +type ExtensionList []*ExtensionItem + +// Key retrieves all extensions with the specified key. +func (el *ExtensionList) Key(key string) *Extension { + for _, item := range *el { + if item.Key == key { + return item.Extension + } + } + return nil +} + +// UnmarshalJSON for ExtensionList to deserialize an array of ExtensionItem objects. +func (el *ExtensionList) UnmarshalJSON(data []byte) error { + var rawExtensions []json.RawMessage + if err := json.Unmarshal(data, &rawExtensions); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionList: %w", err) + } + + for _, raw := range rawExtensions { + var item ExtensionItem + if err := json.Unmarshal(raw, &item); err != nil { + return fmt.Errorf("failed to unmarshal extension item: %w", err) + } + *el = append(*el, &item) + } + + return nil +} + +// MarshalJSON for ExtensionList to serialize as an array of ExtensionItem objects. +func (el *ExtensionList) MarshalJSON() ([]byte, error) { + var serializedExtensions []json.RawMessage + + for _, item := range *el { + serialized, err := json.Marshal(item) + if err != nil { + return nil, fmt.Errorf("failed to marshal ExtensionItem: %w", err) + } + serializedExtensions = append(serializedExtensions, serialized) + } + + return json.Marshal(serializedExtensions) +} diff --git a/model/extension_test.go b/model/extension_test.go new file mode 100644 index 0000000..7a11a5f --- /dev/null +++ b/model/extension_test.go @@ -0,0 +1,140 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestExtension_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var extension Extension + err := json.Unmarshal([]byte(jsonData), &extension) + assert.NoError(t, err) + assert.Equal(t, "call", extension.Extend) + assert.Equal(t, NewExpr("${condition}"), extension.When) + + task1 := extension.Before.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + // Check if task2 exists before accessing its fields + task2 := extension.After.Key("task2") + assert.NotNil(t, task2, "task2 should not be nil") + openAPITask := task2.AsCallOpenAPITask() + assert.NotNil(t, openAPITask) + assert.Equal(t, "openapi", openAPITask.Call) + assert.Equal(t, "doc1", openAPITask.With.Document.Name) + assert.Equal(t, "op1", openAPITask.With.OperationID) +} + +func TestExtension_MarshalJSON(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(extension) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestExtension_Validation(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "doc1", // Missing Endpoint + }, + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(extension) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s': %s", + validationErr.StructNamespace(), validationErr.Tag(), validationErr.Param()) + } + + // Assert on specific validation errors + assert.Contains(t, validationErrors.Error(), "After[0].Task.With.Document.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") + } else { + t.Errorf("Unexpected error type: %v", err) + } +} diff --git a/model/foreach_state.go b/model/foreach_state.go deleted file mode 100644 index aa19f4e..0000000 --- a/model/foreach_state.go +++ /dev/null 
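For context, a minimal usage sketch (not part of this change) of the ExtensionList and ExtensionItem types introduced in model/extension.go above. It assumes the snippet sits in the same model package; the demoExtensionList name and the sample extension keys ("logStart", "logEnd") are made up for illustration.

package model

import (
	"encoding/json"
	"fmt"
)

// demoExtensionList shows the array-of-single-key-objects wire format handled
// by the custom (Un)MarshalJSON methods above, plus a lookup via ExtensionList.Key.
func demoExtensionList() error {
	data := []byte(`[
		{"logStart": {"extend": "call", "when": "${ expression }"}},
		{"logEnd": {"extend": "all"}}
	]`)

	var extensions ExtensionList
	if err := json.Unmarshal(data, &extensions); err != nil {
		return err
	}

	// Key returns the extension registered under the given name, or nil when no entry matches.
	if ext := extensions.Key("logStart"); ext != nil {
		fmt.Println(ext.Extend) // prints "call"
	}

	// Marshalling restores the original single-key-object array shape.
	out, err := json.Marshal(&extensions)
	if err != nil {
		return err
	}
	fmt.Println(string(out))
	return nil
}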
@@ -1,108 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) -type ForEachModeType string - -func (f ForEachModeType) KindValues() []string { - return []string{ - string(ForEachModeTypeSequential), - string(ForEachModeTypeParallel), - } -} - -func (f ForEachModeType) String() string { - return string(f) -} - -const ( - // ForEachModeTypeSequential specifies iterations should be done sequentially. - ForEachModeTypeSequential ForEachModeType = "sequential" - // ForEachModeTypeParallel specifies iterations should be done parallel. - ForEachModeTypeParallel ForEachModeType = "parallel" -) - -// ForEachState used to execute actions for each element of a data set. -// +builder-gen:new-call=ApplyDefault -type ForEachState struct { - // Workflow expression selecting an array element of the states' data. - // +kubebuilder:validation:Required - InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration. - // +optional - OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, - // this param should contain a unique element of the inputCollection array. - // +optional - IterationParam string `json:"iterationParam,omitempty"` - // Specifies how many iterations may run in parallel at the same time. Used if mode property is set to - // parallel (default). If not specified, its value should be the size of the inputCollection. - // +optional - BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection. - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions,omitempty" validate:"required,min=0,dive"` - // State specific timeout. - // +optional - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel. 
- // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=parallel - Mode ForEachModeType `json:"mode,omitempty" validate:"required,oneofkind"` -} - -func (f *ForEachState) MarshalJSON() ([]byte, error) { - type Alias ForEachState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(f), - Timeouts: f.Timeouts, - }) - return custom, err -} - -type forEachStateUnmarshal ForEachState - -// UnmarshalJSON implements json.Unmarshaler -func (f *ForEachState) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) -} - -// ApplyDefault set the default values for ForEach State -func (f *ForEachState) ApplyDefault() { - f.Mode = ForEachModeTypeParallel -} - -// ForEachStateTimeout defines timeout settings for foreach state -type ForEachStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go deleted file mode 100644 index a10f7a9..0000000 --- a/model/foreach_state_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestForEachStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ForEachState - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"mode": "sequential"}`, - expect: &ForEachState{ - Mode: ForEachModeTypeSequential, - }, - err: ``, - }, - { - desp: "mode unset", - data: `{}`, - expect: &ForEachState{ - Mode: ForEachModeTypeParallel, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"mode": 1}`, - expect: nil, - err: `forEachState.mode must be sequential or parallel`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ForEachState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/foreach_state_validator.go b/model/foreach_state_validator.go deleted file mode 100644 index d1d9894..0000000 --- a/model/foreach_state_validator.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(forEachStateStructLevelValidation, ForEachState{}) -} - -// ForEachStateStructLevelValidation custom validator for ForEachState -func forEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - stateObj := structLevel.Current().Interface().(ForEachState) - - if stateObj.Mode != ForEachModeTypeParallel { - return - } - - if stateObj.BatchSize == nil { - return - } - - if !val.ValidateGt0IntStr(stateObj.BatchSize) { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } -} diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go deleted file mode 100644 index 8fb49d0..0000000 --- a/model/foreach_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildForEachState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeForEach, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Mode: ForEachModeTypeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func TestForEachStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - forEachState := buildForEachState(baseWorkflow, "start state") - buildEndByState(forEachState, true, false) - action1 := buildActionByForEachState(forEachState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - } - return *model - }, - }, - { - Desp: "success without batch size", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = nil - return *model - }, - }, - { - Desp: "gt0 int", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - } - return *model - }, - Err: `workflow.states[0].forEachState.batchSize must be greater than 0`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].forEachState.mode need by one of [sequential parallel]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.InputCollection = "" - model.States[0].ForEachState.Mode = "" - model.States[0].ForEachState.Actions = nil - return *model - }, - Err: `workflow.states[0].forEachState.inputCollection is required -workflow.states[0].forEachState.actions is required -workflow.states[0].forEachState.mode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Actions = []Action{} - return *model - }, - Err: ``, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestForEachStateTimeoutStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} diff --git a/model/function.go b/model/function.go deleted file mode 100644 index 7cf4197..0000000 --- a/model/function.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -const ( - // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service - // operation that needs to be invoked, separated by a '#'. - FunctionTypeREST FunctionType = "rest" - // FunctionTypeRPC a combination of the gRPC proto document URI and the particular service name and service method - // name that needs to be invoked, separated by a '#'. - FunctionTypeRPC FunctionType = "rpc" - // FunctionTypeExpression defines the expression syntax. - FunctionTypeExpression FunctionType = "expression" - // FunctionTypeGraphQL a combination of the GraphQL schema definition URI and the particular service name and - // service method name that needs to be invoked, separated by a '#' - FunctionTypeGraphQL FunctionType = "graphql" - // FunctionTypeAsyncAPI a combination of the AsyncApi definition document URI and the particular service operation - // that needs to be invoked, separated by a '#' - FunctionTypeAsyncAPI FunctionType = "asyncapi" - // FunctionTypeOData a combination of the GraphQL schema definition URI and the particular service name and service - // method name that needs to be invoked, separated by a '#' - FunctionTypeOData FunctionType = "odata" - // FunctionTypeCustom property defines a list of function types that are set by the specification. Some runtime - // implementations might support additional function types that extend the ones defined in the specification - FunctionTypeCustom FunctionType = "custom" -) - -// FunctionType ... -type FunctionType string - -func (i FunctionType) KindValues() []string { - return []string{ - string(FunctionTypeREST), - string(FunctionTypeRPC), - string(FunctionTypeExpression), - string(FunctionTypeGraphQL), - string(FunctionTypeAsyncAPI), - string(FunctionTypeOData), - string(FunctionTypeCustom), - } -} - -func (i FunctionType) String() string { - return string(i) -} - -// Function ... -// +builder-gen:new-call=ApplyDefault -type Function struct { - Common `json:",inline"` - // Unique function name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // If type is `rest`, #. - // If type is `rpc`, ##. - // If type is `expression`, defines the workflow expression. If the type is `custom`, - // #. - // +kubebuilder:validation:Required - Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. - // Default is `rest`. - // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;odata;asyncapi;custom - // +kubebuilder:default=rest - Type FunctionType `json:"type,omitempty" validate:"required,oneofkind"` - // References an auth definition name to be used to access to resource defined in the operation parameter. 
- // +optional - AuthRef string `json:"authRef,omitempty"` -} - -type functionUnmarshal Function - -// UnmarshalJSON implements json unmarshaler interface -func (f *Function) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("function", data, (*functionUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function -func (f *Function) ApplyDefault() { - f.Type = FunctionTypeREST -} diff --git a/model/function_validator_test.go b/model/function_validator_test.go deleted file mode 100644 index fcde6b9..0000000 --- a/model/function_validator_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestFunctionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Functions = Functions{{ - Name: "function 1", - Operation: "http://function/action", - Type: FunctionTypeREST, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Name = "" - model.Functions[0].Operation = "" - model.Functions[0].Type = "" - return *model - }, - Err: `workflow.functions[0].name is required -workflow.functions[0].operation is required -workflow.functions[0].type is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions = append(model.Functions, model.Functions[0]) - return *model - }, - Err: `workflow.functions has duplicate "name"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Type = FunctionTypeREST + "invalid" - return *model - }, - Err: `workflow.functions[0].type need by one of [rest rpc expression graphql asyncapi odata custom]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/inject_state.go b/model/inject_state.go deleted file mode 100644 index e3995c8..0000000 --- a/model/inject_state.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// InjectState used to inject static data into state data input. -type InjectState struct { - // JSON object which can be set as state's data input and can be manipulated via filter - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data map[string]Object `json:"data" validate:"required,min=1"` - // State specific timeouts - // +optional - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` -} - -func (i *InjectState) MarshalJSON() ([]byte, error) { - type Alias InjectState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(i), - Timeouts: i.Timeouts, - }) - return custom, err -} - -// InjectStateTimeout defines timeout settings for inject state -type InjectStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/inject_state_validator_test.go b/model/inject_state_validator_test.go deleted file mode 100644 index a8f127c..0000000 --- a/model/inject_state_validator_test.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestInjectStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} - -func TestInjectStateTimeoutStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/object.go b/model/object.go deleted file mode 100644 index e19d7b0..0000000 --- a/model/object.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "bytes" - "encoding/json" - "fmt" - "math" - "strconv" -) - -type Type int8 - -const ( - Null Type = iota - String - Int - Float - Map - Slice - Bool -) - -// Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type. -// The DeepCopy tool allow us to easily import the Workflow types into a Kubernetes operator, -// which requires the DeepCopy method. -// -// It can marshal and unmarshal any type. 
-// This object type can be three types: -// - String - holds string values -// - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is -// parsed to int32 -// -// +kubebuilder:pruning:PreserveUnknownFields -// +kubebuilder:validation:Schemaless -type Object struct { - Type Type `json:"type,inline"` - StringValue string `json:"strVal,inline"` - IntValue int32 `json:"intVal,inline"` - FloatValue float64 `json:"floatVal,inline"` - MapValue map[string]Object `json:"mapVal,inline"` - SliceValue []Object `json:"sliceVal,inline"` - BoolValue bool `json:"boolValue,inline"` -} - -// UnmarshalJSON implements json.Unmarshaler -func (obj *Object) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - - if data[0] == '"' { - obj.Type = String - return json.Unmarshal(data, &obj.StringValue) - } else if data[0] == 't' || data[0] == 'f' { - obj.Type = Bool - return json.Unmarshal(data, &obj.BoolValue) - } else if data[0] == 'n' { - obj.Type = Null - return nil - } else if data[0] == '{' { - obj.Type = Map - return json.Unmarshal(data, &obj.MapValue) - } else if data[0] == '[' { - obj.Type = Slice - return json.Unmarshal(data, &obj.SliceValue) - } - - number := string(data) - intValue, err := strconv.ParseInt(number, 10, 32) - if err == nil { - obj.Type = Int - obj.IntValue = int32(intValue) - return nil - } - - floatValue, err := strconv.ParseFloat(number, 64) - if err == nil { - obj.Type = Float - obj.FloatValue = floatValue - return nil - } - - return fmt.Errorf("json invalid number %q", number) -} - -// MarshalJSON marshal the given json object into the respective Object subtype. -func (obj Object) MarshalJSON() ([]byte, error) { - switch obj.Type { - case String: - return []byte(fmt.Sprintf(`%q`, obj.StringValue)), nil - case Int: - return []byte(fmt.Sprintf(`%d`, obj.IntValue)), nil - case Float: - return []byte(fmt.Sprintf(`%f`, obj.FloatValue)), nil - case Map: - return json.Marshal(obj.MapValue) - case Slice: - return json.Marshal(obj.SliceValue) - case Bool: - return []byte(fmt.Sprintf(`%t`, obj.BoolValue)), nil - case Null: - return []byte("null"), nil - default: - panic("object invalid type") - } -} - -func FromString(val string) Object { - return Object{Type: String, StringValue: val} -} - -func FromInt(val int) Object { - if val > math.MaxInt32 || val < math.MinInt32 { - fmt.Println(fmt.Errorf("value: %d overflows int32", val)) - } - return Object{Type: Int, IntValue: int32(val)} -} - -func FromFloat(val float64) Object { - if val > math.MaxFloat64 || val < -math.MaxFloat64 { - fmt.Println(fmt.Errorf("value: %f overflows float64", val)) - } - return Object{Type: Float, FloatValue: float64(val)} -} - -func FromMap(mapValue map[string]any) Object { - mapValueObject := make(map[string]Object, len(mapValue)) - for key, value := range mapValue { - mapValueObject[key] = FromInterface(value) - } - return Object{Type: Map, MapValue: mapValueObject} -} - -func FromSlice(sliceValue []any) Object { - sliceValueObject := make([]Object, len(sliceValue)) - for key, value := range sliceValue { - sliceValueObject[key] = FromInterface(value) - } - return Object{Type: Slice, SliceValue: sliceValueObject} -} - -func FromBool(val bool) Object { - return Object{Type: Bool, BoolValue: val} -} - -func FromNull() Object { - return Object{Type: Null} -} - -func FromInterface(value any) Object { - switch v := value.(type) { - case string: - return FromString(v) - case int: - return FromInt(v) - case int32: - return FromInt(int(v)) - case float64: 
- return FromFloat(v) - case map[string]any: - return FromMap(v) - case []any: - return FromSlice(v) - case bool: - return FromBool(v) - case nil: - return FromNull() - } - panic("invalid type") -} - -func ToInterface(object Object) any { - switch object.Type { - case String: - return object.StringValue - case Int: - return object.IntValue - case Float: - return object.FloatValue - case Map: - mapInterface := make(map[string]any, len(object.MapValue)) - for key, value := range object.MapValue { - mapInterface[key] = ToInterface(value) - } - return mapInterface - case Slice: - sliceInterface := make([]any, len(object.SliceValue)) - for key, value := range object.SliceValue { - sliceInterface[key] = ToInterface(value) - } - return sliceInterface - case Bool: - return object.BoolValue - case Null: - return nil - } - panic("invalid type") -} diff --git a/model/object_test.go b/model/object_test.go deleted file mode 100644 index 0cf928f..0000000 --- a/model/object_test.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func Test_unmarshal(t *testing.T) { - testCases := []struct { - name string - json string - object Object - any any - err string - }{ - { - name: "string", - json: "\"value\"", - object: FromString("value"), - any: any("value"), - }, - { - name: "int", - json: "123", - object: FromInt(123), - any: any(int32(123)), - }, - { - name: "float", - json: "123.123", - object: FromFloat(123.123), - any: any(123.123), - }, - { - name: "map", - json: "{\"key\": \"value\", \"key2\": 123}", - object: FromMap(map[string]any{"key": "value", "key2": 123}), - any: any(map[string]any{"key": "value", "key2": int32(123)}), - }, - { - name: "slice", - json: "[\"key\", 123]", - object: FromSlice([]any{"key", 123}), - any: any([]any{"key", int32(123)}), - }, - { - name: "bool true", - json: "true", - object: FromBool(true), - any: any(true), - }, - { - name: "bool false", - json: "false", - object: FromBool(false), - any: any(false), - }, - { - name: "null", - json: "null", - object: FromNull(), - any: nil, - }, - { - name: "string invalid", - json: "\"invalid", - err: "unexpected end of JSON input", - }, - { - name: "number invalid", - json: "123a", - err: "invalid character 'a' after top-level value", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - o := Object{} - err := json.Unmarshal([]byte(tc.json), &o) - if tc.err == "" { - assert.NoError(t, err) - assert.Equal(t, tc.object, o) - assert.Equal(t, ToInterface(tc.object), tc.any) - } else { - assert.Equal(t, tc.err, err.Error()) - } - }) - } -} - -func Test_marshal(t *testing.T) { - testCases := []struct { - name string - json string - object Object - err string - }{ - { - name: "string", - json: "\"value\"", - object: FromString("value"), - }, - { - name: "int", - json: "123", - object: FromInt(123), - }, 
- { - name: "float", - json: "123.123000", - object: FromFloat(123.123), - }, - { - name: "map", - json: "{\"key\":\"value\",\"key2\":123}", - object: FromMap(map[string]any{"key": "value", "key2": 123}), - }, - { - name: "slice", - json: "[\"key\",123]", - object: FromSlice([]any{"key", 123}), - }, - { - name: "bool true", - json: "true", - object: FromBool(true), - }, - { - name: "bool false", - json: "false", - object: FromBool(false), - }, - { - name: "null", - json: "null", - object: FromNull(), - }, - { - name: "interface", - json: "[\"value\",123,123.123000,[1],{\"key\":1.100000},true,false,null]", - object: FromInterface([]any{ - "value", - 123, - 123.123, - []any{1}, - map[string]any{"key": 1.1}, - true, - false, - nil, - }), - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - json, err := json.Marshal(tc.object) - if tc.err == "" { - assert.NoError(t, err) - assert.Equal(t, tc.json, string(json)) - } else { - assert.Equal(t, tc.err, err.Error()) - } - }) - } -} diff --git a/model/objects.go b/model/objects.go new file mode 100644 index 0000000..ecfba00 --- /dev/null +++ b/model/objects.go @@ -0,0 +1,260 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" +) + +// ObjectOrString is a type that can hold either a string or an object. +type ObjectOrString struct { + Value interface{} `validate:"object_or_string"` +} + +// UnmarshalJSON unmarshals data into either a string or an object. +func (o *ObjectOrString) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + o.Value = asString + return nil + } + + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + return errors.New("ObjectOrString must be a string or an object") +} + +// MarshalJSON marshals ObjectOrString into JSON. +func (o *ObjectOrString) MarshalJSON() ([]byte, error) { + return json.Marshal(o.Value) +} + +// ObjectOrRuntimeExpr is a type that can hold either a RuntimeExpression or an object. +type ObjectOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"object_or_runtime_expr"` // Custom validation tag. +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or an object. 
+func (o *ObjectOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + o.Value = runtimeExpr + return nil + } + + // Attempt to decode as a generic object + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("ObjectOrRuntimeExpr must be a runtime expression or an object") +} + +// MarshalJSON marshals ObjectOrRuntimeExpr into JSON. +func (o *ObjectOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := o.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case map[string]interface{}: + return json.Marshal(v) + default: + return nil, fmt.Errorf("ObjectOrRuntimeExpr contains unsupported type") + } +} + +// Validate validates the ObjectOrRuntimeExpr using the custom validation logic. +func (o *ObjectOrRuntimeExpr) Validate() error { + switch v := o.Value.(type) { + case RuntimeExpression: + if !v.IsValid() { + return fmt.Errorf("invalid runtime expression: %s", v.Value) + } + case map[string]interface{}: + if len(v) == 0 { + return fmt.Errorf("object cannot be empty") + } + default: + return fmt.Errorf("unsupported value type for ObjectOrRuntimeExpr") + } + return nil +} + +// StringOrRuntimeExpr is a type that can hold either a RuntimeExpression or a string. +type StringOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"string_or_runtime_expr"` // Custom validation tag. +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or a string. +func (s *StringOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + s.Value = runtimeExpr + return nil + } + + // Attempt to decode as a string + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + s.Value = asString + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("StringOrRuntimeExpr must be a runtime expression or a string") +} + +// MarshalJSON marshals StringOrRuntimeExpr into JSON. +func (s *StringOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := s.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + return json.Marshal(v) + default: + return nil, fmt.Errorf("StringOrRuntimeExpr contains unsupported type") + } +} + +func (s *StringOrRuntimeExpr) String() string { + switch v := s.Value.(type) { + case RuntimeExpression: + return v.String() + case string: + return v + default: + return "" + } +} + +// URITemplateOrRuntimeExpr represents a type that can be a URITemplate or a RuntimeExpression. +type URITemplateOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"uri_template_or_runtime_expr"` // Custom validation. +} + +func NewUriTemplate(uriTemplate string) *URITemplateOrRuntimeExpr { + return &URITemplateOrRuntimeExpr{ + Value: uriTemplate, + } +} + +// UnmarshalJSON unmarshals data into either a URITemplate or a RuntimeExpression. 
+func (u *URITemplateOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as URITemplate + uriTemplate, err := UnmarshalURITemplate(data) + if err == nil { + u.Value = uriTemplate + return nil + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + u.Value = runtimeExpr + return nil + } + + // Return an error if neither succeeds + return fmt.Errorf("URITemplateOrRuntimeExpr must be a valid URITemplate or RuntimeExpression") +} + +// MarshalJSON marshals URITemplateOrRuntimeExpr into JSON. +func (u *URITemplateOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := u.Value.(type) { + case URITemplate: + return json.Marshal(v.String()) + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + // Attempt to marshal as RuntimeExpression + runtimeExpr := RuntimeExpression{Value: v} + if runtimeExpr.IsValid() { + return json.Marshal(runtimeExpr.String()) + } + // Otherwise, treat as a Literal URI + uriTemplate, err := UnmarshalURITemplate([]byte(fmt.Sprintf(`"%s"`, v))) + if err == nil { + return json.Marshal(uriTemplate.String()) + } + return nil, fmt.Errorf("invalid string for URITemplateOrRuntimeExpr: %s", v) + default: + return nil, fmt.Errorf("unsupported type for URITemplateOrRuntimeExpr: %T", v) + } +} + +func (u *URITemplateOrRuntimeExpr) String() string { + switch v := u.Value.(type) { + case URITemplate: + return v.String() + case RuntimeExpression: + return v.String() + } + return "" +} + +// JsonPointerOrRuntimeExpression represents a type that can be a JSON Pointer or a RuntimeExpression. +type JsonPointerOrRuntimeExpression struct { + Value interface{} `json:"-" validate:"json_pointer_or_runtime_expr"` // Custom validation tag. +} + +// JSONPointerPattern validates JSON Pointers as per RFC 6901. +var JSONPointerPattern = regexp.MustCompile(`^(/([^/~]|~[01])*)*$`) + +// UnmarshalJSON unmarshals data into either a JSON Pointer or a RuntimeExpression. +func (j *JsonPointerOrRuntimeExpression) UnmarshalJSON(data []byte) error { + // Attempt to decode as a JSON Pointer + var jsonPointer string + if err := json.Unmarshal(data, &jsonPointer); err == nil { + if JSONPointerPattern.MatchString(jsonPointer) { + j.Value = jsonPointer + return nil + } + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil { + if runtimeExpr.IsValid() { + j.Value = runtimeExpr + return nil + } + } + + // If neither succeeds, return an error + return fmt.Errorf("JsonPointerOrRuntimeExpression must be a valid JSON Pointer or RuntimeExpression") +} + +// MarshalJSON marshals JsonPointerOrRuntimeExpression into JSON. +func (j *JsonPointerOrRuntimeExpression) MarshalJSON() ([]byte, error) { + switch v := j.Value.(type) { + case string: // JSON Pointer + return json.Marshal(v) + case RuntimeExpression: + return json.Marshal(v.String()) + default: + return nil, fmt.Errorf("JsonPointerOrRuntimeExpression contains unsupported type") + } +} diff --git a/model/objects_test.go b/model/objects_test.go new file mode 100644 index 0000000..c77d3bb --- /dev/null +++ b/model/objects_test.go @@ -0,0 +1,190 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestObjectOrRuntimeExpr_UnmarshalJSON(t *testing.T) { + cases := []struct { + Name string + JSON string + Expected interface{} + ShouldErr bool + }{ + { + Name: "Unmarshal valid string", + JSON: `"${ expression }"`, + Expected: RuntimeExpression{Value: "${ expression }"}, + ShouldErr: false, + }, + { + Name: "Unmarshal valid object", + JSON: `{ + "key": "value" + }`, + Expected: map[string]interface{}{ + "key": "value", + }, + ShouldErr: false, + }, + { + Name: "Unmarshal invalid type", + JSON: `123`, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + var obj ObjectOrRuntimeExpr + err := json.Unmarshal([]byte(tc.JSON), &obj) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + assert.Equal(t, tc.Expected, obj.Value, "unexpected unmarshalled value") + } + }) + } +} + +func TestURITemplateOrRuntimeExprValidation(t *testing.T) { + cases := []struct { + Name string + Input *URITemplateOrRuntimeExpr + ShouldErr bool + }{ + { + Name: "Valid URI template", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + }, + ShouldErr: false, + }, + { + Name: "Valid URI", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "http://example.com"}, + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Invalid URI format", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "invalid-uri"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: &URITemplateOrRuntimeExpr{ + Value: 123, + }, + ShouldErr: true, + }, + { + Name: "Valid URI as string", + Input: &URITemplateOrRuntimeExpr{ + Value: "http://example.com", + }, + ShouldErr: false, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "uri_template_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} + +func TestJsonPointerOrRuntimeExpressionValidation(t *testing.T) { + cases := []struct { + Name string + Input JsonPointerOrRuntimeExpression + ShouldErr bool + }{ + { + Name: "Valid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "/valid/json/pointer", + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "invalid-json-pointer", + }, + ShouldErr: true, + }, + { + Name: "Invalid 
runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: JsonPointerOrRuntimeExpression{ + Value: 123, + }, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "json_pointer_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} diff --git a/model/operation_state.go b/model/operation_state.go deleted file mode 100644 index c530ad8..0000000 --- a/model/operation_state.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// OperationState defines a set of actions to be performed in sequence or in parallel. -// +builder-gen:new-call=ApplyDefault -type OperationState struct { - // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions" validate:"min=0,dive"` - // State specific timeouts - // +optional - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` -} - -func (a *OperationState) MarshalJSON() ([]byte, error) { - type Alias OperationState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(a), - Timeouts: a.Timeouts, - }) - return custom, err -} - -type operationStateUnmarshal OperationState - -// UnmarshalJSON unmarshal OperationState object from json bytes -func (o *OperationState) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) -} - -// ApplyDefault set the default values for Operation State -func (o *OperationState) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// OperationStateTimeout defines the specific timeout settings for operation state -type OperationStateTimeout struct { - // Defines workflow state execution timeout. 
- // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/operation_state_test.go b/model/operation_state_test.go deleted file mode 100644 index 4939797..0000000 --- a/model/operation_state_test.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestOperationStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OperationState - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"actionMode": "parallel"}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "actionMode unset", - data: `{}`, - expect: OperationState{ - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `{"actionMode": parallel}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: `invalid character 'p' looking for beginning of value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OperationState{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/operation_state_validator_test.go b/model/operation_state_validator_test.go deleted file mode 100644 index 5da6dba..0000000 --- a/model/operation_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildOperationState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: ActionModeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOperationStateTimeout(state *State) *OperationStateTimeout { - state.OperationState.Timeouts = &OperationStateTimeout{ - ActionExecTimeout: "PT5S", - } - return state.OperationState.Timeouts -} - -func TestOperationStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions = []Action{} - return *model - }, - Err: ``, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].actionMode need by one of [sequential parallel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOperationStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - operationStateTimeout := buildOperationStateTimeout(operationState) - buildStateExecTimeoutByOperationStateTimeout(operationStateTimeout) - - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/parallel_state.go b/model/parallel_state.go deleted file mode 100644 index f65b7a1..0000000 --- a/model/parallel_state.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// CompletionType define on how to complete branch execution. -type CompletionType string - -func (i CompletionType) KindValues() []string { - return []string{ - string(CompletionTypeAllOf), - string(CompletionTypeAtLeast), - } -} - -func (i CompletionType) String() string { - return string(i) -} - -const ( - // CompletionTypeAllOf defines all branches must complete execution before the state can transition/end. - CompletionTypeAllOf CompletionType = "allOf" - // CompletionTypeAtLeast defines state can transition/end once at least the specified number of branches - // have completed execution. - CompletionTypeAtLeast CompletionType = "atLeast" -) - -// ParallelState Consists of a number of states that are executed in parallel -// +builder-gen:new-call=ApplyDefault -type ParallelState struct { - // List of branches for this parallel state. - // +kubebuilder:validation:MinItems=1 - Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. Defaults to `allOf`. - // +kubebuilder:validation:Enum=allOf;atLeast - // +kubebuilder:default=allOf - CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneofkind"` - // Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete - // in order for the state to transition/end. - // +optional - // TODO: change this field to unmarshal result as int - NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` - // State specific timeouts - // +optional - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` -} - -func (p *ParallelState) MarshalJSON() ([]byte, error) { - type Alias ParallelState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(p), - Timeouts: p.Timeouts, - }) - return custom, err -} - -type parallelStateUnmarshal ParallelState - -// UnmarshalJSON unmarshal ParallelState object from json bytes -func (ps *ParallelState) UnmarshalJSON(data []byte) error { - ps.ApplyDefault() - return util.UnmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps)) -} - -// ApplyDefault set the default values for Parallel State -func (ps *ParallelState) ApplyDefault() { - ps.CompletionType = CompletionTypeAllOf -} - -// Branch Definition -type Branch struct { - // Branch name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Actions to be executed in this branch - // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions" validate:"required,min=1,dive"` - // Branch specific timeout settings - // +optional - Timeouts *BranchTimeouts `json:"timeouts,omitempty"` -} - -// BranchTimeouts defines the specific timeout settings for branch -type BranchTimeouts struct { - // Single actions definition execution timeout duration (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Single branch execution timeout duration (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// ParallelStateTimeout defines the specific timeout settings for parallel state -type ParallelStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 
duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single branch execution timeout (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go deleted file mode 100644 index b95cc69..0000000 --- a/model/parallel_state_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestParallelStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ParallelState - err string - } - testCases := []testCase{ - { - desp: "all field set", - data: `{"completionType": "allOf", "numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - { - desp: "all optional field not set", - data: `{"numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ParallelState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/parallel_state_validator.go b/model/parallel_state_validator.go deleted file mode 100644 index 5999071..0000000 --- a/model/parallel_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(parallelStateStructLevelValidation, ParallelState{}) -} - -// ParallelStateStructLevelValidation custom validator for ParallelState -func parallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - parallelStateObj := structLevel.Current().Interface().(ParallelState) - - if parallelStateObj.CompletionType == CompletionTypeAtLeast { - if !val.ValidateGt0IntStr(¶llelStateObj.NumCompleted) { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "NumCompleted", "gt0", "") - } - } -} diff --git a/model/parallel_state_validator_test.go b/model/parallel_state_validator_test.go deleted file mode 100644 index d1acea9..0000000 --- a/model/parallel_state_validator_test.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildParallelState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeParallel, - }, - ParallelState: &ParallelState{ - CompletionType: CompletionTypeAllOf, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildBranch(state *State, name string) *Branch { - branch := Branch{ - Name: name, - } - - state.ParallelState.Branches = append(state.ParallelState.Branches, branch) - return &state.ParallelState.Branches[len(state.ParallelState.Branches)-1] -} - -func buildBranchTimeouts(branch *Branch) *BranchTimeouts { - branch.Timeouts = &BranchTimeouts{} - return branch.Timeouts -} - -func buildParallelStateTimeout(state *State) *ParallelStateTimeout { - state.ParallelState.Timeouts = &ParallelStateTimeout{ - BranchExecTimeout: "PT5S", - } - return state.ParallelState.Timeouts -} - -func TestParallelStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success completionTypeAllOf", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "success completionTypeAtLeast", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - model.States[0].ParallelState.NumCompleted = intstr.FromInt(1) - return *model - }, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - 
model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + " invalid" - return *model - }, - Err: `workflow.states[0].parallelState.completionType need by one of [allOf atLeast]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = nil - model.States[0].ParallelState.CompletionType = "" - return *model - }, - Err: `workflow.states[0].parallelState.branches is required -workflow.states[0].parallelState.completionType is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = []Branch{} - return *model - }, - Err: `workflow.states[0].parallelState.branches must have the minimum 1`, - }, - { - Desp: "required numCompleted", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - return *model - }, - Err: `workflow.states[0].parallelState.numCompleted must be greater than 0`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Name = "" - model.States[0].ParallelState.Branches[0].Actions = nil - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].name is required -workflow.states[0].parallelState.branches[0].actions is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Actions = []Action{} - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].actions must have the minimum 1`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchTimeoutsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - buildBranchTimeouts(branch) - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "PT5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "PT5S" - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "P5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: 
`workflow.states[0].parallelState.branches[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].parallelState.branches[0].timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestParallelStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildParallelStateTimeout(parallelState) - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].parallelState.timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/retry.go b/model/retry.go deleted file mode 100644 index 9fe6e78..0000000 --- a/model/retry.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -// Retry ... -// +builder-gen:new-call=ApplyDefault -type Retry struct { - // Unique retry strategy name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Time delay between retry attempts (ISO 8601 duration format) - Delay string `json:"delay,omitempty" validate:"omitempty,iso8601duration"` - // Maximum time delay between retry attempts (ISO 8601 duration format) - MaxDelay string `json:"maxDelay,omitempty" validate:"omitempty,iso8601duration"` - // Static value by which the delay increases during each attempt (ISO 8601 time format) - Increment string `json:"increment,omitempty" validate:"omitempty,iso8601duration"` - // Numeric value, if specified the delay between retries is multiplied by this value. - // +optional - Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` - // Maximum number of retry attempts. - // +kubebuilder:validation:Required - MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` - // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) - // TODO: make iso8601duration compatible this type - Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` -} - -type retryUnmarshal Retry - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retry) UnmarshalJSON(data []byte) error { - r.ApplyDefault() - return util.UnmarshalObject("retry", data, (*retryUnmarshal)(r)) -} - -func (r *Retry) ApplyDefault() { - r.MaxAttempts = intstr.FromInt32(1) -} diff --git a/model/retry_test.go b/model/retry_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/retry_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/retry_validator.go b/model/retry_validator.go deleted file mode 100644 index bd2e755..0000000 --- a/model/retry_validator.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidation(retryStructLevelValidation, Retry{}) - val.GetValidator().RegisterStructValidation(floatstr.ValidateFloat32OrString, Retry{}) -} - -// RetryStructLevelValidation custom validator for Retry Struct -func retryStructLevelValidation(structLevel validator.StructLevel) { - retryObj := structLevel.Current().Interface().(Retry) - - if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { - err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") - } - } -} diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go deleted file mode 100644 index 8b73243..0000000 --- a/model/retry_validator_test.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -func TestRetryStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildRetryRef(baseWorkflow, action1, "retry 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "PT5S" - model.Retries[0].MaxDelay = "PT5S" - model.Retries[0].Increment = "PT5S" - model.Retries[0].Jitter = floatstr.FromString("0.5") - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Name = "" - model.States[0].OperationState.Actions[0].RetryRef = "" - return *model - }, - Err: `workflow.retries[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries = append(model.Retries, model.Retries[0]) - return *model - }, - Err: `workflow.retries has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].RetryRef = "invalid retry" - return *model - }, - Err: `workflow.states[0].actions[0].retryRef don't exist "invalid retry"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "P5S" - model.Retries[0].MaxDelay = "P5S" - model.Retries[0].Increment = "P5S" - model.Retries[0].Jitter = floatstr.FromString("P5S") - - return *model - }, - Err: `workflow.retries[0].delay invalid iso8601 duration "P5S" -workflow.retries[0].maxDelay invalid iso8601 duration "P5S" -workflow.retries[0].increment invalid iso8601 duration "P5S"`, - }, - { - Desp: "multiplier less than zero", - Model: func() Workflow { - multiplierZero := floatstr.FromString("0") - model := baseWorkflow.DeepCopy() - model.Retries[0].Multiplier = &multiplierZero - - return *model - }, - Err: `workflow.retries[0].multiplier must have the minimum `, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/runtime_expression.go b/model/runtime_expression.go new file mode 100644 index 0000000..c67a3ef --- /dev/null +++ b/model/runtime_expression.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "github.com/itchyny/gojq" + "strings" +) + +// RuntimeExpression represents a runtime expression. +type RuntimeExpression struct { + Value string `json:"-" validate:"required"` +} + +// NewRuntimeExpression is an alias for NewExpr +var NewRuntimeExpression = NewExpr + +// NewExpr creates a new RuntimeExpression instance +func NewExpr(runtimeExpression string) *RuntimeExpression { + return &RuntimeExpression{Value: runtimeExpression} +} + +// preprocessExpression removes `${}` if present and returns the inner content. +func preprocessExpression(expression string) string { + if strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") { + return strings.TrimSpace(expression[2 : len(expression)-1]) + } + return expression // Return the expression as-is if `${}` are not present +} + +// IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. +func (r *RuntimeExpression) IsValid() bool { + // Preprocess to extract content inside `${}` if present + processedExpr := preprocessExpression(r.Value) + + // Validate the processed expression using gojq + _, err := gojq.Parse(processedExpr) + return err == nil +} + +// UnmarshalJSON implements custom unmarshalling for RuntimeExpression. +func (r *RuntimeExpression) UnmarshalJSON(data []byte) error { + // Decode the input as a string + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RuntimeExpression: %w", err) + } + + // Assign the value + r.Value = raw + + // Validate the runtime expression + if !r.IsValid() { + return fmt.Errorf("invalid runtime expression format: %s", raw) + } + + return nil +} + +// MarshalJSON implements custom marshalling for RuntimeExpression. +func (r *RuntimeExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(r.Value) +} + +func (r *RuntimeExpression) String() string { + return r.Value +} diff --git a/model/runtime_expression_test.go b/model/runtime_expression_test.go new file mode 100644 index 0000000..296e1de --- /dev/null +++ b/model/runtime_expression_test.go @@ -0,0 +1,70 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRuntimeExpressionUnmarshalJSON(t *testing.T) { + tests := []struct { + Name string + JSONInput string + Expected string + ExpectErr bool + }{ + { + Name: "Valid RuntimeExpression", + JSONInput: `{ "expression": "${runtime.value}" }`, + Expected: "${runtime.value}", + ExpectErr: false, + }, + { + Name: "Invalid RuntimeExpression", + JSONInput: `{ "expression": "1234invalid_runtime" }`, + Expected: "", + ExpectErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.Name, func(t *testing.T) { + var acme *RuntimeExpressionAcme + err := json.Unmarshal([]byte(tc.JSONInput), &acme) + + if tc.ExpectErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, acme.Expression.Value) + } + + // Test marshalling + if !tc.ExpectErr { + output, err := json.Marshal(acme) + assert.NoError(t, err) + assert.JSONEq(t, tc.JSONInput, string(output)) + } + }) + } +} + +// RuntimeExpressionAcme represents a struct using RuntimeExpression. +type RuntimeExpressionAcme struct { + Expression RuntimeExpression `json:"expression"` +} diff --git a/model/sleep_state.go b/model/sleep_state.go deleted file mode 100644 index 5d144c5..0000000 --- a/model/sleep_state.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// SleepState suspends workflow execution for a given time duration. -type SleepState struct { - // Duration (ISO 8601 duration format) to sleep - // +kubebuilder:validation:Required - Duration string `json:"duration" validate:"required,iso8601duration"` - // Timeouts State specific timeouts - // +optional - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SleepState) MarshalJSON() ([]byte, error) { - type Alias SleepState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - return custom, err -} - -// SleepStateTimeout defines timeout settings for sleep state -type SleepStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/sleep_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/sleep_state_validator_test.go b/model/sleep_state_validator_test.go deleted file mode 100644 index 057d6b3..0000000 --- a/model/sleep_state_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func buildSleepState(workflow *Workflow, name, duration string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSleep, - }, - SleepState: &SleepState{ - Duration: duration, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildSleepStateTimeout(state *State) *SleepStateTimeout { - state.SleepState.Timeouts = &SleepStateTimeout{} - return state.SleepState.Timeouts -} - -func TestSleepStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "" - return *model - }, - Err: `workflow.states[0].sleepState.duration is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "P5S" - return *model - }, - Err: `workflow.states[0].sleepState.duration invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - sleepStateTimeout := buildSleepStateTimeout(sleepState) - buildStateExecTimeoutBySleepStateTimeout(sleepStateTimeout) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go deleted file mode 100644 index 0a53fd8..0000000 --- a/model/state_exec_timeout.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in 
compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// StateExecTimeout defines workflow state execution timeout -type StateExecTimeout struct { - // Single state execution timeout, not including retries (ISO 8601 duration format) - // +optional - Single string `json:"single,omitempty" validate:"omitempty,iso8601duration"` - // Total state execution timeout, including retries (ISO 8601 duration format) - // +kubebuilder:validation:Required - Total string `json:"total" validate:"required,iso8601duration"` -} - -type stateExecTimeoutUnmarshal StateExecTimeout - -// UnmarshalJSON unmarshal StateExecTimeout object from json bytes -func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) -} diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go deleted file mode 100644 index 6030395..0000000 --- a/model/state_exec_timeout_test.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - - expect *StateExecTimeout - err string - } - testCases := []testCase{ - { - desp: "normal string", - data: `"PT10S"`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total", - data: `{ - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total & single", - data: `{ - "single": "PT1S", - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "PT1S", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "invalid string or object", - data: `PT10S`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout has a syntax error "invalid character 'P' looking for beginning of value"`, - }, - { - desp: "invalid total type", - data: `{ - "single": "PT1S", - "total": 10 - }`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout.total must be string`, - }, - { - desp: "invalid single type", - data: `{ - "single": 1, - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: `stateExecTimeout.single must be string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - actual := &StateExecTimeout{} - err := actual.UnmarshalJSON([]byte(tc.data)) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, actual) - }) - } -} diff --git a/model/state_exec_timeout_validator_test.go b/model/state_exec_timeout_validator_test.go deleted file mode 100644 index 5a2f794..0000000 --- a/model/state_exec_timeout_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildStateExecTimeoutByTimeouts(timeouts *Timeouts) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutBySleepStateTimeout(timeouts *SleepStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutByOperationStateTimeout(timeouts *OperationStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.ActionExecTimeout = "PT5S" - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func TestStateExecTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - timeouts := buildTimeouts(baseWorkflow) - buildStateExecTimeoutByTimeouts(timeouts) - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "" - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.total is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "P5S" - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "P5S" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.single invalid iso8601 duration "P5S" -workflow.timeouts.stateExecTimeout.total invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/states.go b/model/states.go deleted file mode 100644 index a19429d..0000000 --- a/model/states.go +++ /dev/null @@ -1,283 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// StateType ... 
-type StateType string - -func (s StateType) KindValues() []string { - return []string{ - string(StateTypeDelay), - string(StateTypeEvent), - string(StateTypeOperation), - string(StateTypeParallel), - string(StateTypeSwitch), - string(StateTypeForEach), - string(StateTypeInject), - string(StateTypeCallback), - string(StateTypeSleep), - } -} - -func (s StateType) String() string { - return string(s) -} - -const ( - // StateTypeDelay ... - StateTypeDelay StateType = "delay" - // StateTypeEvent ... - StateTypeEvent StateType = "event" - // StateTypeOperation ... - StateTypeOperation StateType = "operation" - // StateTypeParallel ... - StateTypeParallel StateType = "parallel" - // StateTypeSwitch ... - StateTypeSwitch StateType = "switch" - // StateTypeForEach ... - StateTypeForEach StateType = "foreach" - // StateTypeInject ... - StateTypeInject StateType = "inject" - // StateTypeCallback ... - StateTypeCallback StateType = "callback" - // StateTypeSleep ... - StateTypeSleep StateType = "sleep" -) - -// BaseState ... -type BaseState struct { - // Unique State id. - // +optional - ID string `json:"id,omitempty"` - // State name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // stateType can be any of delay, callback, event, foreach, inject, operation, parallel, sleep, switch - // +kubebuilder:validation:Enum:=delay;callback;event;foreach;inject;operation;parallel;sleep;switch - // +kubebuilder:validation:Required - Type StateType `json:"type" validate:"required,oneofkind"` - // States error handling and retries definitions. - // +optional - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` - // Next transition of the workflow after the time delay. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // State data filter. - // +optional - StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` - // Unique Name of a workflow state which is responsible for compensation of this state. - // +optional - CompensatedBy string `json:"compensatedBy,omitempty"` - // If true, this state is used to compensate another state. Default is false. - // +optional - UsedForCompensation bool `json:"usedForCompensation,omitempty"` - // State end definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` - // Metadata information. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata *Metadata `json:"metadata,omitempty"` -} - -func (b *BaseState) MarshalJSON() ([]byte, error) { - type Alias BaseState - if b == nil { - return nil, nil - } - cus, err := json.Marshal(struct { - *Alias - }{ - Alias: (*Alias)(b), - }) - return cus, err -} - -// +builder-gen:embedded-ignore-method=BaseState -type State struct { - BaseState `json:",inline"` - // delayState Causes the workflow execution to delay for a specified duration. - // +optional - *DelayState `json:"delayState,omitempty"` - // event states await one or more events and perform actions when they are received. If defined as the - // workflow starting state, the event state definition controls when the workflow instances should be created. - // +optional - *EventState `json:"eventState,omitempty"` - // operationState defines a set of actions to be performed in sequence or in parallel. 
- // +optional - *OperationState `json:"operationState,omitempty"` - // parallelState Consists of a number of states that are executed in parallel. - // +optional - *ParallelState `json:"parallelState,omitempty"` - // switchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. - // +optional - *SwitchState `json:"switchState,omitempty"` - // forEachState used to execute actions for each element of a data set. - // +optional - *ForEachState `json:"forEachState,omitempty"` - // injectState used to inject static data into state data input. - // +optional - *InjectState `json:"injectState,omitempty"` - // callbackState executes a function and waits for callback event that indicates completion of the task. - // +optional - *CallbackState `json:"callbackState,omitempty"` - // sleepState suspends workflow execution for a given time duration. - // +optional - *SleepState `json:"sleepState,omitempty"` -} - -func (s *State) MarshalJSON() ([]byte, error) { - if s == nil { - return nil, nil - } - r := []byte("") - var errs error - - if s.DelayState != nil { - r, errs = s.DelayState.MarshalJSON() - } - - if s.EventState != nil { - r, errs = s.EventState.MarshalJSON() - } - - if s.OperationState != nil { - r, errs = s.OperationState.MarshalJSON() - } - - if s.ParallelState != nil { - r, errs = s.ParallelState.MarshalJSON() - } - - if s.SwitchState != nil { - r, errs = s.SwitchState.MarshalJSON() - } - - if s.ForEachState != nil { - r, errs = s.ForEachState.MarshalJSON() - } - - if s.InjectState != nil { - r, errs = s.InjectState.MarshalJSON() - } - - if s.CallbackState != nil { - r, errs = s.CallbackState.MarshalJSON() - } - - if s.SleepState != nil { - r, errs = s.SleepState.MarshalJSON() - } - - b, err := s.BaseState.MarshalJSON() - if err != nil { - return nil, err - } - - //remove }{ as BaseState and the State Type needs to be merged together - partialResult := append(b, r...) 
- result := strings.Replace(string(partialResult), "}{", ",", 1) - return []byte(result), errs -} - -type unmarshalState State - -// UnmarshalJSON implements json.Unmarshaler -func (s *State) UnmarshalJSON(data []byte) error { - if err := util.UnmarshalObject("state", data, (*unmarshalState)(s)); err != nil { - return err - } - - switch s.Type { - case StateTypeDelay: - state := &DelayState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.DelayState = state - - case StateTypeEvent: - state := &EventState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.EventState = state - - case StateTypeOperation: - state := &OperationState{} - if err := util.UnmarshalObject("states", data, state); err != nil { - return err - } - s.OperationState = state - - case StateTypeParallel: - state := &ParallelState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ParallelState = state - - case StateTypeSwitch: - state := &SwitchState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SwitchState = state - - case StateTypeForEach: - state := &ForEachState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ForEachState = state - - case StateTypeInject: - state := &InjectState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.InjectState = state - - case StateTypeCallback: - state := &CallbackState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.CallbackState = state - - case StateTypeSleep: - state := &SleepState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SleepState = state - default: - return fmt.Errorf("states type %q not supported", s.Type.String()) - } - return nil -} diff --git a/model/states_validator.go b/model/states_validator.go deleted file mode 100644 index 1bb58e5..0000000 --- a/model/states_validator.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(baseStateStructLevelValidationCtx), BaseState{}) -} - -func baseStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - baseState := structLevel.Current().Interface().(BaseState) - if baseState.Type != StateTypeSwitch && !baseState.UsedForCompensation { - validTransitionAndEnd(structLevel, baseState, baseState.Transition, baseState.End) - } - - if baseState.CompensatedBy != "" { - if baseState.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagRecursiveCompensation, "") - } - - if ctx.ExistState(baseState.CompensatedBy) { - value := ctx.States[baseState.CompensatedBy].BaseState - if value.UsedForCompensation && value.Type == StateTypeEvent { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedbyEventState, "") - - } else if !value.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedby, "") - } - - } else { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagExists, "") - } - } -} diff --git a/model/states_validator_test.go b/model/states_validator_test.go deleted file mode 100644 index 8766d87..0000000 --- a/model/states_validator_test.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func TestBaseStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 3) - - operationState := buildOperationState(baseWorkflow, "start state 1") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "state 2") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - eventState := buildEventState(baseWorkflow, "state 3") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat name", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States = []State{model.States[0], model.States[0]} - return *model - }, - Err: `workflow.states has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = "invalid state compensate by" - return *model - }, - Err: `workflow.states[0].compensatedBy don't exist "invalid state compensate by"`, - }, - { - Desp: "tagcompensatedby", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = model.States[1].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 2" is not defined as usedForCompensation`, - }, - { - Desp: "compensatedbyeventstate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[2].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[2].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 3" is defined as usedForCompensation and cannot be an event state`, - }, - { - Desp: "recursivecompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "start state 1" is defined as usedForCompensation (cannot themselves set their compensatedBy)`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End = nil - return *model - }, - Err: `workflow.states[0].transition is required`, - }, - { - Desp: "exclusive", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByState(&model.States[0], &model.States[1], false) - - return *model - }, - Err: `workflow.states[0].transition exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Type = StateTypeOperation + "invalid" - return *model - }, - Err: `workflow.states[0].type need by one of [delay event operation parallel switch foreach inject callback sleep]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/switch_state.go b/model/switch_state.go deleted file mode 100644 index 88d0c83..0000000 --- a/model/switch_state.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -type EventConditions []EventCondition - -// SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. -type SwitchState struct { - // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. - - // Default transition of the workflow if there is no matching data conditions. Can include a transition or - // end definition. - DefaultCondition DefaultCondition `json:"defaultCondition"` - // Defines conditions evaluated against events. - // +optional - EventConditions EventConditions `json:"eventConditions" validate:"dive"` - // Defines conditions evaluated against data - // +optional - DataConditions []DataCondition `json:"dataConditions" validate:"dive"` - // SwitchState specific timeouts - // +optional - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SwitchState) MarshalJSON() ([]byte, error) { - type Alias SwitchState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - - // Avoid marshal empty objects as null. - st := strings.Replace(string(custom), "\"eventConditions\":null,", "", 1) - st = strings.Replace(st, "\"dataConditions\":null,", "", 1) - st = strings.Replace(st, "\"end\":null,", "", -1) - return []byte(st), err -} - -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - // Each state can define a transition definition that is used to determine which state to transition to next. 
- // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // If this state an end state - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` -} - -type defaultConditionUnmarshal DefaultCondition - -// UnmarshalJSON implements json.Unmarshaler -func (e *DefaultCondition) UnmarshalJSON(data []byte) error { - var nextState string - err := util.UnmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) - if err != nil { - return err - } - - if nextState != "" { - e.Transition = &Transition{NextState: nextState} - } - - return err -} - -// SwitchStateTimeout defines the specific timeout settings for switch state -type SwitchStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. - // NOTE: this is only available for EventConditions - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// EventCondition specify events which the switch state must wait for. -type EventCondition struct { - // Event condition name. - // +optional - Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition. - // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` - // Metadata information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. - // Explicit transition to end - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition" validate:"omitempty"` -} - -// DataCondition specify a data-based condition statement which causes a transition to another workflow state -// if evaluated to true. -type DataCondition struct { - // Data condition name. - // +optional - Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false. - // +kubebuilder:validation:Required - Condition string `json:"condition" validate:"required"` - // Metadata information. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. 
- // Explicit transition to end - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - Transition *Transition `json:"transition,omitempty" validate:"omitempty"` -} diff --git a/model/switch_state_test.go b/model/switch_state_test.go deleted file mode 100644 index e2f5c51..0000000 --- a/model/switch_state_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDefaultConditionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect DefaultCondition - err string - } - - testCases := []testCase{ - { - desp: "json nextState success", - data: `{"transition": {"nextState": "next state"}}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json nextState", - data: `{"transition": {"nextState": "next state}}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json nextState type", - data: `{"transition": {"nextState": true}}`, - err: `transition.nextState must be string`, - }, - { - desp: "transition json success", - data: `{"transition": "next state"}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json transition", - data: `{"transition": "next state}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json transition type", - data: `{"transition": true}`, - err: `transition must be string or object`, - }, - { - desp: "string success", - data: `"next state"`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid string syntax", - data: `"next state`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid type", - data: `123`, - err: `defaultCondition must be string or object`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DefaultCondition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/switch_state_validator.go b/model/switch_state_validator.go deleted file mode 100644 index 5738104..0000000 --- a/model/switch_state_validator.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(switchStateStructLevelValidation), SwitchState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(defaultConditionStructLevelValidation), DefaultCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventConditionStructLevelValidationCtx), EventCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(dataConditionStructLevelValidation), DataCondition{}) -} - -// SwitchStateStructLevelValidation custom validator for SwitchState -func switchStateStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - switchState := structLevel.Current().Interface().(SwitchState) - - switch { - case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagRequired, "") - case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagExclusive, "") - } -} - -// DefaultConditionStructLevelValidation custom validator for DefaultCondition -func defaultConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - defaultCondition := structLevel.Current().Interface().(DefaultCondition) - validTransitionAndEnd(structLevel, defaultCondition, defaultCondition.Transition, defaultCondition.End) -} - -// EventConditionStructLevelValidation custom validator for EventCondition -func eventConditionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - eventCondition := structLevel.Current().Interface().(EventCondition) - validTransitionAndEnd(structLevel, eventCondition, eventCondition.Transition, eventCondition.End) - - if eventCondition.EventRef != "" && !ctx.ExistEvent(eventCondition.EventRef) { - structLevel.ReportError(eventCondition.EventRef, "eventRef", "EventRef", val.TagExists, "") - } -} - -// DataConditionStructLevelValidation custom validator for DataCondition -func dataConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - dataCondition := structLevel.Current().Interface().(DataCondition) - validTransitionAndEnd(structLevel, dataCondition, dataCondition.Transition, dataCondition.End) -} diff --git a/model/switch_state_validator_test.go b/model/switch_state_validator_test.go deleted file mode 100644 index 9c40462..0000000 --- a/model/switch_state_validator_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" -) - -func buildSwitchState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSwitch, - }, - SwitchState: &SwitchState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildDefaultCondition(state *State) *DefaultCondition { - state.SwitchState.DefaultCondition = DefaultCondition{} - return &state.SwitchState.DefaultCondition -} - -func buildDataCondition(state *State, name, condition string) *DataCondition { - if state.SwitchState.DataConditions == nil { - state.SwitchState.DataConditions = []DataCondition{} - } - - dataCondition := DataCondition{ - Name: name, - Condition: condition, - } - - state.SwitchState.DataConditions = append(state.SwitchState.DataConditions, dataCondition) - return &state.SwitchState.DataConditions[len(state.SwitchState.DataConditions)-1] -} - -func buildEventCondition(workflow *Workflow, state *State, name, eventRef string) (*Event, *EventCondition) { - workflow.Events = append(workflow.Events, Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindConsumed, - }) - - eventCondition := EventCondition{ - Name: name, - EventRef: eventRef, - } - - state.SwitchState.EventConditions = append(state.SwitchState.EventConditions, eventCondition) - return &workflow.Events[len(workflow.Events)-1], &state.SwitchState.EventConditions[len(state.SwitchState.EventConditions)-1] -} - -func TestSwitchStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - swithState := buildSwitchState(baseWorkflow, "start state") - defaultCondition := buildDefaultCondition(swithState) - buildEndByDefaultCondition(defaultCondition, true, false) - - dataCondition := buildDataCondition(swithState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventCondition(model, &model.States[0], "event condition", "event 1") - buildEndByEventCondition(&model.States[0].SwitchState.EventConditions[0], true, false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDefaultConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - buildSwitchState(baseWorkflow, "start state") - buildDefaultCondition(&baseWorkflow.States[0]) - - buildDataCondition(&baseWorkflow.States[0], "data condition 1", "1=1") - buildEndByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[0], true, false) - buildDataCondition(&baseWorkflow.States[0], "data condition 2", "1=1") - - 
buildOperationState(baseWorkflow, "end state") - buildEndByState(&baseWorkflow.States[1], true, false) - buildActionByOperationState(&baseWorkflow.States[1], "action 1") - buildFunctionRef(baseWorkflow, &baseWorkflow.States[1].OperationState.Actions[0], "function 1") - - buildTransitionByDefaultCondition(&baseWorkflow.States[0].SwitchState.DefaultCondition, &baseWorkflow.States[1]) - buildTransitionByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[1], &baseWorkflow.States[1], false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSwitchStateTimeoutStructLevelValidation(t *testing.T) { -} - -func TestEventConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - // switch state - switchState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(switchState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // event condition 1 - _, eventCondition := buildEventCondition(baseWorkflow, switchState, "data condition 1", "event 1") - buildEndByEventCondition(eventCondition, true, false) - - // event condition 2 - _, eventCondition2 := buildEventCondition(baseWorkflow, switchState, "data condition 2", "event 2") - buildEndByEventCondition(eventCondition2, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - // trasition switch state to operation state - buildTransitionByEventCondition(eventCondition, operationState, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].EventRef = "event not found" - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].eventRef don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByEventCondition(&model.States[0].SwitchState.EventConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - // switch 
state - swithcState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(swithcState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // data condition - dataCondition := buildDataCondition(swithcState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/task.go b/model/task.go new file mode 100644 index 0000000..3bbeb4d --- /dev/null +++ b/model/task.go @@ -0,0 +1,418 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TaskBase struct { + // A runtime expression, if any, used to determine whether or not the task should be run. + If *RuntimeExpression `json:"if,omitempty" validate:"omitempty"` + // Configure the task's input. + Input *Input `json:"input,omitempty" validate:"omitempty"` + // Configure the task's output. + Output *Output `json:"output,omitempty" validate:"omitempty"` + // Export task output to context. + Export *Export `json:"export,omitempty" validate:"omitempty"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" validate:"omitempty"` + // The flow directive to be performed upon completion of the task. + Then *FlowDirective `json:"then,omitempty" validate:"omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` +} + +// Task represents a discrete unit of work in a workflow. +type Task interface{} + +// TaskItem represents a named task and its associated definition. +type TaskItem struct { + Key string `json:"-" validate:"required"` + Task Task `json:"-" validate:"required"` +} + +// MarshalJSON for TaskItem to ensure proper serialization as a key-value pair. 
+func (ti *TaskItem) MarshalJSON() ([]byte, error) { + if ti == nil { + return nil, fmt.Errorf("cannot marshal a nil TaskItem") + } + + // Serialize the Task + taskJSON, err := json.Marshal(ti.Task) + if err != nil { + return nil, fmt.Errorf("failed to marshal task: %w", err) + } + + // Create a map with the Key and Task + taskEntry := map[string]json.RawMessage{ + ti.Key: taskJSON, + } + + // Marshal the map into JSON + return json.Marshal(taskEntry) +} + +type NamedTaskMap map[string]Task + +// UnmarshalJSON for NamedTaskMap to ensure proper deserialization. +func (ntm *NamedTaskMap) UnmarshalJSON(data []byte) error { + var rawTasks map[string]json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for name, raw := range rawTasks { + task, err := unmarshalTask(name, raw) + if err != nil { + return err + } + + if *ntm == nil { + *ntm = make(map[string]Task) + } + (*ntm)[name] = task + } + + return nil +} + +// TaskList represents a list of named tasks to perform. +type TaskList []*TaskItem + +// UnmarshalJSON for TaskList to ensure proper deserialization. +func (tl *TaskList) UnmarshalJSON(data []byte) error { + var rawTasks []json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for _, raw := range rawTasks { + var taskItemRaw map[string]json.RawMessage + if err := json.Unmarshal(raw, &taskItemRaw); err != nil { + return err + } + + if len(taskItemRaw) != 1 { + return errors.New("each TaskItem must have exactly one key") + } + + for key, taskRaw := range taskItemRaw { + task, err := unmarshalTask(key, taskRaw) + if err != nil { + return err + } + *tl = append(*tl, &TaskItem{Key: key, Task: task}) + } + } + + return nil +} + +var taskTypeRegistry = map[string]func() Task{ + "call_http": func() Task { return &CallHTTP{} }, + "call_openapi": func() Task { return &CallOpenAPI{} }, + "call_grpc": func() Task { return &CallGRPC{} }, + "call_asyncapi": func() Task { return &CallAsyncAPI{} }, + "call": func() Task { return &CallFunction{} }, + "do": func() Task { return &DoTask{} }, + "fork": func() Task { return &ForkTask{} }, + "emit": func() Task { return &EmitTask{} }, + "for": func() Task { return &ForTask{} }, + "listen": func() Task { return &ListenTask{} }, + "raise": func() Task { return &RaiseTask{} }, + "run": func() Task { return &RunTask{} }, + "set": func() Task { return &SetTask{} }, + "switch": func() Task { return &SwitchTask{} }, + "try": func() Task { return &TryTask{} }, + "wait": func() Task { return &WaitTask{} }, +} + +func unmarshalTask(key string, taskRaw json.RawMessage) (Task, error) { + var taskType map[string]interface{} + if err := json.Unmarshal(taskRaw, &taskType); err != nil { + return nil, fmt.Errorf("failed to parse task type for key '%s': %w", key, err) + } + + // Determine task type + var task Task + if callValue, hasCall := taskType["call"].(string); hasCall { + // Form composite key and check if it's in the registry + registryKey := fmt.Sprintf("call_%s", callValue) + if constructor, exists := taskTypeRegistry[registryKey]; exists { + task = constructor() + } else { + // Default to CallFunction for unrecognized call values + task = &CallFunction{} + } + } else { + // Handle non-call tasks (e.g., "do", "fork") + for typeKey := range taskType { + if constructor, exists := taskTypeRegistry[typeKey]; exists { + task = constructor() + break + } + } + } + + if task == nil { + return nil, fmt.Errorf("unknown task type for key '%s'", key) + } + + // Populate the task with raw data + if 
err := json.Unmarshal(taskRaw, task); err != nil { + return nil, fmt.Errorf("failed to unmarshal task '%s': %w", key, err) + } + + return task, nil +} + +// MarshalJSON for TaskList to ensure proper serialization. +func (tl *TaskList) MarshalJSON() ([]byte, error) { + return json.Marshal([]*TaskItem(*tl)) +} + +// Key retrieves a TaskItem by its key. +func (tl *TaskList) Key(key string) *TaskItem { + for _, item := range *tl { + if item.Key == key { + return item + } + } + return nil +} + +// AsTask extracts the TaskBase from the Task if the Task embeds TaskBase. +// Returns nil if the Task does not embed TaskBase. +func (ti *TaskItem) AsTask() *TaskBase { + if ti == nil || ti.Task == nil { + return nil + } + + // Use type assertions to check for TaskBase + switch task := ti.Task.(type) { + case *CallHTTP: + return &task.TaskBase + case *CallOpenAPI: + return &task.TaskBase + case *CallGRPC: + return &task.TaskBase + case *CallAsyncAPI: + return &task.TaskBase + case *CallFunction: + return &task.TaskBase + case *DoTask: + return &task.TaskBase + case *ForkTask: + return &task.TaskBase + case *EmitTask: + return &task.TaskBase + case *ForTask: + return &task.TaskBase + case *ListenTask: + return &task.TaskBase + case *RaiseTask: + return &task.TaskBase + case *RunTask: + return &task.TaskBase + case *SetTask: + return &task.TaskBase + case *SwitchTask: + return &task.TaskBase + case *TryTask: + return &task.TaskBase + case *WaitTask: + return &task.TaskBase + default: + // If the type does not embed TaskBase, return nil + return nil + } +} + +// AsCallHTTPTask casts the Task to a CallTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallHTTPTask() *CallHTTP { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallHTTP); ok { + return task + } + return nil +} + +// AsCallOpenAPITask casts the Task to a CallOpenAPI task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallOpenAPITask() *CallOpenAPI { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallOpenAPI); ok { + return task + } + return nil +} + +// AsCallGRPCTask casts the Task to a CallGRPC task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallGRPCTask() *CallGRPC { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallGRPC); ok { + return task + } + return nil +} + +// AsCallAsyncAPITask casts the Task to a CallAsyncAPI task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallAsyncAPITask() *CallAsyncAPI { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallAsyncAPI); ok { + return task + } + return nil +} + +// AsCallFunctionTask casts the Task to a CallFunction task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallFunctionTask() *CallFunction { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallFunction); ok { + return task + } + return nil +} + +// AsDoTask casts the Task to a DoTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsDoTask() *DoTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*DoTask); ok { + return task + } + return nil +} + +// AsForkTask casts the Task to a ForkTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsForkTask() *ForkTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ForkTask); ok { + return task + } + return nil +} + +// AsEmitTask casts the Task to an EmitTask if possible, returning nil if the cast fails. 
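
A minimal usage sketch of the machinery above, assuming an example file inside package model (the task name "getPet" and the endpoint are invented): TaskList unmarshaling routes each entry through unmarshalTask, Key looks an item up by its map key, and the As*Task accessors return the typed task or nil.

package model

import (
	"encoding/json"
	"fmt"
)

// Illustrative only: the task name and endpoint below are made up.
func ExampleTaskList() {
	raw := []byte(`[
		{"getPet": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}
	]`)

	var tasks TaskList
	if err := json.Unmarshal(raw, &tasks); err != nil { // each entry is dispatched via unmarshalTask
		fmt.Println("unmarshal error:", err)
		return
	}

	// Key finds the item by its map key; AsCallHTTPTask returns nil for any other task type.
	if httpTask := tasks.Key("getPet").AsCallHTTPTask(); httpTask != nil {
		fmt.Println(httpTask.With.Method, httpTask.With.Endpoint.String())
	}
}
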
+func (ti *TaskItem) AsEmitTask() *EmitTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*EmitTask); ok { + return task + } + return nil +} + +// AsForTask casts the Task to a ForTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsForTask() *ForTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ForTask); ok { + return task + } + return nil +} + +// AsListenTask casts the Task to a ListenTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsListenTask() *ListenTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ListenTask); ok { + return task + } + return nil +} + +// AsRaiseTask casts the Task to a RaiseTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsRaiseTask() *RaiseTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RaiseTask); ok { + return task + } + return nil +} + +// AsRunTask casts the Task to a RunTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsRunTask() *RunTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RunTask); ok { + return task + } + return nil +} + +// AsSetTask casts the Task to a SetTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSetTask() *SetTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SetTask); ok { + return task + } + return nil +} + +// AsSwitchTask casts the Task to a SwitchTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSwitchTask() *SwitchTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SwitchTask); ok { + return task + } + return nil +} + +// AsTryTask casts the Task to a TryTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsTryTask() *TryTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*TryTask); ok { + return task + } + return nil +} + +// AsWaitTask casts the Task to a WaitTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsWaitTask() *WaitTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*WaitTask); ok { + return task + } + return nil +} diff --git a/model/task_call.go b/model/task_call.go new file mode 100644 index 0000000..82412b0 --- /dev/null +++ b/model/task_call.go @@ -0,0 +1,112 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "encoding/json" + +type CallHTTP struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=http"` + With HTTPArguments `json:"with" validate:"required"` +} + +type HTTPArguments struct { + Method string `json:"method" validate:"required,oneofci=GET POST PUT DELETE PATCH"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` + Headers map[string]string `json:"headers,omitempty"` + Body json.RawMessage `json:"body,omitempty"` + Query map[string]interface{} `json:"query,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallOpenAPI struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=openapi"` + With OpenAPIArguments `json:"with" validate:"required"` +} + +type OpenAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + OperationID string `json:"operationId" validate:"required"` + Parameters map[string]interface{} `json:"parameters,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallGRPC struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=grpc"` + With GRPCArguments `json:"with" validate:"required"` +} + +type GRPCArguments struct { + Proto *ExternalResource `json:"proto" validate:"required"` + Service GRPCService `json:"service" validate:"required"` + Method string `json:"method" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type GRPCService struct { + Name string `json:"name" validate:"required"` + Host string `json:"host" validate:"required,hostname_rfc1123"` + Port int `json:"port" validate:"required,min=0,max=65535"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +type CallAsyncAPI struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=asyncapi"` + With AsyncAPIArguments `json:"with" validate:"required"` +} + +type AsyncAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + Channel string `json:"channel,omitempty"` + Operation string `json:"operation,omitempty"` + Server *AsyncAPIServer `json:"server,omitempty"` + Protocol string `json:"protocol,omitempty" validate:"omitempty,oneof=amqp amqp1 anypointmq googlepubsub http ibmmq jms kafka mercure mqtt mqtt5 nats pulsar redis sns solace sqs stomp ws"` + Message *AsyncAPIOutboundMessage `json:"message,omitempty"` + Subscription *AsyncAPISubscription `json:"subscription,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type AsyncAPIServer struct { + Name string `json:"name" validate:"required"` + Variables map[string]interface{} `json:"variables,omitempty"` +} + +type AsyncAPIOutboundMessage struct { + Payload map[string]interface{} `json:"payload,omitempty" validate:"omitempty"` + Headers map[string]interface{} `json:"headers,omitempty" validate:"omitempty"` +} + +type AsyncAPISubscription struct { + Filter *RuntimeExpression `json:"filter,omitempty"` + Consume *AsyncAPIMessageConsumptionPolicy `json:"consume" validate:"required"` +} + +type AsyncAPIMessageConsumptionPolicy struct { + For 
*Duration `json:"for,omitempty"` + Amount int `json:"amount,omitempty" validate:"required_without_all=While Until"` + While *RuntimeExpression `json:"while,omitempty" validate:"required_without_all=Amount Until"` + Until *RuntimeExpression `json:"until,omitempty" validate:"required_without_all=Amount While"` +} + +type CallFunction struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required"` + With map[string]interface{} `json:"with,omitempty"` +} diff --git a/model/task_call_test.go b/model/task_call_test.go new file mode 100644 index 0000000..0d10e69 --- /dev/null +++ b/model/task_call_test.go @@ -0,0 +1,480 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCallHTTP_MarshalJSON(t *testing.T) { + callHTTP := CallHTTP{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: string(FlowDirectiveContinue)}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com"}, + }, + Headers: map[string]string{ + "Authorization": "Bearer token", + }, + Query: map[string]interface{}{ + "q": "search", + }, + Output: "content", + }, + } + + data, err := json.Marshal(callHTTP) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }`, string(data)) +} + +func TestCallHTTP_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }` + + var callHTTP CallHTTP + err := json.Unmarshal([]byte(jsonData), &callHTTP) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${condition}"}, callHTTP.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callHTTP.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: 
map[string]interface{}{"result": "output"}}}, callHTTP.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callHTTP.Timeout) + assert.Equal(t, &FlowDirective{Value: string(FlowDirectiveContinue)}, callHTTP.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callHTTP.Metadata) + assert.Equal(t, "http", callHTTP.Call) + assert.Equal(t, "GET", callHTTP.With.Method) + assert.Equal(t, "http://example.com", callHTTP.With.Endpoint.String()) + assert.Equal(t, map[string]string{"Authorization": "Bearer token"}, callHTTP.With.Headers) + assert.Equal(t, map[string]interface{}{"q": "search"}, callHTTP.With.Query) + assert.Equal(t, "content", callHTTP.With.Output) +} + +func TestCallOpenAPI_MarshalJSON(t *testing.T) { + authPolicy := "my-auth" + callOpenAPI := CallOpenAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "MyOpenAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/openapi.json"}, + }, + }, + OperationID: "getUsers", + Parameters: map[string]interface{}{ + "param1": "value1", + "param2": "value2", + }, + Authentication: &ReferenceableAuthenticationPolicy{ + Use: &authPolicy, + }, + Output: "content", + }, + } + + data, err := json.Marshal(callOpenAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": "http://example.com/openapi.json" + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }`, string(data)) +} + +func TestCallOpenAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": { "uri": "http://example.com/openapi.json" } + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }` + + var callOpenAPI CallOpenAPI + err := json.Unmarshal([]byte(jsonData), &callOpenAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callOpenAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callOpenAPI.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callOpenAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callOpenAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callOpenAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": 
"data"}, callOpenAPI.Metadata) + assert.Equal(t, "openapi", callOpenAPI.Call) + assert.Equal(t, "MyOpenAPIDoc", callOpenAPI.With.Document.Name) + assert.Equal(t, "http://example.com/openapi.json", callOpenAPI.With.Document.Endpoint.EndpointConfig.URI.String()) + assert.Equal(t, "getUsers", callOpenAPI.With.OperationID) + assert.Equal(t, map[string]interface{}{"param1": "value1", "param2": "value2"}, callOpenAPI.With.Parameters) + assert.Equal(t, "my-auth", *callOpenAPI.With.Authentication.Use) + assert.Equal(t, "content", callOpenAPI.With.Output) +} + +func TestCallGRPC_MarshalJSON(t *testing.T) { + callGRPC := CallGRPC{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "grpc", + With: GRPCArguments{ + Proto: &ExternalResource{ + Name: "MyProtoFile", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/protofile"}, + }, + }, + Service: GRPCService{ + Name: "UserService", + Host: "example.com", + Port: 50051, + }, + Method: "GetUser", + Arguments: map[string]interface{}{"userId": "12345"}, + }, + } + + data, err := json.Marshal(callGRPC) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }`, string(data)) +} + +func TestCallGRPC_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }` + + var callGRPC CallGRPC + err := json.Unmarshal([]byte(jsonData), &callGRPC) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callGRPC.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callGRPC.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callGRPC.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callGRPC.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callGRPC.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callGRPC.Metadata) + assert.Equal(t, "grpc", callGRPC.Call) + assert.Equal(t, "MyProtoFile", callGRPC.With.Proto.Name) + assert.Equal(t, "http://example.com/protofile", callGRPC.With.Proto.Endpoint.String()) + assert.Equal(t, "UserService", callGRPC.With.Service.Name) + assert.Equal(t, "example.com", callGRPC.With.Service.Host) + 
assert.Equal(t, 50051, callGRPC.With.Service.Port) + assert.Equal(t, "GetUser", callGRPC.With.Method) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callGRPC.With.Arguments) +} + +func TestCallAsyncAPI_MarshalJSON(t *testing.T) { + callAsyncAPI := CallAsyncAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "asyncapi", + With: AsyncAPIArguments{ + Document: &ExternalResource{ + Name: "MyAsyncAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/asyncapi.json"}, + }, + }, + Operation: "user.signup", + Server: &AsyncAPIServer{Name: "default-server"}, + Message: &AsyncAPIOutboundMessage{Payload: map[string]interface{}{"userId": "12345"}}, + Protocol: "http", + }, + } + + data, err := json.Marshal(callAsyncAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server" }, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + } + } + }`, string(data)) +} + +func TestCallAsyncAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server"}, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + }, + "authentication": { + "use": "asyncapi-auth-policy" + } + } + }` + + var callAsyncAPI CallAsyncAPI + err := json.Unmarshal([]byte(jsonData), &callAsyncAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callAsyncAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callAsyncAPI.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callAsyncAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callAsyncAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callAsyncAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callAsyncAPI.Metadata) + assert.Equal(t, "asyncapi", callAsyncAPI.Call) + assert.Equal(t, "MyAsyncAPIDoc", callAsyncAPI.With.Document.Name) + assert.Equal(t, "http://example.com/asyncapi.json", callAsyncAPI.With.Document.Endpoint.String()) + assert.Equal(t, "user.signup", callAsyncAPI.With.Operation) + assert.Equal(t, "default-server", callAsyncAPI.With.Server.Name) + assert.Equal(t, "http", callAsyncAPI.With.Protocol) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callAsyncAPI.With.Message.Payload) + assert.Equal(t, 
"asyncapi-auth-policy", *callAsyncAPI.With.Authentication.Use) +} + +func TestCallFunction_MarshalJSON(t *testing.T) { + callFunction := CallFunction{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "myFunction", + With: map[string]interface{}{ + "param1": "value1", + "param2": 42, + }, + } + + data, err := json.Marshal(callFunction) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }`, string(data)) +} + +func TestCallFunction_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }` + + var callFunction CallFunction + err := json.Unmarshal([]byte(jsonData), &callFunction) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callFunction.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callFunction.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callFunction.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callFunction.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callFunction.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callFunction.Metadata) + assert.Equal(t, "myFunction", callFunction.Call) + + // Adjust numeric values for comparison + expectedWith := map[string]interface{}{ + "param1": "value1", + "param2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedWith, callFunction.With) +} diff --git a/model/action_data_filter_validator_test.go b/model/task_do.go similarity index 61% rename from model/action_data_filter_validator_test.go rename to model/task_do.go index df52da0..0b2673d 100644 --- a/model/action_data_filter_validator_test.go +++ b/model/task_do.go @@ -1,10 +1,10 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,8 @@ package model -import "testing" - -func TestActionDataFilterStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) +// DoTask represents a task configuration to execute tasks sequentially. 
+type DoTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Do *TaskList `json:"do" validate:"required,dive"` } diff --git a/model/task_do_test.go b/model/task_do_test.go new file mode 100644 index 0000000..4a337fe --- /dev/null +++ b/model/task_do_test.go @@ -0,0 +1,103 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDoTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var doTask DoTask + err := json.Unmarshal([]byte(jsonData), &doTask) + assert.NoError(t, err) + + task1 := doTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := doTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestDoTask_MarshalJSON(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(doTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestDoTask_Validation(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(doTask) + assert.Error(t, err) +} diff --git a/model/task_event.go b/model/task_event.go new file mode 100644 index 0000000..8b97388 --- /dev/null +++ b/model/task_event.go @@ -0,0 +1,282 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +// EmitTask represents the configuration for emitting events. +type EmitTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Emit EmitTaskConfiguration `json:"emit" validate:"required"` +} + +func (e *EmitTask) MarshalJSON() ([]byte, error) { + type Alias EmitTask // Prevent recursion + return json.Marshal((*Alias)(e)) +} + +// ListenTask represents a task configuration to listen to events. +type ListenTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Listen ListenTaskConfiguration `json:"listen" validate:"required"` +} + +type ListenTaskConfiguration struct { + To *EventConsumptionStrategy `json:"to" validate:"required"` +} + +// MarshalJSON for ListenTask to ensure proper serialization. +func (lt *ListenTask) MarshalJSON() ([]byte, error) { + type Alias ListenTask + return json.Marshal((*Alias)(lt)) +} + +// UnmarshalJSON for ListenTask to ensure proper deserialization. +func (lt *ListenTask) UnmarshalJSON(data []byte) error { + type Alias ListenTask + alias := (*Alias)(lt) + return json.Unmarshal(data, alias) +} + +type EmitTaskConfiguration struct { + Event EmitEventDefinition `json:"event" validate:"required"` +} + +type EmitEventDefinition struct { + With *EventProperties `json:"with" validate:"required"` +} + +type EventProperties struct { + ID string `json:"id,omitempty"` + Source *URITemplateOrRuntimeExpr `json:"source,omitempty" validate:"omitempty"` // URI template or runtime expression + Type string `json:"type,omitempty"` + Time *StringOrRuntimeExpr `json:"time,omitempty" validate:"omitempty,string_or_runtime_expr"` // ISO 8601 date-time string or runtime expression + Subject string `json:"subject,omitempty"` + DataContentType string `json:"datacontenttype,omitempty"` + DataSchema *URITemplateOrRuntimeExpr `json:"dataschema,omitempty" validate:"omitempty"` // URI template or runtime expression + Additional map[string]interface{} `json:"-"` +} + +// UnmarshalJSON implements custom unmarshaling for EventProperties. 
+func (e *EventProperties) UnmarshalJSON(data []byte) error { + type Alias EventProperties // Prevent recursion + alias := &struct { + Additional map[string]interface{} `json:"-"` // Inline the additional properties + *Alias + }{ + Alias: (*Alias)(e), + } + + // Decode the entire JSON into a map to capture additional properties + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties: %w", err) + } + + // Unmarshal known fields into the alias + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties fields: %w", err) + } + + // Validate fields requiring custom unmarshaling + if e.Source != nil && e.Source.Value == nil { + return fmt.Errorf("invalid Source: must be a valid URI template or runtime expression") + } + + if e.DataSchema != nil && e.DataSchema.Value == nil { + return fmt.Errorf("invalid DataSchema: must be a valid URI template or runtime expression") + } + + // Extract additional properties by removing known keys + for key := range raw { + switch key { + case "id", "source", "type", "time", "subject", "datacontenttype", "dataschema": + delete(raw, key) + } + } + + e.Additional = raw + return nil +} + +// MarshalJSON implements custom marshaling for EventProperties. +func (e *EventProperties) MarshalJSON() ([]byte, error) { + // Create a map for known fields + known := make(map[string]interface{}) + + if e.ID != "" { + known["id"] = e.ID + } + if e.Source != nil { + known["source"] = e.Source + } + if e.Type != "" { + known["type"] = e.Type + } + if e.Time != nil { + known["time"] = e.Time + } + if e.Subject != "" { + known["subject"] = e.Subject + } + if e.DataContentType != "" { + known["datacontenttype"] = e.DataContentType + } + if e.DataSchema != nil { + known["dataschema"] = e.DataSchema + } + + // Merge additional properties + for key, value := range e.Additional { + known[key] = value + } + + return json.Marshal(known) +} + +// EventFilter defines a mechanism to filter events based on predefined criteria. +type EventFilter struct { + With *EventProperties `json:"with" validate:"required"` + Correlate map[string]Correlation `json:"correlate,omitempty" validate:"omitempty,dive"` // Keyed correlation filters +} + +// Correlation defines the mapping of event attributes for correlation. +type Correlation struct { + From string `json:"from" validate:"required"` // Runtime expression to extract the correlation value + Expect string `json:"expect,omitempty"` // Expected value or runtime expression +} + +// EventConsumptionStrategy defines the consumption strategy for events. +type EventConsumptionStrategy struct { + All []*EventFilter `json:"all,omitempty" validate:"omitempty,dive"` + Any []*EventFilter `json:"any,omitempty" validate:"omitempty,dive"` + One *EventFilter `json:"one,omitempty" validate:"omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty" validate:"omitempty"` +} + +// EventConsumptionUntil handles the complex conditions of the "until" field. +type EventConsumptionUntil struct { + Condition *RuntimeExpression `json:"-" validate:"omitempty"` + Strategy *EventConsumptionStrategy `json:"-" validate:"omitempty"` + IsDisabled bool `json:"-"` // True when "until: false" +} + +// UnmarshalJSON for EventConsumptionUntil to handle the "oneOf" behavior. 
+func (ecu *EventConsumptionUntil) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventConsumptionUntil: %w", err) + } + + switch v := raw.(type) { + case bool: + if !v { + ecu.IsDisabled = true + } else { + return fmt.Errorf("invalid value for 'until': true is not supported") + } + case string: + ecu.Condition = &RuntimeExpression{Value: v} + case map[string]interface{}: + strategyData, err := json.Marshal(v) + if err != nil { + return fmt.Errorf("failed to marshal 'until' strategy: %w", err) + } + var strategy EventConsumptionStrategy + if err := json.Unmarshal(strategyData, &strategy); err != nil { + return fmt.Errorf("failed to unmarshal 'until' strategy: %w", err) + } + ecu.Strategy = &strategy + default: + return fmt.Errorf("invalid type for 'until'") + } + + return nil +} + +// MarshalJSON for EventConsumptionUntil to handle proper serialization. +func (ecu *EventConsumptionUntil) MarshalJSON() ([]byte, error) { + if ecu.IsDisabled { + return json.Marshal(false) + } + if ecu.Condition != nil { + // Serialize the condition directly + return json.Marshal(ecu.Condition.Value) + } + if ecu.Strategy != nil { + // Serialize the nested strategy + return json.Marshal(ecu.Strategy) + } + // Return null if nothing is set + return json.Marshal(nil) +} + +// UnmarshalJSON for EventConsumptionStrategy to enforce "oneOf" behavior and handle edge cases. +func (ecs *EventConsumptionStrategy) UnmarshalJSON(data []byte) error { + temp := struct { + All []*EventFilter `json:"all"` + Any []*EventFilter `json:"any"` + One *EventFilter `json:"one"` + Until *EventConsumptionUntil `json:"until"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields (ignoring empty lists for `all` and `any`) + count := 0 + if len(temp.All) > 0 { + count++ + ecs.All = temp.All + } + if len(temp.Any) > 0 || temp.Until != nil { + count++ + ecs.Any = temp.Any + ecs.Until = temp.Until + } + if temp.One != nil { + count++ + ecs.One = temp.One + } + + // Ensure only one primary field (all, any, one) is set + if count > 1 { + return errors.New("invalid EventConsumptionStrategy: only one primary strategy type (all, any, or one) must be specified") + } + + return nil +} + +// MarshalJSON for EventConsumptionStrategy to ensure proper serialization. +func (ecs *EventConsumptionStrategy) MarshalJSON() ([]byte, error) { + temp := struct { + All []*EventFilter `json:"all,omitempty"` + Any []*EventFilter `json:"any,omitempty"` + One *EventFilter `json:"one,omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty"` + }{ + All: ecs.All, + Any: ecs.Any, + One: ecs.One, + Until: ecs.Until, + } + + return json.Marshal(temp) +} diff --git a/model/task_event_test.go b/model/task_event_test.go new file mode 100644 index 0000000..45c92a7 --- /dev/null +++ b/model/task_event_test.go @@ -0,0 +1,231 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
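Because the "until" handling above is the subtlest part of the consumption strategy, here is a sketch of the three JSON shapes EventConsumptionUntil.UnmarshalJSON accepts: boolean false, a runtime expression string, and a nested strategy object ("until": true is rejected). It assumes it compiles alongside the code above in package model; the event type is a placeholder.

func sketchUntilShapes() error {
	docs := []string{
		`{"any": [], "until": false}`,
		`{"any": [], "until": "workflow.data.condition == true"}`,
		`{"any": [], "until": {"one": {"with": {"type": "com.example.stop.v1"}}}}`,
	}
	for _, doc := range docs {
		var strategy EventConsumptionStrategy
		if err := json.Unmarshal([]byte(doc), &strategy); err != nil {
			return err
		}
	}
	return nil
}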
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEmitTask_MarshalJSON(t *testing.T) { + emitTask := &EmitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Emit: EmitTaskConfiguration{ + Event: EmitEventDefinition{ + With: &EventProperties{ + ID: "event-id", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + Type: "example.event.type", + Time: &StringOrRuntimeExpr{Value: "2023-01-01T00:00:00Z"}, + Subject: "example.subject", + DataContentType: "application/json", + DataSchema: &URITemplateOrRuntimeExpr{Value: "http://example.com/schema"}, + Additional: map[string]interface{}{ + "extra": "value", + }, + }, + }, + }, + } + + data, err := json.Marshal(emitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }`, string(data)) +} + +func TestEmitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }` + + var emitTask EmitTask + err := json.Unmarshal([]byte(jsonData), &emitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, emitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, emitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, emitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, emitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, emitTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, emitTask.Metadata) + assert.Equal(t, "event-id", emitTask.Emit.Event.With.ID) + assert.Equal(t, "http://example.com/source", emitTask.Emit.Event.With.Source.String()) + assert.Equal(t, "example.event.type", emitTask.Emit.Event.With.Type) + assert.Equal(t, "2023-01-01T00:00:00Z", emitTask.Emit.Event.With.Time.String()) + assert.Equal(t, "example.subject", emitTask.Emit.Event.With.Subject) + assert.Equal(t, "application/json", emitTask.Emit.Event.With.DataContentType) + 
assert.Equal(t, "http://example.com/schema", emitTask.Emit.Event.With.DataSchema.String()) + assert.Equal(t, map[string]interface{}{"extra": "value"}, emitTask.Emit.Event.With.Additional) +} + +func TestListenTask_MarshalJSON_WithUntilCondition(t *testing.T) { + listenTask := ListenTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Listen: ListenTaskConfiguration{ + To: &EventConsumptionStrategy{ + Any: []*EventFilter{ + { + With: &EventProperties{ + Type: "example.event.type", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + }, + }, + }, + Until: &EventConsumptionUntil{ + Condition: NewRuntimeExpression("workflow.data.condition == true"), + }, + }, + }, + } + + data, err := json.Marshal(listenTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "listen": { + "to": { + "any": [ + { + "with": { + "type": "example.event.type", + "source": "http://example.com/source" + } + } + ], + "until": "workflow.data.condition == true" + } + } + }`, string(data)) +} + +func TestEventConsumptionUntil_MarshalJSON(t *testing.T) { + tests := []struct { + name string + until *EventConsumptionUntil + expected string + shouldErr bool + }{ + { + name: "Until Disabled", + until: &EventConsumptionUntil{ + IsDisabled: true, + }, + expected: `false`, + shouldErr: false, + }, + { + name: "Until Condition Set", + until: &EventConsumptionUntil{ + Condition: &RuntimeExpression{Value: "workflow.data.condition == true"}, + }, + expected: `"workflow.data.condition == true"`, + shouldErr: false, + }, + { + name: "Until Nested Strategy", + until: &EventConsumptionUntil{ + Strategy: &EventConsumptionStrategy{ + One: &EventFilter{ + With: &EventProperties{Type: "example.event.type"}, + }, + }, + }, + expected: `{"one":{"with":{"type":"example.event.type"}}}`, + shouldErr: false, + }, + { + name: "Until Nil", + until: &EventConsumptionUntil{}, + expected: `null`, + shouldErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.until) + if test.shouldErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expected, string(data)) + } + }) + } +} diff --git a/model/task_for.go b/model/task_for.go new file mode 100644 index 0000000..0e6811b --- /dev/null +++ b/model/task_for.go @@ -0,0 +1,30 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +// ForTask represents a task configuration to iterate over a collection. +type ForTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + For ForTaskConfiguration `json:"for" validate:"required"` + While string `json:"while,omitempty"` + Do *TaskList `json:"do" validate:"required,dive"` +} + +// ForTaskConfiguration defines the loop configuration for iterating over a collection. +type ForTaskConfiguration struct { + Each string `json:"each,omitempty"` // Variable name for the current item + In string `json:"in" validate:"required"` // Runtime expression for the collection + At string `json:"at,omitempty"` // Variable name for the current index +} diff --git a/model/task_for_test.go b/model/task_for_test.go new file mode 100644 index 0000000..e24bf3b --- /dev/null +++ b/model/task_for_test.go @@ -0,0 +1,150 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "sigs.k8s.io/yaml" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestForTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var forTask ForTask + err := json.Unmarshal([]byte(jsonData), &forTask) + assert.NoError(t, err) + assert.Equal(t, "item", forTask.For.Each) + assert.Equal(t, "${items}", forTask.For.In) + assert.Equal(t, "index", forTask.For.At) + assert.Equal(t, "${condition}", forTask.While) + + task1 := forTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := forTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForTask_MarshalJSON(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(forTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": 
"http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestForTask_Validation(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(forTask) + assert.Error(t, err) +} + +func TestForTaskValidation(t *testing.T) { + rawYaml := ` +for: + each: pet + in: .pets + at: index +while: .vet != null +do: + - waitForCheckup: + listen: + to: + one: + with: + type: com.fake.petclinic.pets.checkup.completed.v2 + output: + as: '.pets + [{ "id": $pet.id }]' +` + + var forTask ForTask + err := yaml.Unmarshal([]byte(rawYaml), &forTask) + assert.NoError(t, err, "Failed to unmarshal ForTask") + + err = validate.Struct(forTask) + assert.NoError(t, err, "Failed to validate ForTask") +} diff --git a/model/task_fork.go b/model/task_fork.go new file mode 100644 index 0000000..3019d06 --- /dev/null +++ b/model/task_fork.go @@ -0,0 +1,27 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +// ForkTask represents a task configuration to execute multiple tasks concurrently. +type ForkTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Fork ForkTaskConfiguration `json:"fork" validate:"required"` +} + +// ForkTaskConfiguration defines the configuration for the branches to perform concurrently. +type ForkTaskConfiguration struct { + Branches *TaskList `json:"branches" validate:"required,dive"` + Compete bool `json:"compete,omitempty"` +} diff --git a/model/task_fork_test.go b/model/task_fork_test.go new file mode 100644 index 0000000..04b4f19 --- /dev/null +++ b/model/task_fork_test.go @@ -0,0 +1,116 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestForkTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ], + "compete": true + } + }` + + var forkTask ForkTask + err := json.Unmarshal([]byte(jsonData), &forkTask) + assert.NoError(t, err) + assert.Equal(t, true, forkTask.Fork.Compete) + + task1 := forkTask.Fork.Branches.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := forkTask.Fork.Branches.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForkTask_MarshalJSON(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + data, err := json.Marshal(forkTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ], + "compete": true + } + }`, string(data)) +} + +func TestForkTask_Validation(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + err := validate.Struct(forkTask) + assert.Error(t, err) +} diff --git a/model/task_raise.go b/model/task_raise.go new file mode 100644 index 0000000..b0c7499 --- /dev/null +++ b/model/task_raise.go @@ -0,0 +1,84 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
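The fork tests above always set compete to true, so here is a sketch of the non-competing form of ForkTask: Compete is tagged omitempty, so the default false value drops the field from the output entirely. Written against the API shown above; the branch endpoints are placeholders.

func sketchForkAllBranches() ([]byte, error) {
	fork := ForkTask{
		Fork: ForkTaskConfiguration{
			Branches: &TaskList{
				{Key: "callA", Task: &CallHTTP{
					Call: "http",
					With: HTTPArguments{Method: "GET", Endpoint: NewEndpoint("http://a.example.com")},
				}},
				{Key: "callB", Task: &CallHTTP{
					Call: "http",
					With: HTTPArguments{Method: "GET", Endpoint: NewEndpoint("http://b.example.com")},
				}},
			},
		},
	}
	// Expected shape: {"fork":{"branches":[{"callA":{...}},{"callB":{...}}]}} with no "compete" key.
	return json.Marshal(fork)
}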
+ +package model + +import ( + "encoding/json" + "errors" +) + +type Error struct { + Type *URITemplateOrRuntimeExpr `json:"type" validate:"required"` + Status int `json:"status" validate:"required"` + Title string `json:"title,omitempty"` + Detail string `json:"detail,omitempty"` + Instance *JsonPointerOrRuntimeExpression `json:"instance,omitempty" validate:"omitempty"` +} + +type ErrorFilter struct { + Type string `json:"type,omitempty"` + Status int `json:"status,omitempty"` + Instance string `json:"instance,omitempty"` + Title string `json:"title,omitempty"` + Details string `json:"details,omitempty"` +} + +// RaiseTask represents a task configuration to raise errors. +type RaiseTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Raise RaiseTaskConfiguration `json:"raise" validate:"required"` +} + +type RaiseTaskConfiguration struct { + Error RaiseTaskError `json:"error" validate:"required"` +} + +type RaiseTaskError struct { + Definition *Error `json:"-"` + Ref *string `json:"-"` +} + +// UnmarshalJSON for RaiseTaskError to enforce "oneOf" behavior. +func (rte *RaiseTaskError) UnmarshalJSON(data []byte) error { + // Try to unmarshal into a string (Ref) + var ref string + if err := json.Unmarshal(data, &ref); err == nil { + rte.Ref = &ref + rte.Definition = nil + return nil + } + + // Try to unmarshal into an Error (Definition) + var def Error + if err := json.Unmarshal(data, &def); err == nil { + rte.Definition = &def + rte.Ref = nil + return nil + } + + // If neither worked, return an error + return errors.New("invalid RaiseTaskError: data must be either a string (reference) or an object (definition)") +} + +// MarshalJSON for RaiseTaskError to ensure proper serialization. +func (rte *RaiseTaskError) MarshalJSON() ([]byte, error) { + if rte.Definition != nil { + return json.Marshal(rte.Definition) + } + if rte.Ref != nil { + return json.Marshal(*rte.Ref) + } + return nil, errors.New("invalid RaiseTaskError: neither 'definition' nor 'reference' is set") +} diff --git a/model/task_raise_test.go b/model/task_raise_test.go new file mode 100644 index 0000000..49ede54 --- /dev/null +++ b/model/task_raise_test.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
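The raise tests that follow only use the inline definition form, so this sketch shows the other branch RaiseTaskError.UnmarshalJSON accepts: a plain string that references a reusable error. The reference name is a placeholder, and the code assumes the same package-model context as the tests.

func sketchRaiseByReference() (*RaiseTask, error) {
	data := []byte(`{"raise": {"error": "notImplemented"}}`)
	var task RaiseTask
	if err := json.Unmarshal(data, &task); err != nil {
		return nil, err
	}
	// task.Raise.Error.Ref now points at "notImplemented" and Definition stays nil.
	return &task, nil
}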
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRaiseTask_MarshalJSON(t *testing.T) { + raiseTask := &RaiseTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Raise: RaiseTaskConfiguration{ + Error: RaiseTaskError{ + Definition: &Error{ + Type: &URITemplateOrRuntimeExpr{Value: "http://example.com/error"}, + Status: 500, + Title: "Internal Server Error", + Detail: "An unexpected error occurred.", + }, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }`, string(data)) +} + +func TestRaiseTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }` + + var raiseTask *RaiseTask + err := json.Unmarshal([]byte(jsonData), &raiseTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, raiseTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, raiseTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, raiseTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, raiseTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, raiseTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, raiseTask.Metadata) + assert.Equal(t, "http://example.com/error", raiseTask.Raise.Error.Definition.Type.String()) + assert.Equal(t, 500, raiseTask.Raise.Error.Definition.Status) + assert.Equal(t, "Internal Server Error", raiseTask.Raise.Error.Definition.Title) + assert.Equal(t, "An unexpected error occurred.", raiseTask.Raise.Error.Definition.Detail) +} diff --git a/model/task_run.go b/model/task_run.go new file mode 100644 index 0000000..6942013 --- /dev/null +++ b/model/task_run.go @@ -0,0 +1,124 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// RunTask represents a task configuration to execute external processes. +type RunTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Run RunTaskConfiguration `json:"run" validate:"required"` +} + +type RunTaskConfiguration struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` +} + +type Container struct { + Image string `json:"image" validate:"required"` + Command string `json:"command,omitempty"` + Ports map[string]interface{} `json:"ports,omitempty"` + Volumes map[string]interface{} `json:"volumes,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type Script struct { + Language string `json:"language" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` + InlineCode *string `json:"code,omitempty"` + External *ExternalResource `json:"source,omitempty"` +} + +type Shell struct { + Command string `json:"command" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type RunWorkflow struct { + Namespace string `json:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" validate:"required,semver_pattern"` + Input map[string]interface{} `json:"input,omitempty"` +} + +// UnmarshalJSON for RunTaskConfiguration to enforce "oneOf" behavior. +func (rtc *RunTaskConfiguration) UnmarshalJSON(data []byte) error { + temp := struct { + Await *bool `json:"await"` + Container *Container `json:"container"` + Script *Script `json:"script"` + Shell *Shell `json:"shell"` + Workflow *RunWorkflow `json:"workflow"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if temp.Container != nil { + count++ + rtc.Container = temp.Container + } + if temp.Script != nil { + count++ + rtc.Script = temp.Script + } + if temp.Shell != nil { + count++ + rtc.Shell = temp.Shell + } + if temp.Workflow != nil { + count++ + rtc.Workflow = temp.Workflow + } + + // Ensure only one of the options is set + if count != 1 { + return errors.New("invalid RunTaskConfiguration: only one of 'container', 'script', 'shell', or 'workflow' must be specified") + } + + rtc.Await = temp.Await + return nil +} + +// MarshalJSON for RunTaskConfiguration to ensure proper serialization. 
+func (rtc *RunTaskConfiguration) MarshalJSON() ([]byte, error) { + temp := struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` + }{ + Await: rtc.Await, + Container: rtc.Container, + Script: rtc.Script, + Shell: rtc.Shell, + Workflow: rtc.Workflow, + } + + return json.Marshal(temp) +} diff --git a/model/task_run_test.go b/model/task_run_test.go new file mode 100644 index 0000000..026b9c8 --- /dev/null +++ b/model/task_run_test.go @@ -0,0 +1,196 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRunTask_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Container: &Container{ + Image: "example-image", + Command: "example-command", + Ports: map[string]interface{}{ + "8080": "80", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }`, string(data)) +} + +func TestRunTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: 
NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "example-image", runTask.Run.Container.Image) + assert.Equal(t, "example-command", runTask.Run.Container.Command) + assert.Equal(t, map[string]interface{}{"8080": "80"}, runTask.Run.Container.Ports) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Container.Environment) +} + +func TestRunTaskScript_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Script: &Script{ + Language: "python", + Arguments: map[string]interface{}{ + "arg1": "value1", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + InlineCode: stringPtr("print('Hello, World!')"), + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }`, string(data)) +} + +func TestRunTaskScript_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "python", runTask.Run.Script.Language) + assert.Equal(t, map[string]interface{}{"arg1": "value1"}, runTask.Run.Script.Arguments) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Script.Environment) + assert.Equal(t, "print('Hello, World!')", *runTask.Run.Script.InlineCode) +} + +func boolPtr(b bool) *bool { + return &b +} + +func stringPtr(s string) *string { + return &s +} diff --git a/model/task_set.go b/model/task_set.go new file mode 100644 index 0000000..654c48f --- /dev/null +++ b/model/task_set.go @@ -0,0 
+1,36 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SetTask represents a task used to set data. +type SetTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Set map[string]interface{} `json:"set" validate:"required,min=1,dive"` +} + +// MarshalJSON for SetTask to ensure proper serialization. +func (st *SetTask) MarshalJSON() ([]byte, error) { + type Alias SetTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SetTask to ensure proper deserialization. +func (st *SetTask) UnmarshalJSON(data []byte) error { + type Alias SetTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_set_test.go b/model/task_set_test.go new file mode 100644 index 0000000..49781af --- /dev/null +++ b/model/task_set_test.go @@ -0,0 +1,104 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
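Looking back at RunTaskConfiguration above: the run tests cover the container and script variants, so this sketch shows the shell variant instead, reusing the boolPtr helper from those tests. Exactly one of container, script, shell, or workflow may be set, otherwise UnmarshalJSON rejects the document; the command and environment are placeholders.

func sketchRunShell() ([]byte, error) {
	task := RunTask{
		Run: RunTaskConfiguration{
			Await: boolPtr(true),
			Shell: &Shell{
				Command:     "echo hello",
				Environment: map[string]string{"GREETING": "hello"},
			},
		},
	}
	// Expected: {"run":{"await":true,"shell":{"command":"echo hello","environment":{"GREETING":"hello"}}}}
	return json.Marshal(task)
}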
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSetTask_MarshalJSON(t *testing.T) { + setTask := SetTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Set: map[string]interface{}{ + "key1": "value1", + "key2": 42, + }, + } + + data, err := json.Marshal(setTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }`, string(data)) +} + +func TestSetTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }` + + var setTask SetTask + err := json.Unmarshal([]byte(jsonData), &setTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, setTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, setTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, setTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, setTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, setTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, setTask.Metadata) + expectedSet := map[string]interface{}{ + "key1": "value1", + "key2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedSet, setTask.Set) +} + +func TestSetTask_Validation(t *testing.T) { + // Valid SetTask + setTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{ + "key": "value", + }, + } + assert.NoError(t, validate.Struct(setTask)) + + // Invalid SetTask (empty set) + invalidSetTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{}, + } + assert.Error(t, validate.Struct(invalidSetTask)) +} diff --git a/model/task_switch.go b/model/task_switch.go new file mode 100644 index 0000000..d63b2e7 --- /dev/null +++ b/model/task_switch.go @@ -0,0 +1,44 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SwitchTask represents a task configuration for conditional branching. 
+type SwitchTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Switch []SwitchItem `json:"switch" validate:"required,min=1,dive,switch_item"` +} + +type SwitchItem map[string]SwitchCase + +// SwitchCase defines a condition and the corresponding outcome for a switch task. +type SwitchCase struct { + When *RuntimeExpression `json:"when,omitempty"` + Then *FlowDirective `json:"then" validate:"required"` +} + +// MarshalJSON for SwitchTask to ensure proper serialization. +func (st *SwitchTask) MarshalJSON() ([]byte, error) { + type Alias SwitchTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SwitchTask to ensure proper deserialization. +func (st *SwitchTask) UnmarshalJSON(data []byte) error { + type Alias SwitchTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_switch_test.go b/model/task_switch_test.go new file mode 100644 index 0000000..3c40b5a --- /dev/null +++ b/model/task_switch_test.go @@ -0,0 +1,151 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSwitchTask_MarshalJSON(t *testing.T) { + switchTask := &SwitchTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + { + "case2": SwitchCase{ + When: &RuntimeExpression{Value: "${condition2}"}, + Then: &FlowDirective{Value: "end"}, + }, + }, + }, + } + + data, err := json.Marshal(switchTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }`, string(data)) +} + +func TestSwitchTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }` + + var switchTask SwitchTask + err := json.Unmarshal([]byte(jsonData), &switchTask) + assert.NoError(t, err) + 
assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, switchTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, switchTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, switchTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, switchTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, switchTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, switchTask.Metadata) + assert.Equal(t, 2, len(switchTask.Switch)) + assert.Equal(t, &RuntimeExpression{Value: "${condition1}"}, switchTask.Switch[0]["case1"].When) + assert.Equal(t, &FlowDirective{Value: "next"}, switchTask.Switch[0]["case1"].Then) + assert.Equal(t, &RuntimeExpression{Value: "${condition2}"}, switchTask.Switch[1]["case2"].When) + assert.Equal(t, &FlowDirective{Value: "end"}, switchTask.Switch[1]["case2"].Then) +} + +func TestSwitchTask_Validation(t *testing.T) { + // Valid SwitchTask + switchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + }, + } + assert.NoError(t, validate.Struct(switchTask)) + + // Invalid SwitchTask (empty switch) + invalidSwitchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{}, + } + assert.Error(t, validate.Struct(invalidSwitchTask)) + + // Invalid SwitchTask (SwitchItem with multiple keys) + invalidSwitchItemTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{When: &RuntimeExpression{Value: "${condition1}"}, Then: &FlowDirective{Value: "next"}}, + "case2": SwitchCase{When: &RuntimeExpression{Value: "${condition2}"}, Then: &FlowDirective{Value: "end"}}, + }, + }, + } + assert.Error(t, validate.Struct(invalidSwitchItemTask)) +} diff --git a/model/task_test.go b/model/task_test.go new file mode 100644 index 0000000..6fa5019 --- /dev/null +++ b/model/task_test.go @@ -0,0 +1,121 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
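The switch tests above always pair "when" with "then"; since SwitchCase only requires "then", a trailing case without "when" serializes with just the directive and effectively acts as the default branch. A sketch with placeholder case names and directives, under the usual package-model assumptions:

func sketchSwitchWithDefault() ([]byte, error) {
	task := SwitchTask{
		Switch: []SwitchItem{
			{"highPriority": SwitchCase{
				When: &RuntimeExpression{Value: "${ .priority == \"high\" }"},
				Then: &FlowDirective{Value: "escalate"},
			}},
			{"anythingElse": SwitchCase{
				Then: &FlowDirective{Value: "continue"},
			}},
		},
	}
	// Expected: {"switch":[{"highPriority":{"when":"${ .priority == \"high\" }","then":"escalate"}},
	//                      {"anythingElse":{"then":"continue"}}]}
	return json.Marshal(task)
}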
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestTaskList_UnmarshalJSON(t *testing.T) { + jsonData := `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}}]}} + ]` + + var taskList TaskList + err := json.Unmarshal([]byte(jsonData), &taskList) + assert.NoError(t, err) + assert.Equal(t, 2, len(taskList)) + + task1 := taskList.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := taskList.Key("task2").AsDoTask() + assert.NotNil(t, task2) + assert.Equal(t, 1, len(*task2.Do)) + + task3 := task2.Do.Key("task3").AsCallOpenAPITask() + assert.NotNil(t, task3) + assert.Equal(t, "openapi", task3.Call) + assert.Equal(t, "doc1", task3.With.Document.Name) + assert.Equal(t, "op1", task3.With.OperationID) +} + +func TestTaskList_MarshalJSON(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + data, err := json.Marshal(taskList) + assert.NoError(t, err) + assert.JSONEq(t, `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}}]}} + ]`, string(data)) +} + +func TestTaskList_Validation(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + // Validate each TaskItem explicitly + for _, taskItem := range taskList { + err := validate.Struct(taskItem) + if err != nil { + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Errorf("Validation failed on field '%s' with tag '%s'", validationErr.Field(), validationErr.Tag()) + } + } else { + t.Errorf("Unexpected error: %v", err) + } + } + } + +} diff --git a/model/task_try.go b/model/task_try.go new file mode 100644 index 0000000..91d3797 --- /dev/null +++ b/model/task_try.go @@ -0,0 +1,202 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TryTask struct { + TaskBase `json:",inline"` + Try *TaskList `json:"try" validate:"required,dive"` + Catch *TryTaskCatch `json:"catch" validate:"required"` +} + +type TryTaskCatch struct { + Errors struct { + With *ErrorFilter `json:"with,omitempty"` + } `json:"errors,omitempty"` + As string `json:"as,omitempty"` + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Retry *RetryPolicy `json:"retry,omitempty"` + Do *TaskList `json:"do,omitempty" validate:"omitempty,dive"` +} + +// RetryPolicy defines a retry policy. +type RetryPolicy struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + Ref string `json:"-"` // Reference to a reusable retry policy +} + +// MarshalJSON for RetryPolicy to ensure proper serialization. +func (rp *RetryPolicy) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + }{ + When: rp.When, + ExceptWhen: rp.ExceptWhen, + Delay: rp.Delay, + Backoff: rp.Backoff, + Limit: rp.Limit, + Jitter: rp.Jitter, + }) +} + +// UnmarshalJSON for RetryPolicy to ensure proper deserialization. 
+func (rp *RetryPolicy) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy: %w", err) + } + + switch v := raw.(type) { + case string: + // If it's a string, treat it as a reference + rp.Ref = v + case map[string]interface{}: + // If it's an object, unmarshal into the struct + type Alias RetryPolicy + alias := &struct { + *Alias + }{ + Alias: (*Alias)(rp), + } + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy object: %w", err) + } + default: + return fmt.Errorf("invalid RetryPolicy type: %T", v) + } + + return nil +} + +func (rp *RetryPolicy) ResolveReference(retries map[string]*RetryPolicy) error { + if rp.Ref == "" { + // No reference to resolve + return nil + } + + resolved, exists := retries[rp.Ref] + if !exists { + return fmt.Errorf("retry policy reference %q not found", rp.Ref) + } + + // Copy resolved policy fields into the current RetryPolicy + *rp = *resolved + rp.Ref = "" // Clear the reference to avoid confusion + + return nil +} + +func ResolveRetryPolicies(tasks []TryTaskCatch, retries map[string]*RetryPolicy) error { + for i := range tasks { + if tasks[i].Retry != nil { + if err := tasks[i].Retry.ResolveReference(retries); err != nil { + return fmt.Errorf("failed to resolve retry policy for task %q: %w", tasks[i].As, err) + } + } + } + return nil +} + +// RetryBackoff defines the retry backoff strategies. +type RetryBackoff struct { + Constant *BackoffDefinition `json:"constant,omitempty"` + Exponential *BackoffDefinition `json:"exponential,omitempty"` + Linear *BackoffDefinition `json:"linear,omitempty"` +} + +// MarshalJSON for RetryBackoff to ensure oneOf behavior. +func (rb *RetryBackoff) MarshalJSON() ([]byte, error) { + switch { + case rb.Constant != nil: + return json.Marshal(map[string]interface{}{"constant": rb.Constant.Definition}) + case rb.Exponential != nil: + return json.Marshal(map[string]interface{}{"exponential": rb.Exponential.Definition}) + case rb.Linear != nil: + return json.Marshal(map[string]interface{}{"linear": rb.Linear.Definition}) + default: + return nil, errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") + } +} + +func (rb *RetryBackoff) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryBackoff: %w", err) + } + + if rawConstant, ok := raw["constant"]; ok { + rb.Constant = &BackoffDefinition{} + if err := json.Unmarshal(rawConstant, &rb.Constant.Definition); err != nil { + return fmt.Errorf("failed to unmarshal constant backoff: %w", err) + } + return nil + } + + if rawExponential, ok := raw["exponential"]; ok { + rb.Exponential = &BackoffDefinition{} + if err := json.Unmarshal(rawExponential, &rb.Exponential.Definition); err != nil { + return fmt.Errorf("failed to unmarshal exponential backoff: %w", err) + } + return nil + } + + if rawLinear, ok := raw["linear"]; ok { + rb.Linear = &BackoffDefinition{} + if err := json.Unmarshal(rawLinear, &rb.Linear.Definition); err != nil { + return fmt.Errorf("failed to unmarshal linear backoff: %w", err) + } + return nil + } + + return errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") +} + +type BackoffDefinition struct { + Definition map[string]interface{} `json:"definition,omitempty"` +} + +// RetryLimit defines the retry limit configurations. 
+type RetryLimit struct { + Attempt *RetryLimitAttempt `json:"attempt,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryLimitAttempt defines the limit for each retry attempt. +type RetryLimitAttempt struct { + Count int `json:"count,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryPolicyJitter defines the randomness or variability of retry delays. +type RetryPolicyJitter struct { + From *Duration `json:"from" validate:"required"` + To *Duration `json:"to" validate:"required"` +} diff --git a/model/task_try_test.go b/model/task_try_test.go new file mode 100644 index 0000000..4daf839 --- /dev/null +++ b/model/task_try_test.go @@ -0,0 +1,171 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRetryPolicy_MarshalJSON(t *testing.T) { + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{ + Definition: map[string]interface{}{"factor": 2}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + + data, err := json.Marshal(retryPolicy) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }`, string(data)) +} + +func TestRetryPolicy_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }` + + var retryPolicy RetryPolicy + err := json.Unmarshal([]byte(jsonData), &retryPolicy) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${someCondition}"}, retryPolicy.When) + assert.Equal(t, &RuntimeExpression{"${someOtherCondition}"}, retryPolicy.ExceptWhen) + assert.Equal(t, NewDurationExpr("PT5S"), retryPolicy.Delay) + assert.NotNil(t, retryPolicy.Backoff.Exponential) + assert.Equal(t, map[string]interface{}{"factor": float64(2)}, retryPolicy.Backoff.Exponential.Definition) + assert.Equal(t, 3, retryPolicy.Limit.Attempt.Count) + assert.Equal(t, NewDurationExpr("PT1M"), retryPolicy.Limit.Attempt.Duration) + assert.Equal(t, NewDurationExpr("PT10M"), retryPolicy.Limit.Duration) + assert.Equal(t, NewDurationExpr("PT1S"), retryPolicy.Jitter.From) + assert.Equal(t, 
NewDurationExpr("PT3S"), retryPolicy.Jitter.To) +} + +func TestRetryPolicy_Validation(t *testing.T) { + // Valid RetryPolicy + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Constant: &BackoffDefinition{ + Definition: map[string]interface{}{"delay": 5}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + assert.NoError(t, validate.Struct(retryPolicy)) + + // Invalid RetryPolicy (missing required fields in Jitter) + invalidRetryPolicy := RetryPolicy{ + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + }, + } + assert.Error(t, validate.Struct(invalidRetryPolicy)) +} + +func TestRetryPolicy_UnmarshalJSON_WithReference(t *testing.T) { + retries := map[string]*RetryPolicy{ + "default": { + Delay: &Duration{DurationInline{Seconds: 3}}, + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{}, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{Count: 5}, + }, + }, + } + + jsonData := `{ + "retry": "default" + }` + + var task TryTaskCatch + err := json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + // Resolve the reference + err = task.Retry.ResolveReference(retries) + assert.NoError(t, err) + + assert.Equal(t, retries["default"].Delay, task.Retry.Delay) + assert.Equal(t, retries["default"].Backoff, task.Retry.Backoff) + assert.Equal(t, retries["default"].Limit, task.Retry.Limit) +} + +func TestRetryPolicy_UnmarshalJSON_Inline(t *testing.T) { + jsonData := `{ + "retry": { + "delay": { "seconds": 3 }, + "backoff": { "exponential": {} }, + "limit": { "attempt": { "count": 5 } } + } + }` + + var task TryTaskCatch + err := json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + assert.NotNil(t, task.Retry) + assert.Equal(t, int32(3), task.Retry.Delay.AsInline().Seconds) + assert.NotNil(t, task.Retry.Backoff.Exponential) + assert.Equal(t, 5, task.Retry.Limit.Attempt.Count) +} diff --git a/model/task_wait.go b/model/task_wait.go new file mode 100644 index 0000000..41b5cc5 --- /dev/null +++ b/model/task_wait.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// WaitTask represents a task configuration to delay execution for a specified duration. +type WaitTask struct { + TaskBase `json:",inline"` + Wait *Duration `json:"wait" validate:"required"` +} + +// MarshalJSON for WaitTask to ensure proper serialization. 
+func (wt *WaitTask) MarshalJSON() ([]byte, error) { + type Alias WaitTask + waitData, err := json.Marshal(wt.Wait) + if err != nil { + return nil, err + } + + alias := struct { + Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (Alias)(*wt), + Wait: waitData, + } + + return json.Marshal(alias) +} + +// UnmarshalJSON for WaitTask to ensure proper deserialization. +func (wt *WaitTask) UnmarshalJSON(data []byte) error { + type Alias WaitTask + alias := struct { + *Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (*Alias)(wt), + } + + // Unmarshal data into alias + if err := json.Unmarshal(data, &alias); err != nil { + return fmt.Errorf("failed to unmarshal WaitTask: %w", err) + } + + // Unmarshal Wait field + if err := json.Unmarshal(alias.Wait, &wt.Wait); err != nil { + return fmt.Errorf("failed to unmarshal Wait field: %w", err) + } + + return nil +} diff --git a/model/task_wait_test.go b/model/task_wait_test.go new file mode 100644 index 0000000..6dda965 --- /dev/null +++ b/model/task_wait_test.go @@ -0,0 +1,88 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestWaitTask_MarshalJSON(t *testing.T) { + waitTask := &WaitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Wait: NewDurationExpr("P1DT1H"), + } + + data, err := json.Marshal(waitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }`, string(data)) +} + +func TestWaitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }` + + waitTask := &WaitTask{} + err := json.Unmarshal([]byte(jsonData), waitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, waitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, waitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, waitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, waitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, waitTask.Then) + 
assert.Equal(t, map[string]interface{}{"meta": "data"}, waitTask.Metadata)
+	assert.Equal(t, NewDurationExpr("P1DT1H"), waitTask.Wait)
+}
+
+func TestWaitTask_Validation(t *testing.T) {
+	// Valid WaitTask
+	waitTask := &WaitTask{
+		TaskBase: TaskBase{},
+		Wait:     NewDurationExpr("P1DT1H"),
+	}
+	assert.NoError(t, validate.Struct(waitTask))
+
+	// Invalid WaitTask (empty wait)
+	invalidWaitTask := &WaitTask{
+		TaskBase: TaskBase{},
+	}
+	assert.Error(t, validate.Struct(invalidWaitTask))
+}
diff --git a/model/timeout.go b/model/timeout.go
new file mode 100644
index 0000000..dd63af8
--- /dev/null
+++ b/model/timeout.go
@@ -0,0 +1,232 @@
+// Copyright 2025 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package model
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+)
+
+// Timeout specifies a time limit for tasks or workflows.
+type Timeout struct {
+	// After The duration after which to timeout
+	After *Duration `json:"after" validate:"required"`
+}
+
+// UnmarshalJSON implements custom unmarshalling for Timeout.
+func (t *Timeout) UnmarshalJSON(data []byte) error {
+	var raw map[string]json.RawMessage
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+
+	// Check if "after" key exists
+	afterData, ok := raw["after"]
+	if !ok {
+		return errors.New("missing 'after' key in Timeout JSON")
+	}
+
+	// Unmarshal "after" using the Duration type
+	if err := json.Unmarshal(afterData, &t.After); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+// MarshalJSON implements custom marshalling for Timeout.
+func (t *Timeout) MarshalJSON() ([]byte, error) {
+	// Check the type of t.After.Value
+	switch v := t.After.Value.(type) {
+	case DurationInline:
+		// Serialize inline duration
+		return json.Marshal(map[string]interface{}{
+			"after": v,
+		})
+	case DurationExpression:
+		// Serialize expression as a simple string
+		return json.Marshal(map[string]string{
+			"after": v.Expression,
+		})
+	case string:
+		// Handle direct string values as DurationExpression
+		return json.Marshal(map[string]string{
+			"after": v,
+		})
+	default:
+		return nil, errors.New("unknown Duration type in Timeout")
+	}
+}
+
+// TimeoutOrReference handles either a Timeout definition or a reference (string).
+type TimeoutOrReference struct {
+	Timeout   *Timeout `json:"-" validate:"required_without=Reference"`
+	Reference *string  `json:"-" validate:"required_without=Timeout"`
+}
+
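TimeoutOrReference therefore accepts either an inline timeout object or the name of a reusable timeout. A rough usage sketch, not part of this change (exampleTimeoutForms is illustrative and assumes the model package plus encoding/json):

func exampleTimeoutForms() {
	// A bare JSON string is kept as a reference to a reusable timeout definition.
	var byRef TimeoutOrReference
	_ = json.Unmarshal([]byte(`"defaultTimeout"`), &byRef)
	// byRef.Reference -> "defaultTimeout", byRef.Timeout -> nil

	// A JSON object is parsed as an inline Timeout.
	var inline TimeoutOrReference
	_ = json.Unmarshal([]byte(`{"after": "PT5M"}`), &inline)
	// inline.Timeout.After.AsExpression() -> "PT5M", inline.Reference -> nil
}

+// UnmarshalJSON implements custom unmarshalling for TimeoutOrReference.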
+func (tr *TimeoutOrReference) UnmarshalJSON(data []byte) error { + // Attempt to unmarshal as a Timeout + var asTimeout Timeout + if err := json.Unmarshal(data, &asTimeout); err == nil { + tr.Timeout = &asTimeout + tr.Reference = nil + return nil + } + + // Attempt to unmarshal as a string (reference) + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + tr.Reference = &asString + tr.Timeout = nil + return nil + } + + // If neither works, return an error + return errors.New("invalid TimeoutOrReference: must be a Timeout or a string reference") +} + +// MarshalJSON implements custom marshalling for TimeoutOrReference. +func (tr *TimeoutOrReference) MarshalJSON() ([]byte, error) { + // Marshal as a Timeout if present + if tr.Timeout != nil { + return json.Marshal(tr.Timeout) + } + + // Marshal as a string reference if present + if tr.Reference != nil { + return json.Marshal(tr.Reference) + } + + return nil, errors.New("invalid TimeoutOrReference: neither Timeout nor Ref is set") +} + +// Duration represents a flexible duration that can be either inline or an ISO 8601 expression. +type Duration struct { + Value interface{} `json:"-"` +} + +// NewDurationExpr accessor to create a Duration object from a string +func NewDurationExpr(durationExpression string) *Duration { + return &Duration{DurationExpression{durationExpression}} +} + +func (d *Duration) AsExpression() string { + switch v := d.Value.(type) { + case string: + return v + case DurationExpression: + return v.String() + default: + return "" + } +} + +func (d *Duration) AsInline() *DurationInline { + switch v := d.Value.(type) { + case DurationInline: + return &v + default: + return nil + } +} + +// UnmarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) UnmarshalJSON(data []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err == nil { + validKeys := map[string]bool{"days": true, "hours": true, "minutes": true, "seconds": true, "milliseconds": true} + for key := range raw { + if !validKeys[key] { + return fmt.Errorf("unexpected key '%s' in duration object", key) + } + } + + inline := DurationInline{} + if err := json.Unmarshal(data, &inline); err != nil { + return fmt.Errorf("failed to unmarshal DurationInline: %w", err) + } + d.Value = inline + return nil + } + + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + d.Value = DurationExpression{Expression: asString} + return nil + } + + return errors.New("data must be a valid duration string or object") +} + +// MarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) MarshalJSON() ([]byte, error) { + switch v := d.Value.(type) { + case DurationInline: + return json.Marshal(v) + case DurationExpression: + return json.Marshal(v.Expression) + case string: + durationExpression := &DurationExpression{Expression: v} + return json.Marshal(durationExpression) + default: + return nil, errors.New("unknown Duration type") + } +} + +// DurationInline represents the inline definition of a duration. +type DurationInline struct { + Days int32 `json:"days,omitempty"` + Hours int32 `json:"hours,omitempty"` + Minutes int32 `json:"minutes,omitempty"` + Seconds int32 `json:"seconds,omitempty"` + Milliseconds int32 `json:"milliseconds,omitempty"` +} + +// MarshalJSON for DurationInline. 
+func (d *DurationInline) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "days": d.Days, + "hours": d.Hours, + "minutes": d.Minutes, + "seconds": d.Seconds, + "milliseconds": d.Milliseconds, + }) +} + +// DurationExpression represents the ISO 8601 expression of a duration. +type DurationExpression struct { + Expression string `json:"-" validate:"required,iso8601_duration"` +} + +func (d *DurationExpression) String() string { + return d.Expression +} + +// MarshalJSON for DurationExpression. +func (d *DurationExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(d.Expression) +} + +// UnmarshalJSON for DurationExpression to handle ISO 8601 strings. +func (d *DurationExpression) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err != nil { + return err + } + d.Expression = asString + return nil +} diff --git a/model/timeout_test.go b/model/timeout_test.go new file mode 100644 index 0000000..ae17555 --- /dev/null +++ b/model/timeout_test.go @@ -0,0 +1,228 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTimeout_UnmarshalJSON(t *testing.T) { + // Test cases for Timeout unmarshalling + tests := []struct { + name string + jsonStr string + expect *Timeout + err bool + }{ + { + name: "Valid inline duration", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + err: false, + }, + { + name: "Valid ISO 8601 duration", + jsonStr: `{"after": "P1Y2M3DT4H5M6S"}`, + expect: &Timeout{ + After: NewDurationExpr("P1Y2M3DT4H5M6S"), + }, + err: false, + }, + { + name: "Invalid duration type", + jsonStr: `{"after": {"unknown": "value"}}`, + expect: nil, + err: true, + }, + { + name: "Missing after key", + jsonStr: `{}`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var timeout Timeout + err := json.Unmarshal([]byte(test.jsonStr), &timeout) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &timeout) + } + }) + } +} + +func TestTimeout_MarshalJSON(t *testing.T) { + tests := []struct { + name string + input *Timeout + expected string + wantErr bool + }{ + { + name: "ISO 8601 Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationExpression{Expression: "PT1H"}, + }, + }, + expected: `{"after":"PT1H"}`, + wantErr: false, + }, + { + name: "Inline Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationInline{ + Days: 1, + Hours: 2, + Minutes: 30, + }, + }, + }, + expected: `{"after":{"days":1,"hours":2,"minutes":30}}`, + wantErr: false, + }, + { + name: "Invalid Duration", + input: &Timeout{After: &Duration{Value: 123}}, + expected: "", + wantErr: true, + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + data, err := json.Marshal(tt.input) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, tt.expected, string(data)) + } + }) + } +} + +func TestTimeoutOrReference_UnmarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference unmarshalling + tests := []struct { + name string + jsonStr string + expect *TimeoutOrReference + err bool + }{ + { + name: "Valid Timeout", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + err: false, + }, + { + name: "Valid Ref", + jsonStr: `"some-timeout-reference"`, + expect: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + err: false, + }, + { + name: "Invalid JSON", + jsonStr: `{"invalid": }`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var tor TimeoutOrReference + err := json.Unmarshal([]byte(test.jsonStr), &tor) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &tor) + } + }) + } +} + +func ptrString(s string) *string { + return &s +} + +func TestTimeoutOrReference_MarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference marshalling + tests := []struct { + name string + input *TimeoutOrReference + expect string + err bool + }{ + { + name: "Valid Timeout", + input: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + expect: `{"after":{"days":1,"hours":2}}`, + err: false, + }, + { + name: "Valid Ref", + input: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + expect: `"some-timeout-reference"`, + err: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.input) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expect, string(data)) + } + }) + } +} diff --git a/model/validator.go b/model/validator.go new file mode 100644 index 0000000..91c34b9 --- /dev/null +++ b/model/validator.go @@ -0,0 +1,389 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
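The tests above pin down the wire format for timeouts; when consuming a parsed model, the Duration accessors from model/timeout.go are the usual entry points. A small sketch, not part of this change (exampleDurationAccessors is illustrative and assumes the model package plus encoding/json):

func exampleDurationAccessors() {
	var d Duration

	// Inline form: the object is decoded into DurationInline.
	_ = json.Unmarshal([]byte(`{"seconds": 30}`), &d)
	if inline := d.AsInline(); inline != nil {
		_ = inline.Seconds // 30
	}

	// Expression form: the string is kept as an ISO 8601 DurationExpression.
	_ = json.Unmarshal([]byte(`"PT5M"`), &d)
	_ = d.AsExpression() // "PT5M"; AsInline() now returns nil
}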
+ +package model + +import ( + "errors" + "fmt" + "github.com/go-playground/validator/v10" + "regexp" + "strings" +) + +var ( + iso8601DurationPattern = regexp.MustCompile(`^P(\d+Y)?(\d+M)?(\d+D)?(T(\d+H)?(\d+M)?(\d+S)?)?$`) + semanticVersionPattern = regexp.MustCompile(`^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) + hostnameRFC1123Pattern = regexp.MustCompile(`^(([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)*[a-zA-Z]{2,63}|[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)$`) +) + +var validate *validator.Validate + +func registerValidator(tag string, fn validator.Func) { + + if err := validate.RegisterValidation(tag, fn); err != nil { + panic(fmt.Sprintf("Failed to register validator '%s': %v", tag, err)) + } +} + +func init() { + validate = validator.New() + + registerValidator("basic_policy", validateBasicPolicy) + registerValidator("bearer_policy", validateBearerPolicy) + registerValidator("digest_policy", validateDigestPolicy) + registerValidator("oauth2_policy", validateOAuth2Policy) + registerValidator("client_auth_type", validateOptionalOAuthClientAuthentication) + registerValidator("encoding_type", validateOptionalOAuth2TokenRequestEncoding) + + registerValidator("hostname_rfc1123", func(fl validator.FieldLevel) bool { + return isHostnameValid(fl.Field().String()) + }) + registerValidator("uri_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriPattern.MatchString(value) + }) + registerValidator("uri_template_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriTemplatePattern.MatchString(value) + }) + registerValidator("semver_pattern", validateSemanticVersion) + registerValidator("iso8601_duration", validateISO8601Duration) + + registerValidator("object_or_string", validateObjectOrString) + registerValidator("object_or_runtime_expr", validateObjectOrRuntimeExpr) + registerValidator("string_or_runtime_expr", validateStringOrRuntimeExpr) + registerValidator("uri_template_or_runtime_expr", validateURITemplateOrRuntimeExpr) + registerValidator("json_pointer_or_runtime_expr", validateJsonPointerOrRuntimeExpr) + + registerValidator("switch_item", validateSwitchItem) + validate.RegisterStructValidation(validateTaskItem, TaskItem{}) +} + +func GetValidator() *validator.Validate { + return validate +} + +// validateTaskItem is a struct-level validation function for TaskItem. 
+func validateTaskItem(sl validator.StructLevel) {
+	taskItem := sl.Current().Interface().(TaskItem)
+
+	// Validate Key
+	if taskItem.Key == "" {
+		sl.ReportError(taskItem.Key, "Key", "Key", "required", "")
+		return
+	}
+
+	// Validate Task is not nil
+	if taskItem.Task == nil {
+		sl.ReportError(taskItem.Task, "Task", "Task", "required", "")
+		return
+	}
+
+	// Validate the concrete type of Task and capture nested errors
+	switch t := taskItem.Task.(type) {
+	case *CallHTTP:
+		validateConcreteTask(sl, t, "Task")
+	case *CallOpenAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallGRPC:
+		validateConcreteTask(sl, t, "Task")
+	case *CallAsyncAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallFunction:
+		validateConcreteTask(sl, t, "Task")
+	case *DoTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForkTask:
+		validateConcreteTask(sl, t, "Task")
+	case *EmitTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ListenTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RaiseTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RunTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SetTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SwitchTask:
+		validateConcreteTask(sl, t, "Task")
+	case *TryTask:
+		validateConcreteTask(sl, t, "Task")
+	case *WaitTask:
+		validateConcreteTask(sl, t, "Task")
+	default:
+		sl.ReportError(taskItem.Task, "Task", "Task", "unknown_task", "unrecognized task type")
+	}
+}
+
+// validateConcreteTask validates a concrete Task type and reports nested errors.
+func validateConcreteTask(sl validator.StructLevel, task interface{}, fieldName string) {
+	err := validate.Struct(task)
+	if err != nil {
+		var validationErrors validator.ValidationErrors
+		if errors.As(err, &validationErrors) {
+			for _, ve := range validationErrors {
+				// Report only nested fields to avoid duplicates
+				if ve.Namespace() != fieldName {
+					sl.ReportError(ve.Value(), fieldName+"."+ve.StructNamespace(), ve.StructField(), ve.Tag(), ve.Param())
+				}
+			}
+		}
+	}
+}
+
+// validateSwitchItem is a custom validation function for SwitchItem; it ensures each entry defines exactly one case.
+func validateSwitchItem(fl validator.FieldLevel) bool {
+	switchItem, ok := fl.Field().Interface().(SwitchItem)
+	if !ok {
+		return false
+	}
+	return len(switchItem) == 1
+}
+
+// validateBasicPolicy ensures BasicAuthenticationPolicy has mutually exclusive fields set.
+func validateBasicPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BasicAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if (policy.Username != "" || policy.Password != "") && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateBearerPolicy ensures BearerAuthenticationPolicy has mutually exclusive fields set.
+func validateBearerPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BearerAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if policy.Token != "" && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateDigestPolicy ensures DigestAuthenticationPolicy has mutually exclusive fields set.
+func validateDigestPolicy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(DigestAuthenticationPolicy) + if !ok { + return false + } + if (policy.Username != "" || policy.Password != "") && policy.Use != "" { + return false + } + return true +} + +func validateOAuth2Policy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(OAuth2AuthenticationPolicy) + if !ok { + return false + } + + if (policy.Properties != nil || policy.Endpoints != nil) && policy.Use != "" { + return false // Both fields are set, invalid + } + if policy.Properties == nil && policy.Use == "" { + return false // Neither field is set, invalid + } + return true +} + +// validateOptionalOAuthClientAuthentication checks if the given value is a valid OAuthClientAuthenticationType. +func validateOptionalOAuthClientAuthentication(fl validator.FieldLevel) bool { + value := fl.Field().String() + + if len(value) == 0 { + return true + } + switch OAuthClientAuthenticationType(value) { + case + OAuthClientAuthClientSecretBasic, + OAuthClientAuthClientSecretPost, + OAuthClientAuthClientSecretJWT, + OAuthClientAuthPrivateKeyJWT, + OAuthClientAuthNone: + return true + default: + return false + } +} + +func validateOptionalOAuth2TokenRequestEncoding(fl validator.FieldLevel) bool { + value := fl.Field().String() + + // Allow empty fields (optional case) + if value == "" { + return true + } + + // Validate against allowed constants + switch OAuth2TokenRequestEncodingType(value) { + case + EncodingTypeFormUrlEncoded, + EncodingTypeApplicationJson: + return true + default: + return false + } +} + +func validateObjectOrString(fl validator.FieldLevel) bool { + // Access the "Value" field + value := fl.Field().Interface() + + // Validate based on the type of "Value" + switch v := value.(type) { + case string: + return v != "" // Validate non-empty strings. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Reject unsupported types. + } +} + +func validateObjectOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Unsupported types. + } +} + +func validateStringOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case string: + return v != "" // Validate non-empty strings. + default: + return false // Unsupported types. 
+ } +} + +func validateURITemplateOrRuntimeExpr(fl validator.FieldLevel) bool { + value := fl.Field().Interface() + + // Handle nil or empty values when 'omitempty' is used + if value == nil { + return true + } + + switch v := value.(type) { + case LiteralUri: + return LiteralUriPattern.MatchString(v.String()) + case LiteralUriTemplate: + return LiteralUriTemplatePattern.MatchString(v.String()) + case RuntimeExpression: + return v.IsValid() + case string: + // Check if the string is a valid URI + if LiteralUriPattern.MatchString(v) { + return true + } + + // Check if the string is a valid URI Template + if LiteralUriTemplatePattern.MatchString(v) { + return true + } + + // Check if the string is a valid RuntimeExpression + expression := RuntimeExpression{Value: v} + return expression.IsValid() + default: + fmt.Printf("Unsupported type in URITemplateOrRuntimeExpr.Value: %T\n", v) + return false + } +} + +func validateJsonPointerOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case string: // JSON Pointer + return JSONPointerPattern.MatchString(v) + case RuntimeExpression: + return v.IsValid() + default: + return false // Unsupported types. + } +} + +func validateISO8601Duration(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isISO8601DurationValid(input) +} + +func validateSemanticVersion(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isSemanticVersionValid(input) +} + +// isISO8601DurationValid validates if a string is a valid ISO 8601 duration. +func isISO8601DurationValid(input string) bool { + if !iso8601DurationPattern.MatchString(input) { + return false + } + + trimmed := strings.TrimPrefix(input, "P") + if trimmed == "" || trimmed == "T" { + return false + } + + return true +} + +// isSemanticVersionValid validates if a string is a valid semantic version. +func isSemanticVersionValid(input string) bool { + return semanticVersionPattern.MatchString(input) +} + +// isHostnameValid validates if a string is a valid RFC 1123 hostname. +func isHostnameValid(input string) bool { + return hostnameRFC1123Pattern.MatchString(input) +} diff --git a/model/validator_test.go b/model/validator_test.go new file mode 100644 index 0000000..6607369 --- /dev/null +++ b/model/validator_test.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
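Because the custom tags above are registered on the shared validate instance, any struct handled through GetValidator() can use them directly in its validate tags. A hypothetical example, not part of this change (scheduleInput and exampleCustomTags are illustrative only):

type scheduleInput struct {
	Every   string `validate:"omitempty,iso8601_duration"`
	Version string `validate:"required,semver_pattern"`
}

func exampleCustomTags() {
	_ = GetValidator().Struct(scheduleInput{Every: "PT15M", Version: "1.2.0"})      // nil: both values match
	_ = GetValidator().Struct(scheduleInput{Every: "15 minutes", Version: "v1.2"}) // fails iso8601_duration and semver_pattern
}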
+ +package model + +import ( + "testing" +) + +func TestRegexValidators(t *testing.T) { + testCases := []struct { + name string + validate func(string) bool + input string + expected bool + }{ + // ISO 8601 Duration Tests + {"ISO 8601 Duration Valid 1", isISO8601DurationValid, "P2Y", true}, + {"ISO 8601 Duration Valid 2", isISO8601DurationValid, "P1DT12H30M", true}, + {"ISO 8601 Duration Valid 3", isISO8601DurationValid, "P1Y2M3D", true}, + {"ISO 8601 Duration Valid 4", isISO8601DurationValid, "P1Y2M3D4H", false}, + {"ISO 8601 Duration Valid 5", isISO8601DurationValid, "P1Y", true}, + {"ISO 8601 Duration Valid 6", isISO8601DurationValid, "PT1H", true}, + {"ISO 8601 Duration Valid 7", isISO8601DurationValid, "P1Y2M3D4H5M6S", false}, + {"ISO 8601 Duration Invalid 1", isISO8601DurationValid, "P", false}, + {"ISO 8601 Duration Invalid 2", isISO8601DurationValid, "P1Y2M3D4H5M6S7", false}, + {"ISO 8601 Duration Invalid 3", isISO8601DurationValid, "1Y", false}, + + // Semantic Versioning Tests + {"Semantic Version Valid 1", isSemanticVersionValid, "1.0.0", true}, + {"Semantic Version Valid 2", isSemanticVersionValid, "1.2.3", true}, + {"Semantic Version Valid 3", isSemanticVersionValid, "1.2.3-beta", true}, + {"Semantic Version Valid 4", isSemanticVersionValid, "1.2.3-beta.1", true}, + {"Semantic Version Valid 5", isSemanticVersionValid, "1.2.3-beta.1+build.123", true}, + {"Semantic Version Invalid 1", isSemanticVersionValid, "v1.2.3", false}, + {"Semantic Version Invalid 2", isSemanticVersionValid, "1.2", false}, + {"Semantic Version Invalid 3", isSemanticVersionValid, "1.2.3-beta.x", true}, + + // RFC 1123 Hostname Tests + {"RFC 1123 Hostname Valid 1", isHostnameValid, "example.com", true}, + {"RFC 1123 Hostname Valid 2", isHostnameValid, "my-hostname", true}, + {"RFC 1123 Hostname Valid 3", isHostnameValid, "subdomain.example.com", true}, + {"RFC 1123 Hostname Invalid 1", isHostnameValid, "127.0.0.1", false}, + {"RFC 1123 Hostname Invalid 2", isHostnameValid, "example.com.", false}, + {"RFC 1123 Hostname Invalid 3", isHostnameValid, "example..com", false}, + {"RFC 1123 Hostname Invalid 4", isHostnameValid, "example.com-", false}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := tc.validate(tc.input) + if result != tc.expected { + t.Errorf("Validation failed for '%s': input='%s', expected=%v, got=%v", tc.name, tc.input, tc.expected, result) + } + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 54723bb..17973e1 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -1,10 +1,10 @@ -// Copyright 2021 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -15,583 +15,221 @@ package model import ( - "bytes" "encoding/json" "errors" - - "github.com/serverlessworkflow/sdk-go/v2/util" + "fmt" ) -// InvokeKind defines how the target is invoked. -type InvokeKind string - -func (i InvokeKind) KindValues() []string { - return []string{ - string(InvokeKindSync), - string(InvokeKindAsync), +// Workflow represents the root structure of a workflow. 
+type Workflow struct { + Document Document `json:"document" yaml:"document" validate:"required"` + Input *Input `json:"input,omitempty" yaml:"input,omitempty"` + Use *Use `json:"use,omitempty" yaml:"use"` + Do *TaskList `json:"do" yaml:"do" validate:"required,dive"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" yaml:"timeout,omitempty"` + Output *Output `json:"output,omitempty" yaml:"output,omitempty"` + Schedule *Schedule `json:"schedule,omitempty" yaml:"schedule,omitempty"` +} + +func (w *Workflow) MarshalYAML() (interface{}, error) { + // Create a map to hold fields + data := map[string]interface{}{ + "document": w.Document, } -} -func (i InvokeKind) String() string { - return string(i) -} - -const ( - // InvokeKindSync meaning that worfklow execution should wait until the target completes. - InvokeKindSync InvokeKind = "sync" - // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its - // completion. - InvokeKindAsync InvokeKind = "async" -) - -// ActionMode specifies how actions are to be performed. -type ActionMode string - -func (i ActionMode) KindValues() []string { - return []string{ - string(ActionModeSequential), - string(ActionModeParallel), + // Conditionally add fields + if w.Input != nil { + data["input"] = w.Input + } + if w.Use != nil { + data["use"] = w.Use + } + data["do"] = w.Do + if w.Timeout != nil { + data["timeout"] = w.Timeout + } + if w.Output != nil { + data["output"] = w.Output + } + if w.Schedule != nil { + data["schedule"] = w.Schedule } -} -func (i ActionMode) String() string { - return string(i) + return data, nil } -const ( - // ActionModeSequential specifies actions should be performed in sequence - ActionModeSequential ActionMode = "sequential" - - // ActionModeParallel specifies actions should be performed in parallel - ActionModeParallel ActionMode = "parallel" -) - -const ( - // UnlimitedTimeout description for unlimited timeouts - UnlimitedTimeout = "unlimited" -) - -type ExpressionLangType string - -func (i ExpressionLangType) KindValues() []string { - return []string{ - string(JqExpressionLang), - string(JsonPathExpressionLang), - string(CELExpressionLang), - } +// Document holds metadata for the workflow. +type Document struct { + DSL string `json:"dsl" yaml:"dsl" validate:"required,semver_pattern"` + Namespace string `json:"namespace" yaml:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" yaml:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" yaml:"version" validate:"required,semver_pattern"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + Tags map[string]string `json:"tags,omitempty" yaml:"tags,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty" yaml:"metadata,omitempty"` } -func (i ExpressionLangType) String() string { - return string(i) +// Input Configures the workflow's input. +type Input struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + From *ObjectOrRuntimeExpr `json:"from,omitempty" validate:"omitempty"` } -const ( - //JqExpressionLang ... - JqExpressionLang ExpressionLangType = "jq" - - // JsonPathExpressionLang ... 
- JsonPathExpressionLang ExpressionLangType = "jsonpath" - - // CELExpressionLang - CELExpressionLang ExpressionLangType = "cel" -) - -// BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces -// to make it easy for custom unmarshalers implementations to unmarshal the common data structure. -// +builder-gen:new-call=ApplyDefault -type BaseWorkflow struct { - // Workflow unique identifier - // +optional - ID string `json:"id,omitempty" validate:"required_without=Key"` - // Key Domain-specific workflow identifier - // +optional - Key string `json:"key,omitempty" validate:"required_without=ID"` - // Workflow name - Name string `json:"name,omitempty"` - // Workflow description. - // +optional - Description string `json:"description,omitempty"` - // Workflow version. - // +optional - Version string `json:"version" validate:"omitempty,min=1"` - // Workflow start definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Start *Start `json:"start,omitempty"` - // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important - // qualities. - // +optional - Annotations []string `json:"annotations,omitempty"` - // DataInputSchema URI or Object of the JSON Schema used to validate the workflow data input - // +optional - DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` - // Serverless Workflow schema version - // +kubebuilder:validation:Required - // +kubebuilder:default="0.8" - SpecVersion string `json:"specVersion" validate:"required"` - // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, - // inside your Workflow Expressions. - // +optional - Secrets Secrets `json:"secrets,omitempty" validate:"unique"` - // Constants Workflow constants are used to define static, and immutable, data which is available to - // Workflow Expressions. - // +optional - Constants *Constants `json:"constants,omitempty"` - // Identifies the expression language used for workflow expressions. Default is 'jq'. - // +kubebuilder:validation:Enum=jq;jsonpath;cel - // +kubebuilder:default=jq - // +optional - ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"required,oneofkind"` - // Defines the workflow default timeout settings. - // +optional - Timeouts *Timeouts `json:"timeouts,omitempty"` - // Defines checked errors that can be explicitly handled during workflow execution. - // +optional - Errors Errors `json:"errors,omitempty" validate:"unique=Name,dive"` - // If "true", workflow instances is not terminated when there are no active execution paths. - // Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" - // +optional - KeepActive bool `json:"keepActive,omitempty"` - // Metadata custom information shared with the runtime. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` - // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false - // +optional - AutoRetries bool `json:"autoRetries,omitempty"` - // Auth definitions can be used to define authentication information that should be applied to resources defined - // in the operation property of function definitions. 
It is not used as authentication information for the - // function invocation, but just to access the resource containing the function invocation information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Auth Auths `json:"auth,omitempty" validate:"unique=Name,dive"` +// Output Configures the output of a workflow or task. +type Output struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -// ApplyDefault set the default values for Workflow -func (w *BaseWorkflow) ApplyDefault() { - w.SpecVersion = "0.8" - w.ExpressionLang = JqExpressionLang +// Export Set the content of the context. +type Export struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -type Auths []Auth - -type authsUnmarshal Auths - -// UnmarshalJSON implements json.Unmarshaler -func (r *Auths) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) +// Schedule the workflow. +type Schedule struct { + Every *Duration `json:"every,omitempty" validate:"omitempty"` + Cron string `json:"cron,omitempty" validate:"omitempty"` + After *Duration `json:"after,omitempty" validate:"omitempty"` + On *EventConsumptionStrategy `json:"on,omitempty" validate:"omitempty"` } -type Errors []Error - -type errorsUnmarshal Errors +const DefaultSchema = "json" -// UnmarshalJSON implements json.Unmarshaler -func (e *Errors) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) +// Schema represents the definition of a schema. +type Schema struct { + Format string `json:"format,omitempty"` + Document interface{} `json:"document,omitempty" validate:"omitempty"` + Resource *ExternalResource `json:"resource,omitempty" validate:"omitempty"` } -// Workflow base definition -// +builder-gen:embedded-ignore-method=BaseWorkflow -type Workflow struct { - BaseWorkflow `json:",inline"` - // +kubebuilder:pruning:PreserveUnknownFields - States States `json:"states" validate:"min=1,unique=Name,dive"` - // +optional - Events Events `json:"events,omitempty" validate:"unique=Name,dive"` - // +optional - Functions Functions `json:"functions,omitempty" validate:"unique=Name,dive"` - // +optional - Retries Retries `json:"retries,omitempty" validate:"unique=Name,dive"` +func (s *Schema) ApplyDefaults() { + if len(s.Format) == 0 { + s.Format = DefaultSchema + } } -type workflowUnmarshal Workflow +// UnmarshalJSON for Schema enforces "oneOf" behavior. 
+func (s *Schema) UnmarshalJSON(data []byte) error { + s.ApplyDefaults() -// UnmarshalJSON implementation for json Unmarshal function for the Workflow type -func (w *Workflow) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - err := util.UnmarshalObject("workflow", data, (*workflowUnmarshal)(w)) - if err != nil { + // Parse into a temporary map for flexibility + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { return err } - if w.Start == nil && len(w.States) > 0 { - w.Start = &Start{ - StateName: w.States[0].Name, + // Check for "document" + if doc, ok := raw["document"]; ok { + // Determine if "document" is a string or an object + switch doc.(type) { + case string: + s.Document = doc + case map[string]interface{}: + s.Document = doc + default: + return errors.New("invalid Schema: 'document' must be a string or an object") } } - return nil -} - -// States ... -// +kubebuilder:validation:MinItems=1 -type States []State - -type statesUnmarshal States - -// UnmarshalJSON implements json.Unmarshaler -func (s *States) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("states", data, (*statesUnmarshal)(s)) -} - -type Events []Event - -type eventsUnmarshal Events - -// UnmarshalJSON implements json.Unmarshaler -func (e *Events) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) -} - -type Functions []Function - -type functionsUnmarshal Functions - -// UnmarshalJSON implements json.Unmarshaler -func (f *Functions) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) -} - -type Retries []Retry - -type retriesUnmarshal Retries - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retries) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) -} - -// Timeouts ... -type Timeouts struct { - // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should - // be 'unlimited'. - // +optional - WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` - // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format). - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format). - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format). - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` - // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format). - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` -} - -type timeoutsUnmarshal Timeouts - -// UnmarshalJSON implements json.Unmarshaler -func (t *Timeouts) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) -} - -// WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration -// format. If not defined, the workflow execution should be given "unlimited" amount of time to complete. 
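+ // Honor an explicitly provided "format"; ApplyDefaults has already set the default
+ if format, ok := raw["format"].(string); ok && format != "" {
+ s.Format = format
+ }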
-// +builder-gen:new-call=ApplyDefault -type WorkflowExecTimeout struct { - // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. - // +kubebuilder:default=unlimited - Duration string `json:"duration" validate:"required,min=1,iso8601duration"` - // If false, workflow instance is allowed to finish current execution. If true, current workflow execution - // is stopped immediately. Default is false. - // +optional - Interrupt bool `json:"interrupt,omitempty"` - // Name of a workflow state to be executed before workflow instance is terminated. - // +optional - RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` -} - -type workflowExecTimeoutUnmarshal WorkflowExecTimeout - -// UnmarshalJSON implements json.Unmarshaler -func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) -} - -// ApplyDefault set the default values for Workflow Exec Timeout -func (w *WorkflowExecTimeout) ApplyDefault() { - w.Duration = UnlimitedTimeout -} - -// Error declaration for workflow definitions -type Error struct { - // Name Domain-specific error name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. - // Should not be defined if error is set to '*'. - // +optional - Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // OnError description. - // +optional - Description string `json:"description,omitempty"` -} - -// Start definition -type Start struct { - // Name of the starting workflow state - // +kubebuilder:validation:Required - StateName string `json:"stateName" validate:"required"` - // Define the recurring time intervals or cron expressions at which workflow instances should be automatically - // started. - // +optional - Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` -} - -type startUnmarshal Start - -// UnmarshalJSON implements json.Unmarshaler -func (s *Start) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) -} - -// Schedule ... -type Schedule struct { - // TODO Interval is required if Cron is not set and vice-versa, make a exclusive validation - // A recurring time interval expressed in the derivative of ISO 8601 format specified below. Declares that - // workflow instances should be automatically created at the start of each time interval in the series. - // +optional - Interval string `json:"interval,omitempty"` - // Cron expression defining when workflow instances should be automatically created. - // optional - Cron *Cron `json:"cron,omitempty"` - // Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time - // w/ timezone then proper timezone conversion will be applied. (default: UTC). - // +optional - Timezone string `json:"timezone,omitempty"` -} - -type scheduleUnmarshal Schedule - -// UnmarshalJSON implements json.Unmarshaler -func (s *Schedule) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) -} - -// Cron ... -type Cron struct { - // Cron expression describing when the workflow instance should be created (automatically). 
- // +kubebuilder:validation:Required - Expression string `json:"expression" validate:"required"` - // Specific date and time (ISO 8601 format) when the cron expression is no longer valid. - // +optional - ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601datetime"` -} + // Check for "resource" + if res, ok := raw["resource"]; ok { + var resource ExternalResource + resBytes, err := json.Marshal(res) + if err != nil { + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) + } + if err := json.Unmarshal(resBytes, &resource); err != nil { + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) + } + s.Resource = &resource + } -type cronUnmarshal Cron + // Validate "oneOf" logic + if (s.Document != nil && s.Resource != nil) || (s.Document == nil && s.Resource == nil) { + return errors.New("invalid Schema: must specify either 'document' or 'resource', but not both") + } -// UnmarshalJSON custom unmarshal function for Cron -func (c *Cron) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) + return nil } -// Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). -// Each state can define a transition definition that is used to determine which state to transition to next. -type Transition struct { - stateParent *State `json:"-"` // used in validation - // Name of the state to transition to next. - // +kubebuilder:validation:Required - NextState string `json:"nextState" validate:"required,min=1"` - // Array of producedEvent definitions. Events to be produced before the transition takes place. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty" validate:"omitempty,dive"` - // If set to true, triggers workflow compensation before this transition is taken. Default is false. - // +kubebuilder:default=false - // +optional - Compensate bool `json:"compensate,omitempty"` -} +// MarshalJSON for Schema marshals the correct field. +func (s *Schema) MarshalJSON() ([]byte, error) { + s.ApplyDefaults() -type transitionUnmarshal Transition + if s.Document != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "document": s.Document, + }) + } + if s.Resource != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "resource": s.Resource, + }) + } -// UnmarshalJSON implements json.Unmarshaler -func (t *Transition) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) + return nil, errors.New("invalid Schema: no valid field to marshal") } -// OnError ... -type OnError struct { - // ErrorRef Reference to a unique workflow error definition. Used of errorRefs is not used - ErrorRef string `json:"errorRef,omitempty"` - // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used - ErrorRefs []string `json:"errorRefs,omitempty" validate:"omitempty,unique"` - // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if - // retries were unsuccessful. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if - // retries were unsuccessful. 
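+// ExternalResource describes a resource, such as a schema document, that is hosted externally and addressed by an endpoint.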
- // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` +type ExternalResource struct { + Name string `json:"name,omitempty"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// End definition -type End struct { - // If true, completes all execution flows in the given workflow instance. - // +optional - Terminate bool `json:"terminate,omitempty"` - // Array of producedEvent definitions. Defines events that should be produced. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` - // If set to true, triggers workflow compensation before workflow execution completes. Default is false. - // +optional - Compensate bool `json:"compensate,omitempty"` - // Defines that current workflow execution should stop, and execution should continue as a new workflow - // instance of the provided id - // +optional - ContinueAs *ContinueAs `json:"continueAs,omitempty"` +type Use struct { + Authentications map[string]*AuthenticationPolicy `json:"authentications,omitempty" validate:"omitempty,dive"` + Errors map[string]*Error `json:"errors,omitempty" validate:"omitempty,dive"` + Extensions ExtensionList `json:"extensions,omitempty" validate:"omitempty,dive"` + Functions NamedTaskMap `json:"functions,omitempty" validate:"omitempty,dive"` + Retries map[string]*RetryPolicy `json:"retries,omitempty" validate:"omitempty,dive"` + Secrets []string `json:"secrets,omitempty"` + Timeouts map[string]*Timeout `json:"timeouts,omitempty" validate:"omitempty,dive"` + Catalogs map[string]*Catalog `json:"catalogs,omitempty" validate:"omitempty,dive"` } -type endUnmarshal End - -// UnmarshalJSON implements json.Unmarshaler -func (e *End) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) +type Catalog struct { + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) -type ContinueAs struct { - // Unique id of the workflow to continue execution as. - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Version of the workflow to continue execution as. - // +optional - Version string `json:"version,omitempty"` - // If string type, an expression which selects parts of the states data output to become the workflow data input of - // continued execution. If object type, a custom object to become the workflow data input of the continued execution - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data Object `json:"data,omitempty"` - // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. - // Overwrites any specific settings set by that workflow - // +optional - WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` +// FlowDirective represents a directive that can be an enumerated or free-form string. +type FlowDirective struct { + Value string `json:"-" validate:"required"` // Ensure the value is non-empty. 
} -type continueAsUnmarshal ContinueAs - -// UnmarshalJSON implements json.Unmarshaler -func (c *ContinueAs) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) -} +type FlowDirectiveType string -// ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a -// workflow transitions. The eventRef property must match the name of one of the defined produced events in the -// events definition. -type ProduceEvent struct { - // Reference to a defined unique event name in the events definition - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // If String, expression which selects parts of the states data output to become the data of the produced event. - // If object a custom object to become the data of produced event. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data Object `json:"data,omitempty"` - // Add additional event extension context attributes. - // +optional - ContextAttributes map[string]string `json:"contextAttributes,omitempty"` -} +const ( + FlowDirectiveContinue FlowDirectiveType = "continue" + FlowDirectiveExit FlowDirectiveType = "exit" + FlowDirectiveEnd FlowDirectiveType = "end" +) -// StateDataFilter ... -type StateDataFilter struct { - // Workflow expression to filter the state data input - Input string `json:"input,omitempty"` - // Workflow expression that filters the state data output - Output string `json:"output,omitempty"` +// Enumerated values for FlowDirective. +var validFlowDirectives = map[string]struct{}{ + "continue": {}, + "exit": {}, + "end": {}, } -// DataInputSchema Used to validate the workflow data input against a defined JSON Schema -// +builder-gen:new-call=ApplyDefault -type DataInputSchema struct { - // +kubebuilder:validation:Required - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Schema *Object `json:"schema" validate:"required"` - // +kubebuilder:validation:Required - FailOnValidationErrors bool `json:"failOnValidationErrors"` +// IsEnum checks if the FlowDirective matches one of the enumerated values. +func (f *FlowDirective) IsEnum() bool { + _, exists := validFlowDirectives[f.Value] + return exists } -type dataInputSchemaUnmarshal DataInputSchema - -// UnmarshalJSON implements json.Unmarshaler -func (d *DataInputSchema) UnmarshalJSON(data []byte) error { - d.ApplyDefault() - - // expected: data = "{\"key\": \"value\"}" - // data = {"key": "value"} - // data = "file://..." 
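+// UnmarshalJSON accepts any JSON string as the directive value; both enumerated and free-form directives are allowed.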
- // data = { "schema": "{\"key\": \"value\"}", "failOnValidationErrors": true } - // data = { "schema": {"key": "value"}, "failOnValidationErrors": true } - // data = { "schema": "file://...", "failOnValidationErrors": true } - - schemaString := "" - err := util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &schemaString, (*dataInputSchemaUnmarshal)(d)) - if err != nil { +func (f *FlowDirective) UnmarshalJSON(data []byte) error { + var value string + if err := json.Unmarshal(data, &value); err != nil { return err } - - if d.Schema != nil { - if d.Schema.Type == Map { - return nil - - } else if d.Schema.Type == String { - schemaString = d.Schema.StringValue - - } else { - return errors.New("invalid dataInputSchema must be a string or object") - } - } - - if schemaString != "" { - data = []byte(schemaString) - if bytes.TrimSpace(data)[0] != '{' { - data = []byte("\"" + schemaString + "\"") - } - } - - d.Schema = new(Object) - return util.UnmarshalObject("schema", data, &d.Schema) -} - -// ApplyDefault set the default values for Data Input Schema -func (d *DataInputSchema) ApplyDefault() { - d.FailOnValidationErrors = true -} - -// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your -// Workflow Expressions. -type Secrets []string - -type secretsUnmarshal Secrets - -// UnmarshalJSON implements json.Unmarshaler -func (s *Secrets) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) -} - -// Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. -type Constants struct { - // Data represents the generic structure of the constants value - // +optional - Data ConstantsData `json:",omitempty"` + f.Value = value + return nil } -// UnmarshalJSON implements json.Unmarshaler -func (c *Constants) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("constants", data, &c.Data) +func (f *FlowDirective) MarshalJSON() ([]byte, error) { + return json.Marshal(f.Value) } - -type ConstantsData map[string]json.RawMessage diff --git a/model/workflow_ref.go b/model/workflow_ref.go deleted file mode 100644 index c1fd1ce..0000000 --- a/model/workflow_ref.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// CompletionType define on how to complete branch execution. 
-type OnParentCompleteType string - -func (i OnParentCompleteType) KindValues() []string { - return []string{ - string(OnParentCompleteTypeTerminate), - string(OnParentCompleteTypeContinue), - } -} - -func (i OnParentCompleteType) String() string { - return string(i) -} - -const ( - OnParentCompleteTypeTerminate OnParentCompleteType = "terminate" - OnParentCompleteTypeContinue OnParentCompleteType = "continue" -) - -// WorkflowRef holds a reference for a workflow definition -// +builder-gen:new-call=ApplyDefault -type WorkflowRef struct { - // Sub-workflow unique id - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - // +optional - Version string `json:"version,omitempty"` - // Specifies if the subflow should be invoked sync or async. - // Defaults to sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - // +optional - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` - // onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - // is 'async'. Defaults to terminate. - // +kubebuilder:validation:Enum=terminate;continue - // +kubebuilder:default=terminate - OnParentComplete OnParentCompleteType `json:"onParentComplete,omitempty" validate:"required,oneofkind"` -} - -type workflowRefUnmarshal WorkflowRef - -// UnmarshalJSON implements json.Unmarshaler -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - s.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) -} - -// ApplyDefault set the default values for Workflow Ref -func (s *WorkflowRef) ApplyDefault() { - s.Invoke = InvokeKindSync - s.OnParentComplete = "terminate" -} diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go deleted file mode 100644 index 4a69fb5..0000000 --- a/model/workflow_ref_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestWorkflowRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "normal object test", - data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "normal object test & defaults", - data: `{"workflowId": "1"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "normal string test", - data: `"1"`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid string format", - data: `"1`, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{}, - err: "subFlowRef.workflowId must be string", - }, - { - desp: "invalid string or object", - data: `1`, - expect: WorkflowRef{}, - err: `subFlowRef must be string or object`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/workflow_ref_validator_test.go b/model/workflow_ref_validator_test.go deleted file mode 100644 index 96a7f9c..0000000 --- a/model/workflow_ref_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func TestWorkflowRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(&baseWorkflow.States[0], true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].OperationState.Actions[0].FunctionRef = nil - baseWorkflow.States[0].OperationState.Actions[0].SubFlowRef = &WorkflowRef{ - WorkflowID: "workflowID", - Invoke: InvokeKindSync, - OnParentComplete: OnParentCompleteTypeTerminate, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.WorkflowID = "" - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.workflowID is required -workflow.states[0].actions[0].subFlowRef.invoke is required -workflow.states[0].actions[0].subFlowRef.onParentComplete is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "invalid invoce" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "invalid parent complete" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.invoke need by one of [sync async] -workflow.states[0].actions[0].subFlowRef.onParentComplete need by one of [terminate continue]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/workflow_test.go b/model/workflow_test.go index a5aa42a..df90f1e 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -1,10 +1,10 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -16,662 +16,554 @@ package model import ( "encoding/json" - "fmt" - "net/http" - "net/http/httptest" + "errors" "testing" - "github.com/serverlessworkflow/sdk-go/v2/util" + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" ) -func TestWorkflowStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Workflow - err string - } - testCases := []testCase{ - { - desp: "start string", - data: `{"start": "start state name"}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{}, - }, - err: ``, - }, - { - desp: "start empty and use the first state", - data: `{"states": [{"name": "start state name", "type": "operation"}]}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{ - { - BaseState: BaseState{ - Name: "start state name", - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - }, - }, - }, - }, - err: ``, - }, - { - desp: "start empty and states empty", - data: `{"states": []}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - }, - States: []State{}, - }, - err: ``, +func TestDocument_JSONMarshal(t *testing.T) { + doc := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", }, } - for _, tc := range testCases[1:] { - t.Run(tc.desp, func(t *testing.T) { - var v Workflow - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } + data, err := json.Marshal(doc) + assert.NoError(t, err) + + expectedJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + // Use JSON comparison to avoid formatting mismatches + var expected, actual map[string]interface{} + assert.NoError(t, json.Unmarshal([]byte(expectedJSON), &expected)) + assert.NoError(t, json.Unmarshal(data, &actual)) + assert.Equal(t, expected, actual) } -func TestContinueAsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ContinueAs - err string - } - testCases := []testCase{ - { - desp: "string", - data: `"1"`, - expect: ContinueAs{ - WorkflowID: "1", - }, - err: ``, - }, - { - desp: "object all field set", - data: `{"workflowId": "1", "version": "2", "data": "3", "workflowExecTimeout": {"duration": "PT1H", "interrupt": true, "runBefore": "4"}}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "2", - Data: FromString("3"), - WorkflowExecTimeout: 
WorkflowExecTimeout{ - Duration: "PT1H", - Interrupt: true, - RunBefore: "4", - }, - }, - err: ``, - }, - { - desp: "object optional field unset", - data: `{"workflowId": "1"}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "", - Data: Object{}, - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "", - Interrupt: false, - RunBefore: "", - }, - }, - err: ``, - }, - { - desp: "invalid string format", - data: `"{`, - expect: ContinueAs{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid object format", - data: `{"workflowId": 1}`, - expect: ContinueAs{}, - err: `continueAs.workflowId must be string`, +func TestDocument_JSONUnmarshal(t *testing.T) { + inputJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) + + expected := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ContinueAs - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + assert.Equal(t, expected, doc) } -func TestEndUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect End - err string - } - testCases := []testCase{ - { - desp: "bool success", - data: `true`, - expect: End{ - Terminate: true, - }, - err: ``, - }, - { - desp: "string fail", - data: `"true"`, - expect: End{}, - err: `end must be bool or object`, - }, - { - desp: `object success`, - data: `{"terminate": true}`, - expect: End{ - Terminate: true, - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"terminate": "true"}`, - expect: End{ - Terminate: true, - }, - err: `end.terminate must be bool`, - }, - { - desp: `object key invalid`, - data: `{"terminate_parameter_invalid": true}`, - expect: End{}, - err: ``, +func TestDocument_JSONUnmarshal_InvalidJSON(t *testing.T) { + invalidJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "tags": { + "env": "prod", + "team": "workflow" + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` // Missing closing brace for "tags" + + var doc Document + err := json.Unmarshal([]byte(invalidJSON), &doc) + assert.Error(t, err) +} + +func TestDocument_Validation_MissingRequiredField(t *testing.T) { + inputJSON := `{ + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0" + }` // Missing "dsl" + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) // JSON is valid for unmarshalling + + // Validate the struct + err = validate.Struct(doc) + assert.Error(t, err) + + // Assert that the error is specifically about the missing "dsl" field + assert.Contains(t, err.Error(), "Key: 'Document.DSL' Error:Field validation for 'DSL' failed on the 'required' 
tag") +} + +func TestSchemaValidation(t *testing.T) { + + tests := []struct { + name string + jsonInput string + valid bool + }{ + // Valid Cases + { + name: "Valid Inline Schema", + jsonInput: `{ + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid External Schema Without Name", + jsonInput: `{ + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid Inline Schema with Format", + jsonInput: `{ + "format": "yaml", + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema with Format", + jsonInput: `{ + "format": "xml", + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + // Invalid Cases + { + name: "Invalid Both Document and Resource", + jsonInput: `{ + "document": "{\"key\":\"value\"}", + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: false, + }, + { + name: "Invalid Missing Both Document and Resource", + jsonInput: `{ + "format": "json" + }`, + valid: false, + }, + { + name: "Invalid Resource Without Endpoint", + jsonInput: `{ + "resource": { + "name": "external-schema" + } + }`, + valid: false, + }, + { + name: "Invalid Resource with Invalid URL", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "not-a-valid-url" + } + } + }`, + valid: false, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v End - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var schema Schema + err := json.Unmarshal([]byte(tt.jsonInput), &schema) + if tt.valid { + // Assert no unmarshalling error + assert.NoError(t, err) + + // Validate the struct + err = validate.Struct(schema) + assert.NoError(t, err, "Expected valid schema but got validation error: %v", err) + } else { + // Assert unmarshalling or validation error + if err == nil { + err = validate.Struct(schema) + } + assert.Error(t, err, "Expected validation error but got none") + } }) } } -func TestWorkflowExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowExecTimeout - err string - } +type InputTestCase struct { + Name string + Input Input + ShouldErr bool +} - testCases := []testCase{ - { - desp: "string success", - data: `"PT15M"`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: ``, - }, - { - desp: "string fail", - data: `PT15M`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: `invalid character 'P' looking for beginning of value`, - }, - { - desp: `object success`, - data: `{"duration": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", +func TestInputValidation(t *testing.T) { + cases := []InputTestCase{ + { + Name: "Valid Input with Schema and From (object)", + Input: Input{ + Schema: &Schema{ + Format: "json", + Document: func() *string { + doc := "example schema" + return &doc + }(), + }, + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{ + "key": "value", + }, + }, }, - err: ``, + ShouldErr: false, }, { - desp: `object 
fail`, - data: `{"duration": PT15M}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", + Name: "Invalid Input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "example input", + }, }, - err: `invalid character 'P' looking for beginning of value`, + ShouldErr: true, }, { - desp: `object key invalid`, - data: `{"duration_invalid": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "unlimited", + Name: "Valid Input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "${ expression }", + }, }, - err: ``, + ShouldErr: true, }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowExecTimeout - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Start - err string - } - - testCases := []testCase{ { - desp: "string success", - data: `"start state"`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Empty From (expr)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: "", + }, }, - err: ``, + ShouldErr: true, }, { - desp: "string fail", - data: `start state`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Empty From (object)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{}, + }, }, - err: `invalid character 's' looking for beginning of value`, + ShouldErr: true, }, { - desp: `object success`, - data: `{"stateName": "start state"}`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Unsupported From Type", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: 123, + }, }, - err: ``, + ShouldErr: true, }, { - desp: `object fail`, - data: `{"stateName": start state}`, - expect: Start{ - StateName: "start state", + Name: "Valid Input with Schema Only", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, }, - err: `invalid character 's' looking for beginning of value`, + ShouldErr: false, }, { - desp: `object key invalid`, - data: `{"stateName_invalid": "start state"}`, - expect: Start{ - StateName: "", - }, - err: ``, + Name: "Input with Neither Schema Nor From", + Input: Input{}, + ShouldErr: false, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Start - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Struct(tc.Input) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } }) } } -func TestCronUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Cron - err string - } - - testCases := []testCase{ +func TestFlowDirectiveValidation(t *testing.T) { + cases := []struct { + Name string + Input FlowDirective + IsEnum bool // Expected result for IsEnum method. + ShouldErr bool // Expected result for validation. + }{ { - desp: "string success", - data: `"0 15,30,45 * ? * *"`, - expect: Cron{ - Expression: "0 15,30,45 * ? 
* *", - }, - err: ``, + Name: "Valid Enum: continue", + Input: FlowDirective{Value: "continue"}, + IsEnum: true, + ShouldErr: false, }, { - desp: "string fail", - data: `0 15,30,45 * ? * *`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: `invalid character '1' after top-level value`, + Name: "Valid Enum: exit", + Input: FlowDirective{Value: "exit"}, + IsEnum: true, + ShouldErr: false, }, { - desp: `object success`, - data: `{"expression": "0 15,30,45 * ? * *"}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: ``, + Name: "Valid Enum: end", + Input: FlowDirective{Value: "end"}, + IsEnum: true, + ShouldErr: false, }, { - desp: `object fail`, - data: `{"expression": "0 15,30,45 * ? * *}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: `unexpected end of JSON input`, + Name: "Valid Free-form String", + Input: FlowDirective{Value: "custom-directive"}, + IsEnum: false, + ShouldErr: false, }, { - desp: `object key invalid`, - data: `{"expression_invalid": "0 15,30,45 * ? * *"}`, - expect: Cron{}, - err: ``, + Name: "Invalid Empty String", + Input: FlowDirective{Value: ""}, + IsEnum: false, + ShouldErr: true, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Cron - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + // Validate the struct + err := validate.Var(tc.Input.Value, "required") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + // Check IsEnum result + assert.Equal(t, tc.IsEnum, tc.Input.IsEnum(), "unexpected IsEnum result") }) } } -func TestTransitionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Transition - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"next state"`, - expect: Transition{ - NextState: "next state", - }, - err: ``, +func TestUse_MarshalJSON(t *testing.T) { + use := Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), }, - { - desp: `object success`, - data: `{"nextState": "next state"}`, - expect: Transition{ - NextState: "next state", - }, - err: ``, + Errors: map[string]*Error{ + "error1": {Type: NewUriTemplate("http://example.com/errors"), Status: 404}, }, - { - desp: `object fail`, - data: `{"nextState": "next state}`, - expect: Transition{ - NextState: "next state", + Extensions: ExtensionList{ + {Key: "ext1", Extension: &Extension{Extend: "call"}}, + {Key: "ext2", Extension: &Extension{Extend: "emit"}}, + {Key: "ext3", Extension: &Extension{Extend: "for"}}, + }, + Functions: NamedTaskMap{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com/"), Method: "GET"}}, + }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, }, - err: `unexpected end of JSON input`, }, - { - desp: `object key invalid`, - data: `{"nextState_invalid": "next state"}`, - expect: Transition{}, - err: ``, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, }, } - for _, tc := range 
testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Transition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + data, err := json.Marshal(use) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "authentications": {"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [ + {"ext1": {"extend": "call"}}, + {"ext2": {"extend": "emit"}}, + {"ext3": {"extend": "for"}} + ], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com/", "method": "GET"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }`, string(data)) } -func TestDataInputSchemaUnmarshalJSON(t *testing.T) { +func TestUse_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "authentications": {"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [{"ext1": {"extend": "call"}}], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }` - var schemaName Object - err := json.Unmarshal([]byte("{\"key\": \"value\"}"), &schemaName) - if !assert.NoError(t, err) { - return - } + var use Use + err := json.Unmarshal([]byte(jsonData), &use) + assert.NoError(t, err) - type testCase struct { - desp string - data string - expect DataInputSchema - err string - } + assert.NotNil(t, use.Authentications["auth1"]) + assert.Equal(t, "alice", use.Authentications["auth1"].Basic.Username) + assert.Equal(t, "secret", use.Authentications["auth1"].Basic.Password) - testCases := []testCase{ - { - desp: "string success", - data: "{\"key\": \"value\"}", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: "string fail", - data: "{\"key\": }", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `invalid character '}' looking for beginning of value`, - }, - { - desp: `object success (without quotes)`, - data: `{"key": "value"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success`, - data: `{"schema": "{\"key\": \"value\"}"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success (without quotes)`, - data: `{"schema": {"key": "value"}}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object fail`, - data: `{"schema": "schema name}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `unexpected end of JSON input`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DataInputSchema - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err, tc.desp) - assert.Regexp(t, tc.err, err, tc.desp) - 
return - } + assert.NotNil(t, use.Errors["error1"]) + assert.Equal(t, "http://example.com/errors", use.Errors["error1"].Type.String()) + assert.Equal(t, 404, use.Errors["error1"].Status) - assert.NoError(t, err, tc.desp) - assert.Equal(t, tc.expect.Schema, v.Schema, tc.desp) - assert.Equal(t, tc.expect.FailOnValidationErrors, v.FailOnValidationErrors, tc.desp) - }) - } + assert.NotNil(t, use.Extensions.Key("ext1")) + assert.Equal(t, "call", use.Extensions.Key("ext1").Extend) + + assert.NotNil(t, use.Functions["func1"]) + assert.IsType(t, &CallHTTP{With: HTTPArguments{Endpoint: NewEndpoint("http://example.com")}}, use.Functions["func1"]) + + assert.NotNil(t, use.Retries["retry1"]) + assert.Equal(t, "PT5S", use.Retries["retry1"].Delay.AsExpression()) + assert.Equal(t, 3, use.Retries["retry1"].Limit.Attempt.Count) + + assert.Equal(t, []string{"secret1", "secret2"}, use.Secrets) + + assert.NotNil(t, use.Timeouts["timeout1"]) + assert.Equal(t, "PT1M", use.Timeouts["timeout1"].After.AsExpression()) + + assert.NotNil(t, use.Catalogs["catalog1"]) + assert.Equal(t, "http://example.com", use.Catalogs["catalog1"].Endpoint.URITemplate.String()) } -func TestConstantsUnmarshalJSON(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"testkey":"testvalue"}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - util.HttpClient = *server.Client() - - type testCase struct { - desp string - data string - expect Constants - err string - } - testCases := []testCase{ - { - desp: "object success", - data: `{"testkey":"testvalue}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `unexpected end of JSON input`, +func TestUse_Validation(t *testing.T) { + use := &Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), }, - { - desp: "object success", - data: `[]`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - // TODO: improve message: field is empty - err: `constants must be string or object`, + Errors: map[string]*Error{ + "error1": {Type: &URITemplateOrRuntimeExpr{&LiteralUri{"http://example.com/errors"}}, Status: 404}, }, - { - desp: "object success", - data: `{"testkey":"testvalue"}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: ``, + Extensions: ExtensionList{}, + Functions: map[string]Task{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com"), Method: "GET"}}, }, - { - desp: "file success", - data: fmt.Sprintf(`"%s/test.json"`, server.URL), - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, }, - err: ``, }, - { - desp: "file success", - data: `"uri_invalid"`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `file not found: "uri_invalid"`, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Constants - err := json.Unmarshal([]byte(tc.data), &v) + 
err := validate.Struct(use) + assert.NoError(t, err) - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } + // Test with missing required fields + use.Catalogs["catalog1"].Endpoint = nil + err = validate.Struct(use) + assert.Error(t, err) - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s'", validationErr.Namespace(), validationErr.Tag()) + } + + assert.Contains(t, validationErrors.Error(), "Catalogs[catalog1].Endpoint") + assert.Contains(t, validationErrors.Error(), "required") } } diff --git a/model/workflow_validator.go b/model/workflow_validator.go deleted file mode 100644 index dd9d1e7..0000000 --- a/model/workflow_validator.go +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "context" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -type contextValueKey string - -const ValidatorContextValue contextValueKey = "value" - -type WorkflowValidator func(mapValues ValidatorContext, sl validator.StructLevel) - -func ValidationWrap(fnCtx WorkflowValidator) validator.StructLevelFuncCtx { - return func(ctx context.Context, structLevel validator.StructLevel) { - if fnCtx != nil { - if mapValues, ok := ctx.Value(ValidatorContextValue).(ValidatorContext); ok { - fnCtx(mapValues, structLevel) - } - } - } -} - -// +builder-gen:ignore=true -type ValidatorContext struct { - States map[string]State - Functions map[string]Function - Events map[string]Event - Retries map[string]Retry - Errors map[string]Error -} - -func (c *ValidatorContext) init(workflow *Workflow) { - c.States = make(map[string]State, len(workflow.States)) - for _, state := range workflow.States { - c.States[state.BaseState.Name] = state - } - - c.Functions = make(map[string]Function, len(workflow.Functions)) - for _, function := range workflow.Functions { - c.Functions[function.Name] = function - } - - c.Events = make(map[string]Event, len(workflow.Events)) - for _, event := range workflow.Events { - c.Events[event.Name] = event - } - - c.Retries = make(map[string]Retry, len(workflow.Retries)) - for _, retry := range workflow.Retries { - c.Retries[retry.Name] = retry - } - - c.Errors = make(map[string]Error, len(workflow.Errors)) - for _, error := range workflow.Errors { - c.Errors[error.Name] = error - } -} - -func (c *ValidatorContext) ExistState(name string) bool { - if c.States == nil { - return true - } - _, ok := c.States[name] - return ok -} - -func (c *ValidatorContext) ExistFunction(name string) bool { - if c.Functions == nil { - return true - } - _, ok := c.Functions[name] - return ok -} - -func (c *ValidatorContext) ExistEvent(name string) bool { - if c.Events == nil { - 
return true
-    }
-    _, ok := c.Events[name]
-    return ok
-}
-
-func (c *ValidatorContext) ExistRetry(name string) bool {
-    if c.Retries == nil {
-        return true
-    }
-    _, ok := c.Retries[name]
-    return ok
-}
-
-func (c *ValidatorContext) ExistError(name string) bool {
-    if c.Errors == nil {
-        return true
-    }
-    _, ok := c.Errors[name]
-    return ok
-}
-
-func NewValidatorContext(object any) context.Context {
-    contextValue := ValidatorContext{}
-
-    if workflow, ok := object.(*Workflow); ok {
-        for i := range workflow.States {
-            s := &workflow.States[i]
-            if s.BaseState.Transition != nil {
-                s.BaseState.Transition.stateParent = s
-            }
-            for _, onError := range s.BaseState.OnErrors {
-                if onError.Transition != nil {
-                    onError.Transition.stateParent = s
-                }
-            }
-            if s.Type == StateTypeSwitch {
-                if s.SwitchState.DefaultCondition.Transition != nil {
-                    s.SwitchState.DefaultCondition.Transition.stateParent = s
-                }
-                for _, e := range s.SwitchState.EventConditions {
-                    if e.Transition != nil {
-                        e.Transition.stateParent = s
-                    }
-                }
-                for _, d := range s.SwitchState.DataConditions {
-                    if d.Transition != nil {
-                        d.Transition.stateParent = s
-                    }
-                }
-            }
-        }
-        contextValue.init(workflow)
-    }
-
-    return context.WithValue(context.Background(), ValidatorContextValue, contextValue)
-}
-
-func init() {
-    // TODO: create states graph to complex check
-
-    val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onErrorStructLevelValidationCtx), OnError{})
-    val.GetValidator().RegisterStructValidationCtx(ValidationWrap(transitionStructLevelValidationCtx), Transition{})
-    val.GetValidator().RegisterStructValidationCtx(ValidationWrap(startStructLevelValidationCtx), Start{})
-}
-
-func startStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) {
-    start := structLevel.Current().Interface().(Start)
-    if start.StateName != "" && !ctx.ExistState(start.StateName) {
-        structLevel.ReportError(start.StateName, "StateName", "stateName", val.TagExists, "")
-        return
-    }
-}
-
-func onErrorStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) {
-    onError := structLevel.Current().Interface().(OnError)
-    hasErrorRef := onError.ErrorRef != ""
-    hasErrorRefs := len(onError.ErrorRefs) > 0
-
-    if !hasErrorRef && !hasErrorRefs {
-        structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagRequired, "")
-    } else if hasErrorRef && hasErrorRefs {
-        structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExclusive, "")
-        return
-    }
-
-    if onError.ErrorRef != "" && !ctx.ExistError(onError.ErrorRef) {
-        structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExists, "")
-    }
-
-    for _, errorRef := range onError.ErrorRefs {
-        if !ctx.ExistError(errorRef) {
-            structLevel.ReportError(onError.ErrorRefs, "ErrorRefs", "ErrorRefs", val.TagExists, "")
-        }
-    }
-}
-
-func transitionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) {
-    // Naive check if transitions exist
-    transition := structLevel.Current().Interface().(Transition)
-    if ctx.ExistState(transition.NextState) {
-        if transition.stateParent != nil {
-            parentBaseState := transition.stateParent
-
-            if parentBaseState.Name == transition.NextState {
-                // TODO: Improve recursive check
-                structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagRecursiveState, parentBaseState.Name)
-            }
-
-            if parentBaseState.UsedForCompensation && !ctx.States[transition.NextState].BaseState.UsedForCompensation {
-                structLevel.ReportError(transition.NextState,
-                    "NextState", "NextState", val.TagTransitionUseForCompensation, "")
-            }
-
-            if !parentBaseState.UsedForCompensation && ctx.States[transition.NextState].BaseState.UsedForCompensation {
-                structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionMainWorkflow, "")
-            }
-        }
-
-    } else {
-        structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagExists, "")
-    }
-}
-
-func validTransitionAndEnd(structLevel validator.StructLevel, field any, transition *Transition, end *End) {
-    hasTransition := transition != nil
-    isEnd := end != nil && (end.Terminate || end.Compensate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end
-
-    if !hasTransition && !isEnd {
-        structLevel.ReportError(field, "Transition", "transition", val.TagRequired, "")
-    } else if hasTransition && isEnd {
-        structLevel.ReportError(field, "Transition", "transition", val.TagExclusive, "")
-    }
-}
-
-func validationNotExclusiveParameters(values []bool) bool {
-    hasOne := false
-    hasTwo := false
-
-    for i, val1 := range values {
-        if val1 {
-            hasOne = true
-            for j, val2 := range values {
-                if i != j && val2 {
-                    hasTwo = true
-                    break
-                }
-            }
-            break
-        }
-    }
-
-    return hasOne && hasTwo
-}
diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go
deleted file mode 100644
index 2a6b5a0..0000000
--- a/model/workflow_validator_test.go
+++ /dev/null
@@ -1,544 +0,0 @@
-// Copyright 2022 The Serverless Workflow Specification Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
- -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func buildWorkflow() *Workflow { - return &Workflow{ - BaseWorkflow: BaseWorkflow{ - ID: "id", - Key: "key", - Name: "name", - SpecVersion: "0.8", - Version: "0.1", - ExpressionLang: JqExpressionLang, - }, - } -} - -func buildEndByState(state *State, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - state.BaseState.End = end - return end -} - -func buildEndByDefaultCondition(defaultCondition *DefaultCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - defaultCondition.End = end - return end -} - -func buildEndByDataCondition(dataCondition *DataCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - dataCondition.End = end - return end -} - -func buildEndByEventCondition(eventCondition *EventCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - eventCondition.End = end - return end -} - -func buildStart(workflow *Workflow, state *State) { - start := &Start{ - StateName: state.BaseState.Name, - } - workflow.BaseWorkflow.Start = start -} - -func buildTransitionByState(state, nextState *State, compensate bool) { - state.BaseState.Transition = &Transition{ - NextState: nextState.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDataCondition(dataCondition *DataCondition, state *State, compensate bool) { - dataCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByEventCondition(eventCondition *EventCondition, state *State, compensate bool) { - eventCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDefaultCondition(defaultCondition *DefaultCondition, state *State) { - defaultCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - } -} - -func buildTimeouts(workflow *Workflow) *Timeouts { - timeouts := Timeouts{} - workflow.BaseWorkflow.Timeouts = &timeouts - return workflow.BaseWorkflow.Timeouts -} - -func TestBaseWorkflowStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "id exclude key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "id" - model.Key = "" - return *model - }, - }, - { - Desp: "key exclude id", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "key" - return *model - }, - }, - { - Desp: "without id and key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "" - return *model - }, - Err: `workflow.id required when "workflow.key" is not defined -workflow.key required when "workflow.id" is not defined`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.ExpressionLang = JqExpressionLang + "invalid" - return *model - }, - Err: 
`workflow.expressionLang need by one of [jq jsonpath cel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestContinueAsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].BaseState.End.ContinueAs = &ContinueAs{ - WorkflowID: "sub workflow", - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "P1M", - }, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End.ContinueAs.WorkflowID = "" - return *model - }, - Err: `workflow.states[0].end.continueAs.workflowID is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOnErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - baseWorkflow.States[0].BaseState.OnErrors = []OnError{{ - ErrorRef: "error 1", - }, { - ErrorRefs: []string{"error 1", "error 2"}, - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[0].ErrorRef = "error 1" - model.States[0].OnErrors[0].ErrorRefs = []string{"error 2"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists and exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists errorRef", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef don't exist "invalid error name"`, - }, - { - Desp: "exists errorRefs", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRefs don't exist ["invalid error name"]`, - }, - { - Desp: "duplicate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[1].ErrorRefs = []string{"error 1", "error 1"} - return *model - }, - Err: `workflow.states[0].onErrors[1].errorRefs has duplicate value`, 
- }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStartStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildStart(baseWorkflow, operationState) - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "" - return *model - }, - Err: `workflow.start.stateName is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "start state not found" - return *model - }, - Err: `workflow.start.stateName don't exist "start state not found"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestTransitionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 5) - - operationState := buildOperationState(baseWorkflow, "start state") - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - operationState2.BaseState.CompensatedBy = "compensation next state 1" - action2 := buildActionByOperationState(operationState2, "action 1") - buildFunctionRef(baseWorkflow, action2, "function 2") - - buildTransitionByState(operationState, operationState2, false) - - operationState3 := buildOperationState(baseWorkflow, "compensation next state 1") - operationState3.BaseState.UsedForCompensation = true - action3 := buildActionByOperationState(operationState3, "action 1") - buildFunctionRef(baseWorkflow, action3, "function 3") - - operationState4 := buildOperationState(baseWorkflow, "compensation next state 2") - operationState4.BaseState.UsedForCompensation = true - action4 := buildActionByOperationState(operationState4, "action 1") - buildFunctionRef(baseWorkflow, action4, "function 4") - - buildTransitionByState(operationState3, operationState4, false) - - operationState5 := buildOperationState(baseWorkflow, "compensation next state 3") - buildEndByState(operationState5, true, false) - operationState5.BaseState.UsedForCompensation = true - action5 := buildActionByOperationState(operationState5, "action 5") - buildFunctionRef(baseWorkflow, action5, "function 5") - - buildTransitionByState(operationState4, operationState5, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "state recursive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].transition.nextState can't no be recursive "start state"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = "invalid next state" - return *model - }, - Err: `workflow.states[0].transition.nextState don't exist "invalid next state"`, - }, - { - Desp: "transitionusedforcompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - 
model.States[3].BaseState.UsedForCompensation = false - return *model - }, - Err: `Key: 'Workflow.States[2].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transitionusedforcompensation' tag -Key: 'Workflow.States[3].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - { - Desp: "transtionmainworkflow", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = model.States[3].BaseState.Name - return *model - }, - Err: `Key: 'Workflow.States[0].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataInputSchemaStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - sampleSchema := FromString("sample schema") - - testCases := []ValidationCase{ - { - Desp: "empty DataInputSchema", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{} - return *model - }, - Err: `workflow.dataInputSchema.schema is required`, - }, - { - Desp: "filled Schema, default failOnValidationErrors", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{ - Schema: &sampleSchema, - } - return *model - }, - }, - } - - //fmt.Printf("%+v", testCases[0].Model) - StructLevelValidationCtx(t, testCases) -} - -func TestSecretsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "workflow secrets.name repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Secrets = []string{"secret 1", "secret 1"} - return *model - }, - Err: `workflow.secrets has duplicate value`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Errors[0].Name = "" - return *model - }, - Err: `workflow.errors[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Errors = Errors{model.Errors[0], model.Errors[0]} - return *model - }, - Err: `workflow.errors has duplicate "name"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -type ValidationCase struct { - Desp string - Model func() Workflow - Err string -} - -func StructLevelValidationCtx(t *testing.T, testCases 
[]ValidationCase) { - for _, tc := range testCases { - t.Run(tc.Desp, func(t *testing.T) { - model := tc.Model() - err := val.GetValidator().StructCtx(NewValidatorContext(&model), model) - err = val.WorkflowError(err) - if tc.Err != "" { - if assert.Error(t, err) { - assert.Equal(t, tc.Err, err.Error()) - } - } else { - assert.NoError(t, err) - } - }) - } -} diff --git a/model/zz_generated.buildergen.go b/model/zz_generated.buildergen.go deleted file mode 100644 index 42564fe..0000000 --- a/model/zz_generated.buildergen.go +++ /dev/null @@ -1,3139 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Code generated by builder-gen. DO NOT EDIT. - -package model - -import ( - floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - intstr "k8s.io/apimachinery/pkg/util/intstr" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewActionBuilder() *ActionBuilder { - builder := &ActionBuilder{} - builder.model = Action{} - builder.model.ApplyDefault() - builder.actiondatafilter = NewActionDataFilterBuilder() - return builder -} - -type ActionBuilder struct { - model Action - functionref *FunctionRefBuilder - eventref *EventRefBuilder - subflowref *WorkflowRefBuilder - sleep *SleepBuilder - actiondatafilter *ActionDataFilterBuilder -} - -func (b *ActionBuilder) ID(input string) *ActionBuilder { - b.model.ID = input - return b -} - -func (b *ActionBuilder) Name(input string) *ActionBuilder { - b.model.Name = input - return b -} - -func (b *ActionBuilder) FunctionRef() *FunctionRefBuilder { - if b.functionref == nil { - b.functionref = NewFunctionRefBuilder() - } - return b.functionref -} - -func (b *ActionBuilder) EventRef() *EventRefBuilder { - if b.eventref == nil { - b.eventref = NewEventRefBuilder() - } - return b.eventref -} - -func (b *ActionBuilder) SubFlowRef() *WorkflowRefBuilder { - if b.subflowref == nil { - b.subflowref = NewWorkflowRefBuilder() - } - return b.subflowref -} - -func (b *ActionBuilder) Sleep() *SleepBuilder { - if b.sleep == nil { - b.sleep = NewSleepBuilder() - } - return b.sleep -} - -func (b *ActionBuilder) RetryRef(input string) *ActionBuilder { - b.model.RetryRef = input - return b -} - -func (b *ActionBuilder) NonRetryableErrors(input []string) *ActionBuilder { - b.model.NonRetryableErrors = input - return b -} - -func (b *ActionBuilder) RetryableErrors(input []string) *ActionBuilder { - b.model.RetryableErrors = input - return b -} - -func (b *ActionBuilder) ActionDataFilter() *ActionDataFilterBuilder { - return b.actiondatafilter -} - -func (b *ActionBuilder) Condition(input string) *ActionBuilder { - b.model.Condition = input - return b -} - -func (b *ActionBuilder) Build() Action { - if b.functionref != nil { - functionref := b.functionref.Build() - b.model.FunctionRef = &functionref - } - if b.eventref != nil { - eventref := 
b.eventref.Build() - b.model.EventRef = &eventref - } - if b.subflowref != nil { - subflowref := b.subflowref.Build() - b.model.SubFlowRef = &subflowref - } - if b.sleep != nil { - sleep := b.sleep.Build() - b.model.Sleep = &sleep - } - b.model.ActionDataFilter = b.actiondatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewActionDataFilterBuilder() *ActionDataFilterBuilder { - builder := &ActionDataFilterBuilder{} - builder.model = ActionDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type ActionDataFilterBuilder struct { - model ActionDataFilter -} - -func (b *ActionDataFilterBuilder) FromStateData(input string) *ActionDataFilterBuilder { - b.model.FromStateData = input - return b -} - -func (b *ActionDataFilterBuilder) UseResults(input bool) *ActionDataFilterBuilder { - b.model.UseResults = input - return b -} - -func (b *ActionDataFilterBuilder) Results(input string) *ActionDataFilterBuilder { - b.model.Results = input - return b -} - -func (b *ActionDataFilterBuilder) ToStateData(input string) *ActionDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *ActionDataFilterBuilder) Build() ActionDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthBuilder() *AuthBuilder { - builder := &AuthBuilder{} - builder.model = Auth{} - builder.properties = NewAuthPropertiesBuilder() - return builder -} - -type AuthBuilder struct { - model Auth - properties *AuthPropertiesBuilder -} - -func (b *AuthBuilder) Name(input string) *AuthBuilder { - b.model.Name = input - return b -} - -func (b *AuthBuilder) Scheme(input AuthType) *AuthBuilder { - b.model.Scheme = input - return b -} - -func (b *AuthBuilder) Properties() *AuthPropertiesBuilder { - return b.properties -} - -func (b *AuthBuilder) Build() Auth { - b.model.Properties = b.properties.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthPropertiesBuilder() *AuthPropertiesBuilder { - builder := &AuthPropertiesBuilder{} - builder.model = AuthProperties{} - return builder -} - -type AuthPropertiesBuilder struct { - model AuthProperties - basic *BasicAuthPropertiesBuilder - bearer *BearerAuthPropertiesBuilder - oauth2 *OAuth2AuthPropertiesBuilder -} - -func (b *AuthPropertiesBuilder) Basic() *BasicAuthPropertiesBuilder { - if b.basic == nil { - b.basic = NewBasicAuthPropertiesBuilder() - } - return b.basic -} - -func (b *AuthPropertiesBuilder) Bearer() *BearerAuthPropertiesBuilder { - if b.bearer == nil { - b.bearer = NewBearerAuthPropertiesBuilder() - } - return b.bearer -} - -func (b *AuthPropertiesBuilder) OAuth2() *OAuth2AuthPropertiesBuilder { - if b.oauth2 == nil { - b.oauth2 = NewOAuth2AuthPropertiesBuilder() - } - return b.oauth2 -} - -func (b *AuthPropertiesBuilder) Build() AuthProperties { - if b.basic != nil { - basic := b.basic.Build() - b.model.Basic = &basic - } - if b.bearer != nil { - bearer := b.bearer.Build() - b.model.Bearer = &bearer - } - if b.oauth2 != nil { - oauth2 := b.oauth2.Build() - b.model.OAuth2 = &oauth2 - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewAuthsBuilder() *AuthsBuilder { - builder := &AuthsBuilder{} - builder.model = Auths{} - return builder -} - -type AuthsBuilder struct { - model Auths -} - -func (b *AuthsBuilder) Build() Auths { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBaseStateBuilder() *BaseStateBuilder { - builder := &BaseStateBuilder{} - builder.model = BaseState{} - builder.onerrors = []*OnErrorBuilder{} - return builder -} - -type BaseStateBuilder struct { - model BaseState - onerrors []*OnErrorBuilder - transition *TransitionBuilder - statedatafilter *StateDataFilterBuilder - end *EndBuilder -} - -func (b *BaseStateBuilder) ID(input string) *BaseStateBuilder { - b.model.ID = input - return b -} - -func (b *BaseStateBuilder) Name(input string) *BaseStateBuilder { - b.model.Name = input - return b -} - -func (b *BaseStateBuilder) Type(input StateType) *BaseStateBuilder { - b.model.Type = input - return b -} - -func (b *BaseStateBuilder) AddOnErrors() *OnErrorBuilder { - builder := NewOnErrorBuilder() - b.onerrors = append(b.onerrors, builder) - return builder -} - -func (b *BaseStateBuilder) RemoveOnErrors(remove *OnErrorBuilder) { - for i, val := range b.onerrors { - if val == remove { - b.onerrors[i] = b.onerrors[len(b.onerrors)-1] - b.onerrors = b.onerrors[:len(b.onerrors)-1] - } - } -} -func (b *BaseStateBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *BaseStateBuilder) StateDataFilter() *StateDataFilterBuilder { - if b.statedatafilter == nil { - b.statedatafilter = NewStateDataFilterBuilder() - } - return b.statedatafilter -} - -func (b *BaseStateBuilder) CompensatedBy(input string) *BaseStateBuilder { - b.model.CompensatedBy = input - return b -} - -func (b *BaseStateBuilder) UsedForCompensation(input bool) *BaseStateBuilder { - b.model.UsedForCompensation = input - return b -} - -func (b *BaseStateBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *BaseStateBuilder) Build() BaseState { - b.model.OnErrors = []OnError{} - for _, v := range b.onerrors { - b.model.OnErrors = append(b.model.OnErrors, v.Build()) - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.statedatafilter != nil { - statedatafilter := b.statedatafilter.Build() - b.model.StateDataFilter = &statedatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBaseWorkflowBuilder() *BaseWorkflowBuilder { - builder := &BaseWorkflowBuilder{} - builder.model = BaseWorkflow{} - builder.model.ApplyDefault() - builder.errors = []*ErrorBuilder{} - builder.auth = []*AuthBuilder{} - return builder -} - -type BaseWorkflowBuilder struct { - model BaseWorkflow - start *StartBuilder - datainputschema *DataInputSchemaBuilder - constants *ConstantsBuilder - timeouts *TimeoutsBuilder - errors []*ErrorBuilder - auth []*AuthBuilder -} - -func (b *BaseWorkflowBuilder) ID(input string) *BaseWorkflowBuilder { - b.model.ID = input - return b -} - -func (b *BaseWorkflowBuilder) Key(input string) *BaseWorkflowBuilder { - b.model.Key = input - return b -} - -func (b *BaseWorkflowBuilder) Name(input string) *BaseWorkflowBuilder { - b.model.Name = input - return b -} - -func (b *BaseWorkflowBuilder) Description(input string) *BaseWorkflowBuilder { - b.model.Description = input - return b -} - -func (b *BaseWorkflowBuilder) Version(input string) *BaseWorkflowBuilder { - b.model.Version = input - return b -} - -func (b *BaseWorkflowBuilder) Start() *StartBuilder { - if b.start == nil { - b.start = NewStartBuilder() - } - return b.start -} - -func (b *BaseWorkflowBuilder) Annotations(input []string) *BaseWorkflowBuilder { - b.model.Annotations = input - return b -} - -func (b *BaseWorkflowBuilder) DataInputSchema() *DataInputSchemaBuilder { - if b.datainputschema == nil { - b.datainputschema = NewDataInputSchemaBuilder() - } - return b.datainputschema -} - -func (b *BaseWorkflowBuilder) SpecVersion(input string) *BaseWorkflowBuilder { - b.model.SpecVersion = input - return b -} - -func (b *BaseWorkflowBuilder) Secrets(input Secrets) *BaseWorkflowBuilder { - b.model.Secrets = input - return b -} - -func (b *BaseWorkflowBuilder) Constants() *ConstantsBuilder { - if b.constants == nil { - b.constants = NewConstantsBuilder() - } - return b.constants -} - -func (b *BaseWorkflowBuilder) ExpressionLang(input ExpressionLangType) *BaseWorkflowBuilder { - b.model.ExpressionLang = input - return b -} - -func (b *BaseWorkflowBuilder) Timeouts() *TimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BaseWorkflowBuilder) AddErrors() *ErrorBuilder { - builder := NewErrorBuilder() - b.errors = append(b.errors, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveErrors(remove *ErrorBuilder) { - for i, val := range b.errors { - if val == remove { - b.errors[i] = b.errors[len(b.errors)-1] - b.errors = b.errors[:len(b.errors)-1] - } - } -} -func (b *BaseWorkflowBuilder) KeepActive(input bool) *BaseWorkflowBuilder { - b.model.KeepActive = input - return b -} - -func (b *BaseWorkflowBuilder) Metadata(input Metadata) *BaseWorkflowBuilder { - b.model.Metadata = input - return b -} - -func (b *BaseWorkflowBuilder) AutoRetries(input bool) *BaseWorkflowBuilder { - b.model.AutoRetries = input - return b -} - -func (b *BaseWorkflowBuilder) AddAuth() *AuthBuilder { - builder := NewAuthBuilder() - b.auth = append(b.auth, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveAuth(remove *AuthBuilder) { - for i, val := range b.auth { - if val == remove { - b.auth[i] = b.auth[len(b.auth)-1] - b.auth = b.auth[:len(b.auth)-1] - } - } -} -func (b *BaseWorkflowBuilder) Build() BaseWorkflow { - if b.start != nil { - start := b.start.Build() - b.model.Start = &start - } - if b.datainputschema != nil { - datainputschema := b.datainputschema.Build() - b.model.DataInputSchema = &datainputschema - } - if b.constants 
!= nil { - constants := b.constants.Build() - b.model.Constants = &constants - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - b.model.Errors = []Error{} - for _, v := range b.errors { - b.model.Errors = append(b.model.Errors, v.Build()) - } - b.model.Auth = []Auth{} - for _, v := range b.auth { - b.model.Auth = append(b.model.Auth, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBasicAuthPropertiesBuilder() *BasicAuthPropertiesBuilder { - builder := &BasicAuthPropertiesBuilder{} - builder.model = BasicAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BasicAuthPropertiesBuilder struct { - model BasicAuthProperties - CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Secret(input string) *BasicAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Username(input string) *BasicAuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Password(input string) *BasicAuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Build() BasicAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBearerAuthPropertiesBuilder() *BearerAuthPropertiesBuilder { - builder := &BearerAuthPropertiesBuilder{} - builder.model = BearerAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BearerAuthPropertiesBuilder struct { - model BearerAuthProperties - CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Secret(input string) *BearerAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Token(input string) *BearerAuthPropertiesBuilder { - b.model.Token = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Build() BearerAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBranchBuilder() *BranchBuilder { - builder := &BranchBuilder{} - builder.model = Branch{} - builder.actions = []*ActionBuilder{} - return builder -} - -type BranchBuilder struct { - model Branch - actions []*ActionBuilder - timeouts *BranchTimeoutsBuilder -} - -func (b *BranchBuilder) Name(input string) *BranchBuilder { - b.model.Name = input - return b -} - -func (b *BranchBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *BranchBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *BranchBuilder) Timeouts() *BranchTimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewBranchTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BranchBuilder) Build() Branch { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBranchTimeoutsBuilder() *BranchTimeoutsBuilder { - builder := &BranchTimeoutsBuilder{} - builder.model = BranchTimeouts{} - return builder -} - -type BranchTimeoutsBuilder struct { - model BranchTimeouts -} - -func (b *BranchTimeoutsBuilder) ActionExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) BranchExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) Build() BranchTimeouts { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCallbackStateBuilder() *CallbackStateBuilder { - builder := &CallbackStateBuilder{} - builder.model = CallbackState{} - builder.action = NewActionBuilder() - return builder -} - -type CallbackStateBuilder struct { - model CallbackState - action *ActionBuilder - timeouts *CallbackStateTimeoutBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *CallbackStateBuilder) Action() *ActionBuilder { - return b.action -} - -func (b *CallbackStateBuilder) EventRef(input string) *CallbackStateBuilder { - b.model.EventRef = input - return b -} - -func (b *CallbackStateBuilder) Timeouts() *CallbackStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewCallbackStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *CallbackStateBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *CallbackStateBuilder) Build() CallbackState { - b.model.Action = b.action.Build() - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewCallbackStateTimeoutBuilder() *CallbackStateTimeoutBuilder { - builder := &CallbackStateTimeoutBuilder{} - builder.model = CallbackStateTimeout{} - return builder -} - -type CallbackStateTimeoutBuilder struct { - model CallbackStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *CallbackStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *CallbackStateTimeoutBuilder) ActionExecTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) EventTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) Build() CallbackStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCommonBuilder() *CommonBuilder { - builder := &CommonBuilder{} - builder.model = Common{} - return builder -} - -type CommonBuilder struct { - model Common -} - -func (b *CommonBuilder) Metadata(input Metadata) *CommonBuilder { - b.model.Metadata = input - return b -} - -func (b *CommonBuilder) Build() Common { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsBuilder() *ConstantsBuilder { - builder := &ConstantsBuilder{} - builder.model = Constants{} - return builder -} - -type ConstantsBuilder struct { - model Constants -} - -func (b *ConstantsBuilder) Data(input ConstantsData) *ConstantsBuilder { - b.model.Data = input - return b -} - -func (b *ConstantsBuilder) Build() Constants { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsDataBuilder() *ConstantsDataBuilder { - builder := &ConstantsDataBuilder{} - builder.model = ConstantsData{} - return builder -} - -type ConstantsDataBuilder struct { - model ConstantsData -} - -func (b *ConstantsDataBuilder) Build() ConstantsData { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewContinueAsBuilder() *ContinueAsBuilder { - builder := &ContinueAsBuilder{} - builder.model = ContinueAs{} - builder.data = NewObjectBuilder() - builder.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - return builder -} - -type ContinueAsBuilder struct { - model ContinueAs - data *ObjectBuilder - workflowexectimeout *WorkflowExecTimeoutBuilder -} - -func (b *ContinueAsBuilder) WorkflowID(input string) *ContinueAsBuilder { - b.model.WorkflowID = input - return b -} - -func (b *ContinueAsBuilder) Version(input string) *ContinueAsBuilder { - b.model.Version = input - return b -} - -func (b *ContinueAsBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ContinueAsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - return b.workflowexectimeout -} - -func (b *ContinueAsBuilder) Build() ContinueAs { - b.model.Data = b.data.Build() - b.model.WorkflowExecTimeout = b.workflowexectimeout.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func NewCorrelationBuilder() *CorrelationBuilder { - builder := &CorrelationBuilder{} - builder.model = Correlation{} - return builder -} - -type CorrelationBuilder struct { - model Correlation -} - -func (b *CorrelationBuilder) ContextAttributeName(input string) *CorrelationBuilder { - b.model.ContextAttributeName = input - return b -} - -func (b *CorrelationBuilder) ContextAttributeValue(input string) *CorrelationBuilder { - b.model.ContextAttributeValue = input - return b -} - -func (b *CorrelationBuilder) Build() Correlation { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCronBuilder() *CronBuilder { - builder := &CronBuilder{} - builder.model = Cron{} - return builder -} - -type CronBuilder struct { - model Cron -} - -func (b *CronBuilder) Expression(input string) *CronBuilder { - b.model.Expression = input - return b -} - -func (b *CronBuilder) ValidUntil(input string) *CronBuilder { - b.model.ValidUntil = input - return b -} - -func (b *CronBuilder) Build() Cron { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataConditionBuilder() *DataConditionBuilder { - builder := &DataConditionBuilder{} - builder.model = DataCondition{} - return builder -} - -type DataConditionBuilder struct { - model DataCondition - end *EndBuilder - transition *TransitionBuilder -} - -func (b *DataConditionBuilder) Name(input string) *DataConditionBuilder { - b.model.Name = input - return b -} - -func (b *DataConditionBuilder) Condition(input string) *DataConditionBuilder { - b.model.Condition = input - return b -} - -func (b *DataConditionBuilder) Metadata(input Metadata) *DataConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *DataConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DataConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DataConditionBuilder) Build() DataCondition { - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataInputSchemaBuilder() *DataInputSchemaBuilder { - builder := &DataInputSchemaBuilder{} - builder.model = DataInputSchema{} - builder.model.ApplyDefault() - return builder -} - -type DataInputSchemaBuilder struct { - model DataInputSchema - schema *ObjectBuilder -} - -func (b *DataInputSchemaBuilder) Schema() *ObjectBuilder { - if b.schema == nil { - b.schema = NewObjectBuilder() - } - return b.schema -} - -func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSchemaBuilder { - b.model.FailOnValidationErrors = input - return b -} - -func (b *DataInputSchemaBuilder) Build() DataInputSchema { - if b.schema != nil { - schema := b.schema.Build() - b.model.Schema = &schema - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewDefaultConditionBuilder() *DefaultConditionBuilder { - builder := &DefaultConditionBuilder{} - builder.model = DefaultCondition{} - return builder -} - -type DefaultConditionBuilder struct { - model DefaultCondition - transition *TransitionBuilder - end *EndBuilder -} - -func (b *DefaultConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DefaultConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DefaultConditionBuilder) Build() DefaultCondition { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDelayStateBuilder() *DelayStateBuilder { - builder := &DelayStateBuilder{} - builder.model = DelayState{} - return builder -} - -type DelayStateBuilder struct { - model DelayState -} - -func (b *DelayStateBuilder) TimeDelay(input string) *DelayStateBuilder { - b.model.TimeDelay = input - return b -} - -func (b *DelayStateBuilder) Build() DelayState { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEndBuilder() *EndBuilder { - builder := &EndBuilder{} - builder.model = End{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type EndBuilder struct { - model End - produceevents []*ProduceEventBuilder - continueas *ContinueAsBuilder -} - -func (b *EndBuilder) Terminate(input bool) *EndBuilder { - b.model.Terminate = input - return b -} - -func (b *EndBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *EndBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *EndBuilder) Compensate(input bool) *EndBuilder { - b.model.Compensate = input - return b -} - -func (b *EndBuilder) ContinueAs() *ContinueAsBuilder { - if b.continueas == nil { - b.continueas = NewContinueAsBuilder() - } - return b.continueas -} - -func (b *EndBuilder) Build() End { - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - if b.continueas != nil { - continueas := b.continueas.Build() - b.model.ContinueAs = &continueas - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewErrorBuilder() *ErrorBuilder { - builder := &ErrorBuilder{} - builder.model = Error{} - return builder -} - -type ErrorBuilder struct { - model Error -} - -func (b *ErrorBuilder) Name(input string) *ErrorBuilder { - b.model.Name = input - return b -} - -func (b *ErrorBuilder) Code(input string) *ErrorBuilder { - b.model.Code = input - return b -} - -func (b *ErrorBuilder) Description(input string) *ErrorBuilder { - b.model.Description = input - return b -} - -func (b *ErrorBuilder) Build() Error { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewErrorsBuilder() *ErrorsBuilder { - builder := &ErrorsBuilder{} - builder.model = Errors{} - return builder -} - -type ErrorsBuilder struct { - model Errors -} - -func (b *ErrorsBuilder) Build() Errors { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventBuilder() *EventBuilder { - builder := &EventBuilder{} - builder.model = Event{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - builder.correlation = []*CorrelationBuilder{} - return builder -} - -type EventBuilder struct { - model Event - CommonBuilder - correlation []*CorrelationBuilder -} - -func (b *EventBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *EventBuilder) Name(input string) *EventBuilder { - b.model.Name = input - return b -} - -func (b *EventBuilder) Source(input string) *EventBuilder { - b.model.Source = input - return b -} - -func (b *EventBuilder) Type(input string) *EventBuilder { - b.model.Type = input - return b -} - -func (b *EventBuilder) Kind(input EventKind) *EventBuilder { - b.model.Kind = input - return b -} - -func (b *EventBuilder) DataOnly(input bool) *EventBuilder { - b.model.DataOnly = input - return b -} - -func (b *EventBuilder) AddCorrelation() *CorrelationBuilder { - builder := NewCorrelationBuilder() - b.correlation = append(b.correlation, builder) - return builder -} - -func (b *EventBuilder) RemoveCorrelation(remove *CorrelationBuilder) { - for i, val := range b.correlation { - if val == remove { - b.correlation[i] = b.correlation[len(b.correlation)-1] - b.correlation = b.correlation[:len(b.correlation)-1] - } - } -} -func (b *EventBuilder) Build() Event { - b.model.Common = b.CommonBuilder.Build() - b.model.Correlation = []Correlation{} - for _, v := range b.correlation { - b.model.Correlation = append(b.model.Correlation, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventConditionBuilder() *EventConditionBuilder { - builder := &EventConditionBuilder{} - builder.model = EventCondition{} - return builder -} - -type EventConditionBuilder struct { - model EventCondition - eventdatafilter *EventDataFilterBuilder - end *EndBuilder - transition *TransitionBuilder -} - -func (b *EventConditionBuilder) Name(input string) *EventConditionBuilder { - b.model.Name = input - return b -} - -func (b *EventConditionBuilder) EventRef(input string) *EventConditionBuilder { - b.model.EventRef = input - return b -} - -func (b *EventConditionBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *EventConditionBuilder) Metadata(input Metadata) *EventConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *EventConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *EventConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *EventConditionBuilder) Build() EventCondition { - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventConditionsBuilder() *EventConditionsBuilder { - builder := &EventConditionsBuilder{} - builder.model = EventConditions{} - return builder -} - -type EventConditionsBuilder struct { - model EventConditions -} - -func (b *EventConditionsBuilder) Build() EventConditions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventDataFilterBuilder() *EventDataFilterBuilder { - builder := &EventDataFilterBuilder{} - builder.model = EventDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type EventDataFilterBuilder struct { - model EventDataFilter -} - -func (b *EventDataFilterBuilder) UseData(input bool) *EventDataFilterBuilder { - b.model.UseData = input - return b -} - -func (b *EventDataFilterBuilder) Data(input string) *EventDataFilterBuilder { - b.model.Data = input - return b -} - -func (b *EventDataFilterBuilder) ToStateData(input string) *EventDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *EventDataFilterBuilder) Build() EventDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventRefBuilder() *EventRefBuilder { - builder := &EventRefBuilder{} - builder.model = EventRef{} - builder.model.ApplyDefault() - return builder -} - -type EventRefBuilder struct { - model EventRef - data *ObjectBuilder -} - -func (b *EventRefBuilder) TriggerEventRef(input string) *EventRefBuilder { - b.model.TriggerEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventRef(input string) *EventRefBuilder { - b.model.ResultEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventTimeout(input string) *EventRefBuilder { - b.model.ResultEventTimeout = input - return b -} - -func (b *EventRefBuilder) Data() *ObjectBuilder { - if b.data == nil { - b.data = NewObjectBuilder() - } - return b.data -} - -func (b *EventRefBuilder) ContextAttributes(input map[string]Object) *EventRefBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *EventRefBuilder) Invoke(input InvokeKind) *EventRefBuilder { - b.model.Invoke = input - return b -} - -func (b *EventRefBuilder) Build() EventRef { - if b.data != nil { - data := b.data.Build() - b.model.Data = &data - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventStateBuilder() *EventStateBuilder { - builder := &EventStateBuilder{} - builder.model = EventState{} - builder.model.ApplyDefault() - builder.onevents = []*OnEventsBuilder{} - return builder -} - -type EventStateBuilder struct { - model EventState - onevents []*OnEventsBuilder - timeouts *EventStateTimeoutBuilder -} - -func (b *EventStateBuilder) Exclusive(input bool) *EventStateBuilder { - b.model.Exclusive = input - return b -} - -func (b *EventStateBuilder) AddOnEvents() *OnEventsBuilder { - builder := NewOnEventsBuilder() - b.onevents = append(b.onevents, builder) - return builder -} - -func (b *EventStateBuilder) RemoveOnEvents(remove *OnEventsBuilder) { - for i, val := range b.onevents { - if val == remove { - b.onevents[i] = b.onevents[len(b.onevents)-1] - b.onevents = b.onevents[:len(b.onevents)-1] - } - } -} -func (b *EventStateBuilder) Timeouts() *EventStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewEventStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *EventStateBuilder) Build() EventState { - b.model.OnEvents = []OnEvents{} - for _, v := range b.onevents { - b.model.OnEvents = append(b.model.OnEvents, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventStateTimeoutBuilder() *EventStateTimeoutBuilder { - builder := &EventStateTimeoutBuilder{} - builder.model = EventStateTimeout{} - return builder -} - -type EventStateTimeoutBuilder struct { - model EventStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *EventStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *EventStateTimeoutBuilder) ActionExecTimeout(input string) *EventStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) EventTimeout(input string) *EventStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) Build() EventStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventsBuilder() *EventsBuilder { - builder := &EventsBuilder{} - builder.model = Events{} - return builder -} - -type EventsBuilder struct { - model Events -} - -func (b *EventsBuilder) Build() Events { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewForEachStateBuilder() *ForEachStateBuilder { - builder := &ForEachStateBuilder{} - builder.model = ForEachState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type ForEachStateBuilder struct { - model ForEachState - actions []*ActionBuilder - timeouts *ForEachStateTimeoutBuilder -} - -func (b *ForEachStateBuilder) InputCollection(input string) *ForEachStateBuilder { - b.model.InputCollection = input - return b -} - -func (b *ForEachStateBuilder) OutputCollection(input string) *ForEachStateBuilder { - b.model.OutputCollection = input - return b -} - -func (b *ForEachStateBuilder) IterationParam(input string) *ForEachStateBuilder { - b.model.IterationParam = input - return b -} - -func (b *ForEachStateBuilder) BatchSize(input *intstr.IntOrString) *ForEachStateBuilder { - b.model.BatchSize = input - return b -} - -func (b *ForEachStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *ForEachStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *ForEachStateBuilder) Timeouts() *ForEachStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewForEachStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ForEachStateBuilder) Mode(input ForEachModeType) *ForEachStateBuilder { - b.model.Mode = input - return b -} - -func (b *ForEachStateBuilder) Build() ForEachState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewForEachStateTimeoutBuilder() *ForEachStateTimeoutBuilder { - builder := &ForEachStateTimeoutBuilder{} - builder.model = ForEachStateTimeout{} - return builder -} - -type ForEachStateTimeoutBuilder struct { - model ForEachStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ForEachStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ForEachStateTimeoutBuilder) ActionExecTimeout(input string) *ForEachStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *ForEachStateTimeoutBuilder) Build() ForEachStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionBuilder() *FunctionBuilder { - builder := &FunctionBuilder{} - builder.model = Function{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type FunctionBuilder struct { - model Function - CommonBuilder -} - -func (b *FunctionBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *FunctionBuilder) Name(input string) *FunctionBuilder { - b.model.Name = input - return b -} - -func (b *FunctionBuilder) Operation(input string) *FunctionBuilder { - b.model.Operation = input - return b -} - -func (b *FunctionBuilder) Type(input FunctionType) *FunctionBuilder { - b.model.Type = input - return b -} - -func (b *FunctionBuilder) AuthRef(input string) *FunctionBuilder { - b.model.AuthRef = input - return b -} - -func (b *FunctionBuilder) Build() Function { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionRefBuilder() *FunctionRefBuilder { - builder := &FunctionRefBuilder{} - builder.model = FunctionRef{} - builder.model.ApplyDefault() - return builder -} - -type FunctionRefBuilder struct { - model FunctionRef -} - -func (b *FunctionRefBuilder) RefName(input string) *FunctionRefBuilder { - b.model.RefName = input - return b -} - -func (b *FunctionRefBuilder) Arguments(input map[string]Object) *FunctionRefBuilder { - b.model.Arguments = input - return b -} - -func (b *FunctionRefBuilder) SelectionSet(input string) *FunctionRefBuilder { - b.model.SelectionSet = input - return b -} - -func (b *FunctionRefBuilder) Invoke(input InvokeKind) *FunctionRefBuilder { - b.model.Invoke = input - return b -} - -func (b *FunctionRefBuilder) Build() FunctionRef { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionsBuilder() *FunctionsBuilder { - builder := &FunctionsBuilder{} - builder.model = Functions{} - return builder -} - -type FunctionsBuilder struct { - model Functions -} - -func (b *FunctionsBuilder) Build() Functions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
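// Usage sketch for the Function and FunctionRef builders deleted above, written as if
// inside the model package; the function name and operation URI are illustrative.
func exampleFunctionAndRef() (Function, FunctionRef) {
	fn := NewFunctionBuilder()
	fn.Name("sendEmail").Operation("http://myapis.org/email.json#send")

	ref := NewFunctionRefBuilder()
	ref.RefName("sendEmail").Arguments(map[string]Object{})
	return fn.Build(), ref.Build()
}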
-func NewInjectStateBuilder() *InjectStateBuilder { - builder := &InjectStateBuilder{} - builder.model = InjectState{} - return builder -} - -type InjectStateBuilder struct { - model InjectState - timeouts *InjectStateTimeoutBuilder -} - -func (b *InjectStateBuilder) Data(input map[string]Object) *InjectStateBuilder { - b.model.Data = input - return b -} - -func (b *InjectStateBuilder) Timeouts() *InjectStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewInjectStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *InjectStateBuilder) Build() InjectState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewInjectStateTimeoutBuilder() *InjectStateTimeoutBuilder { - builder := &InjectStateTimeoutBuilder{} - builder.model = InjectStateTimeout{} - return builder -} - -type InjectStateTimeoutBuilder struct { - model InjectStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *InjectStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *InjectStateTimeoutBuilder) Build() InjectStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewMetadataBuilder() *MetadataBuilder { - builder := &MetadataBuilder{} - builder.model = Metadata{} - return builder -} - -type MetadataBuilder struct { - model Metadata -} - -func (b *MetadataBuilder) Build() Metadata { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOAuth2AuthPropertiesBuilder() *OAuth2AuthPropertiesBuilder { - builder := &OAuth2AuthPropertiesBuilder{} - builder.model = OAuth2AuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type OAuth2AuthPropertiesBuilder struct { - model OAuth2AuthProperties - CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Secret(input string) *OAuth2AuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Authority(input string) *OAuth2AuthPropertiesBuilder { - b.model.Authority = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) GrantType(input GrantType) *OAuth2AuthPropertiesBuilder { - b.model.GrantType = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientID(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientID = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientSecret(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientSecret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Scopes(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Scopes = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Username(input string) *OAuth2AuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Password(input string) *OAuth2AuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Audiences(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Audiences = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) SubjectToken(input string) *OAuth2AuthPropertiesBuilder { - b.model.SubjectToken = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedSubject(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedSubject = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedIssuer(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedIssuer = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Build() OAuth2AuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewObjectBuilder() *ObjectBuilder { - builder := &ObjectBuilder{} - builder.model = Object{} - builder.slicevalue = []*ObjectBuilder{} - return builder -} - -type ObjectBuilder struct { - model Object - slicevalue []*ObjectBuilder -} - -func (b *ObjectBuilder) Type(input Type) *ObjectBuilder { - b.model.Type = input - return b -} - -func (b *ObjectBuilder) StringValue(input string) *ObjectBuilder { - b.model.StringValue = input - return b -} - -func (b *ObjectBuilder) IntValue(input int32) *ObjectBuilder { - b.model.IntValue = input - return b -} - -func (b *ObjectBuilder) FloatValue(input float64) *ObjectBuilder { - b.model.FloatValue = input - return b -} - -func (b *ObjectBuilder) MapValue(input map[string]Object) *ObjectBuilder { - b.model.MapValue = input - return b -} - -func (b *ObjectBuilder) AddSliceValue() *ObjectBuilder { - builder := NewObjectBuilder() - b.slicevalue = append(b.slicevalue, builder) - return builder -} - -func (b *ObjectBuilder) RemoveSliceValue(remove *ObjectBuilder) { - for i, val := range b.slicevalue { - if val == remove { - b.slicevalue[i] = b.slicevalue[len(b.slicevalue)-1] - b.slicevalue = b.slicevalue[:len(b.slicevalue)-1] - } - } -} -func (b *ObjectBuilder) BoolValue(input bool) *ObjectBuilder { - b.model.BoolValue = input - return b -} - -func (b *ObjectBuilder) Build() Object { - b.model.SliceValue = []Object{} - for _, v := range b.slicevalue { - b.model.SliceValue = append(b.model.SliceValue, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOnErrorBuilder() *OnErrorBuilder { - builder := &OnErrorBuilder{} - builder.model = OnError{} - return builder -} - -type OnErrorBuilder struct { - model OnError - transition *TransitionBuilder - end *EndBuilder -} - -func (b *OnErrorBuilder) ErrorRef(input string) *OnErrorBuilder { - b.model.ErrorRef = input - return b -} - -func (b *OnErrorBuilder) ErrorRefs(input []string) *OnErrorBuilder { - b.model.ErrorRefs = input - return b -} - -func (b *OnErrorBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *OnErrorBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *OnErrorBuilder) Build() OnError { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
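// Usage sketch for the ObjectBuilder deleted above, written as if inside the model
// package. Object is the package's variant-style value type; nested slice elements
// added via AddSliceValue are only assembled into SliceValue when Build() runs.
func exampleObject() Object {
	b := NewObjectBuilder()
	item := b.AddSliceValue() // nested *ObjectBuilder for one slice element
	item.StringValue("hello")
	item = b.AddSliceValue()
	item.IntValue(42)
	return b.Build()
}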
-func NewOnEventsBuilder() *OnEventsBuilder { - builder := &OnEventsBuilder{} - builder.model = OnEvents{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - builder.eventdatafilter = NewEventDataFilterBuilder() - return builder -} - -type OnEventsBuilder struct { - model OnEvents - actions []*ActionBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *OnEventsBuilder) EventRefs(input []string) *OnEventsBuilder { - b.model.EventRefs = input - return b -} - -func (b *OnEventsBuilder) ActionMode(input ActionMode) *OnEventsBuilder { - b.model.ActionMode = input - return b -} - -func (b *OnEventsBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OnEventsBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OnEventsBuilder) EventDataFilter() *EventDataFilterBuilder { - return b.eventdatafilter -} - -func (b *OnEventsBuilder) Build() OnEvents { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - b.model.EventDataFilter = b.eventdatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOperationStateBuilder() *OperationStateBuilder { - builder := &OperationStateBuilder{} - builder.model = OperationState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type OperationStateBuilder struct { - model OperationState - actions []*ActionBuilder - timeouts *OperationStateTimeoutBuilder -} - -func (b *OperationStateBuilder) ActionMode(input ActionMode) *OperationStateBuilder { - b.model.ActionMode = input - return b -} - -func (b *OperationStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OperationStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OperationStateBuilder) Timeouts() *OperationStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewOperationStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *OperationStateBuilder) Build() OperationState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOperationStateTimeoutBuilder() *OperationStateTimeoutBuilder { - builder := &OperationStateTimeoutBuilder{} - builder.model = OperationStateTimeout{} - return builder -} - -type OperationStateTimeoutBuilder struct { - model OperationStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *OperationStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *OperationStateTimeoutBuilder) ActionExecTimeout(input string) *OperationStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *OperationStateTimeoutBuilder) Build() OperationStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewParallelStateBuilder() *ParallelStateBuilder { - builder := &ParallelStateBuilder{} - builder.model = ParallelState{} - builder.model.ApplyDefault() - builder.branches = []*BranchBuilder{} - return builder -} - -type ParallelStateBuilder struct { - model ParallelState - branches []*BranchBuilder - timeouts *ParallelStateTimeoutBuilder -} - -func (b *ParallelStateBuilder) AddBranches() *BranchBuilder { - builder := NewBranchBuilder() - b.branches = append(b.branches, builder) - return builder -} - -func (b *ParallelStateBuilder) RemoveBranches(remove *BranchBuilder) { - for i, val := range b.branches { - if val == remove { - b.branches[i] = b.branches[len(b.branches)-1] - b.branches = b.branches[:len(b.branches)-1] - } - } -} -func (b *ParallelStateBuilder) CompletionType(input CompletionType) *ParallelStateBuilder { - b.model.CompletionType = input - return b -} - -func (b *ParallelStateBuilder) NumCompleted(input intstr.IntOrString) *ParallelStateBuilder { - b.model.NumCompleted = input - return b -} - -func (b *ParallelStateBuilder) Timeouts() *ParallelStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewParallelStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ParallelStateBuilder) Build() ParallelState { - b.model.Branches = []Branch{} - for _, v := range b.branches { - b.model.Branches = append(b.model.Branches, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewParallelStateTimeoutBuilder() *ParallelStateTimeoutBuilder { - builder := &ParallelStateTimeoutBuilder{} - builder.model = ParallelStateTimeout{} - return builder -} - -type ParallelStateTimeoutBuilder struct { - model ParallelStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ParallelStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ParallelStateTimeoutBuilder) BranchExecTimeout(input string) *ParallelStateTimeoutBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *ParallelStateTimeoutBuilder) Build() ParallelStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewProduceEventBuilder() *ProduceEventBuilder { - builder := &ProduceEventBuilder{} - builder.model = ProduceEvent{} - builder.data = NewObjectBuilder() - return builder -} - -type ProduceEventBuilder struct { - model ProduceEvent - data *ObjectBuilder -} - -func (b *ProduceEventBuilder) EventRef(input string) *ProduceEventBuilder { - b.model.EventRef = input - return b -} - -func (b *ProduceEventBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ProduceEventBuilder) ContextAttributes(input map[string]string) *ProduceEventBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *ProduceEventBuilder) Build() ProduceEvent { - b.model.Data = b.data.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetriesBuilder() *RetriesBuilder { - builder := &RetriesBuilder{} - builder.model = Retries{} - return builder -} - -type RetriesBuilder struct { - model Retries -} - -func (b *RetriesBuilder) Build() Retries { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetryBuilder() *RetryBuilder { - builder := &RetryBuilder{} - builder.model = Retry{} - builder.model.ApplyDefault() - return builder -} - -type RetryBuilder struct { - model Retry -} - -func (b *RetryBuilder) Name(input string) *RetryBuilder { - b.model.Name = input - return b -} - -func (b *RetryBuilder) Delay(input string) *RetryBuilder { - b.model.Delay = input - return b -} - -func (b *RetryBuilder) MaxDelay(input string) *RetryBuilder { - b.model.MaxDelay = input - return b -} - -func (b *RetryBuilder) Increment(input string) *RetryBuilder { - b.model.Increment = input - return b -} - -func (b *RetryBuilder) Multiplier(input *floatstr.Float32OrString) *RetryBuilder { - b.model.Multiplier = input - return b -} - -func (b *RetryBuilder) MaxAttempts(input intstr.IntOrString) *RetryBuilder { - b.model.MaxAttempts = input - return b -} - -func (b *RetryBuilder) Jitter(input floatstr.Float32OrString) *RetryBuilder { - b.model.Jitter = input - return b -} - -func (b *RetryBuilder) Build() Retry { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
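// Usage sketch for the RetryBuilder deleted above, written as if inside the model
// package; the policy name and ISO 8601 durations are illustrative.
func exampleRetry() Retry {
	b := NewRetryBuilder()
	b.Name("default-retry").
		Delay("PT2S").
		MaxDelay("PT30S").
		Increment("PT1S").
		MaxAttempts(intstr.FromInt(5))
	return b.Build()
}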
-func NewScheduleBuilder() *ScheduleBuilder { - builder := &ScheduleBuilder{} - builder.model = Schedule{} - return builder -} - -type ScheduleBuilder struct { - model Schedule - cron *CronBuilder -} - -func (b *ScheduleBuilder) Interval(input string) *ScheduleBuilder { - b.model.Interval = input - return b -} - -func (b *ScheduleBuilder) Cron() *CronBuilder { - if b.cron == nil { - b.cron = NewCronBuilder() - } - return b.cron -} - -func (b *ScheduleBuilder) Timezone(input string) *ScheduleBuilder { - b.model.Timezone = input - return b -} - -func (b *ScheduleBuilder) Build() Schedule { - if b.cron != nil { - cron := b.cron.Build() - b.model.Cron = &cron - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSecretsBuilder() *SecretsBuilder { - builder := &SecretsBuilder{} - builder.model = Secrets{} - return builder -} - -type SecretsBuilder struct { - model Secrets -} - -func (b *SecretsBuilder) Build() Secrets { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepBuilder() *SleepBuilder { - builder := &SleepBuilder{} - builder.model = Sleep{} - return builder -} - -type SleepBuilder struct { - model Sleep -} - -func (b *SleepBuilder) Before(input string) *SleepBuilder { - b.model.Before = input - return b -} - -func (b *SleepBuilder) After(input string) *SleepBuilder { - b.model.After = input - return b -} - -func (b *SleepBuilder) Build() Sleep { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateBuilder() *SleepStateBuilder { - builder := &SleepStateBuilder{} - builder.model = SleepState{} - return builder -} - -type SleepStateBuilder struct { - model SleepState - timeouts *SleepStateTimeoutBuilder -} - -func (b *SleepStateBuilder) Duration(input string) *SleepStateBuilder { - b.model.Duration = input - return b -} - -func (b *SleepStateBuilder) Timeouts() *SleepStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSleepStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SleepStateBuilder) Build() SleepState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateTimeoutBuilder() *SleepStateTimeoutBuilder { - builder := &SleepStateTimeoutBuilder{} - builder.model = SleepStateTimeout{} - return builder -} - -type SleepStateTimeoutBuilder struct { - model SleepStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SleepStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SleepStateTimeoutBuilder) Build() SleepStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
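// Usage sketch for the Schedule and SleepState builders deleted above, written as if
// inside the model package; the interval, timezone and duration values are illustrative.
func exampleScheduleAndSleep() (Schedule, SleepState) {
	sched := NewScheduleBuilder()
	sched.Interval("R/PT2H").Timezone("UTC")
	// sched.Cron() would lazily create the nested CronBuilder, folded in at Build() time.

	sleep := NewSleepStateBuilder()
	sleep.Duration("PT5M")
	return sched.Build(), sleep.Build()
}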
-func NewStartBuilder() *StartBuilder { - builder := &StartBuilder{} - builder.model = Start{} - return builder -} - -type StartBuilder struct { - model Start - schedule *ScheduleBuilder -} - -func (b *StartBuilder) StateName(input string) *StartBuilder { - b.model.StateName = input - return b -} - -func (b *StartBuilder) Schedule() *ScheduleBuilder { - if b.schedule == nil { - b.schedule = NewScheduleBuilder() - } - return b.schedule -} - -func (b *StartBuilder) Build() Start { - if b.schedule != nil { - schedule := b.schedule.Build() - b.model.Schedule = &schedule - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateBuilder() *StateBuilder { - builder := &StateBuilder{} - builder.model = State{} - builder.BaseStateBuilder = *NewBaseStateBuilder() - return builder -} - -type StateBuilder struct { - model State - BaseStateBuilder - *DelayStateBuilder - *EventStateBuilder - *OperationStateBuilder - *ParallelStateBuilder - *SwitchStateBuilder - *ForEachStateBuilder - *InjectStateBuilder - *CallbackStateBuilder - *SleepStateBuilder -} - -func (b *StateBuilder) ID(input string) *StateBuilder { - b.BaseStateBuilder.ID(input) - return b -} - -func (b *StateBuilder) Name(input string) *StateBuilder { - b.BaseStateBuilder.Name(input) - return b -} - -func (b *StateBuilder) Type(input StateType) *StateBuilder { - b.BaseStateBuilder.Type(input) - return b -} - -func (b *StateBuilder) CompensatedBy(input string) *StateBuilder { - b.BaseStateBuilder.CompensatedBy(input) - return b -} - -func (b *StateBuilder) UsedForCompensation(input bool) *StateBuilder { - b.BaseStateBuilder.UsedForCompensation(input) - return b -} - -func (b *StateBuilder) DelayState() *DelayStateBuilder { - if b.DelayStateBuilder == nil { - b.DelayStateBuilder = NewDelayStateBuilder() - } - return b.DelayStateBuilder -} - -func (b *StateBuilder) TimeDelay(input string) *StateBuilder { - b.DelayStateBuilder.TimeDelay(input) - return b -} - -func (b *StateBuilder) EventState() *EventStateBuilder { - if b.EventStateBuilder == nil { - b.EventStateBuilder = NewEventStateBuilder() - } - return b.EventStateBuilder -} - -func (b *StateBuilder) Exclusive(input bool) *StateBuilder { - b.EventStateBuilder.Exclusive(input) - return b -} - -func (b *StateBuilder) OperationState() *OperationStateBuilder { - if b.OperationStateBuilder == nil { - b.OperationStateBuilder = NewOperationStateBuilder() - } - return b.OperationStateBuilder -} - -func (b *StateBuilder) ActionMode(input ActionMode) *StateBuilder { - b.OperationStateBuilder.ActionMode(input) - return b -} - -func (b *StateBuilder) ParallelState() *ParallelStateBuilder { - if b.ParallelStateBuilder == nil { - b.ParallelStateBuilder = NewParallelStateBuilder() - } - return b.ParallelStateBuilder -} - -func (b *StateBuilder) CompletionType(input CompletionType) *StateBuilder { - b.ParallelStateBuilder.CompletionType(input) - return b -} - -func (b *StateBuilder) SwitchState() *SwitchStateBuilder { - if b.SwitchStateBuilder == nil { - b.SwitchStateBuilder = NewSwitchStateBuilder() - } - return b.SwitchStateBuilder -} - -func (b *StateBuilder) ForEachState() *ForEachStateBuilder { - if b.ForEachStateBuilder == nil { - b.ForEachStateBuilder = NewForEachStateBuilder() - } - return b.ForEachStateBuilder -} - -func (b *StateBuilder) InputCollection(input string) *StateBuilder { - b.ForEachStateBuilder.InputCollection(input) - return b -} - -func (b *StateBuilder) OutputCollection(input string) 
*StateBuilder { - b.ForEachStateBuilder.OutputCollection(input) - return b -} - -func (b *StateBuilder) IterationParam(input string) *StateBuilder { - b.ForEachStateBuilder.IterationParam(input) - return b -} - -func (b *StateBuilder) Mode(input ForEachModeType) *StateBuilder { - b.ForEachStateBuilder.Mode(input) - return b -} - -func (b *StateBuilder) InjectState() *InjectStateBuilder { - if b.InjectStateBuilder == nil { - b.InjectStateBuilder = NewInjectStateBuilder() - } - return b.InjectStateBuilder -} - -func (b *StateBuilder) CallbackState() *CallbackStateBuilder { - if b.CallbackStateBuilder == nil { - b.CallbackStateBuilder = NewCallbackStateBuilder() - } - return b.CallbackStateBuilder -} - -func (b *StateBuilder) EventRef(input string) *StateBuilder { - b.CallbackStateBuilder.EventRef(input) - return b -} - -func (b *StateBuilder) SleepState() *SleepStateBuilder { - if b.SleepStateBuilder == nil { - b.SleepStateBuilder = NewSleepStateBuilder() - } - return b.SleepStateBuilder -} - -func (b *StateBuilder) Duration(input string) *StateBuilder { - b.SleepStateBuilder.Duration(input) - return b -} - -func (b *StateBuilder) Build() State { - b.model.BaseState = b.BaseStateBuilder.Build() - if b.DelayStateBuilder != nil { - delaystate := b.DelayStateBuilder.Build() - b.model.DelayState = &delaystate - } - if b.EventStateBuilder != nil { - eventstate := b.EventStateBuilder.Build() - b.model.EventState = &eventstate - } - if b.OperationStateBuilder != nil { - operationstate := b.OperationStateBuilder.Build() - b.model.OperationState = &operationstate - } - if b.ParallelStateBuilder != nil { - parallelstate := b.ParallelStateBuilder.Build() - b.model.ParallelState = &parallelstate - } - if b.SwitchStateBuilder != nil { - switchstate := b.SwitchStateBuilder.Build() - b.model.SwitchState = &switchstate - } - if b.ForEachStateBuilder != nil { - foreachstate := b.ForEachStateBuilder.Build() - b.model.ForEachState = &foreachstate - } - if b.InjectStateBuilder != nil { - injectstate := b.InjectStateBuilder.Build() - b.model.InjectState = &injectstate - } - if b.CallbackStateBuilder != nil { - callbackstate := b.CallbackStateBuilder.Build() - b.model.CallbackState = &callbackstate - } - if b.SleepStateBuilder != nil { - sleepstate := b.SleepStateBuilder.Build() - b.model.SleepState = &sleepstate - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateDataFilterBuilder() *StateDataFilterBuilder { - builder := &StateDataFilterBuilder{} - builder.model = StateDataFilter{} - return builder -} - -type StateDataFilterBuilder struct { - model StateDataFilter -} - -func (b *StateDataFilterBuilder) Input(input string) *StateDataFilterBuilder { - b.model.Input = input - return b -} - -func (b *StateDataFilterBuilder) Output(input string) *StateDataFilterBuilder { - b.model.Output = input - return b -} - -func (b *StateDataFilterBuilder) Build() StateDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
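// Usage sketch for the StateBuilder deleted above, written as if inside the model
// package. The flat shortcut setters (Exclusive, ActionMode, Duration, ...) delegate to
// an embedded per-type builder, so that builder must be obtained first through its
// accessor (EventState(), OperationState(), ...); Build() then assembles BaseState plus
// every sub-builder that was touched.
func exampleState() State {
	b := NewStateBuilder()
	b.Name("wait-for-payment")
	b.EventState()    // allocate the embedded EventStateBuilder...
	b.Exclusive(true) // ...so this delegating shortcut is safe to call
	b.EventState().AddOnEvents().EventRefs([]string{"payment-made"})
	return b.Build()
}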
-func NewStateExecTimeoutBuilder() *StateExecTimeoutBuilder { - builder := &StateExecTimeoutBuilder{} - builder.model = StateExecTimeout{} - return builder -} - -type StateExecTimeoutBuilder struct { - model StateExecTimeout -} - -func (b *StateExecTimeoutBuilder) Single(input string) *StateExecTimeoutBuilder { - b.model.Single = input - return b -} - -func (b *StateExecTimeoutBuilder) Total(input string) *StateExecTimeoutBuilder { - b.model.Total = input - return b -} - -func (b *StateExecTimeoutBuilder) Build() StateExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStatesBuilder() *StatesBuilder { - builder := &StatesBuilder{} - builder.model = States{} - return builder -} - -type StatesBuilder struct { - model States -} - -func (b *StatesBuilder) Build() States { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSwitchStateBuilder() *SwitchStateBuilder { - builder := &SwitchStateBuilder{} - builder.model = SwitchState{} - builder.defaultcondition = NewDefaultConditionBuilder() - builder.eventconditions = []*EventConditionBuilder{} - builder.dataconditions = []*DataConditionBuilder{} - return builder -} - -type SwitchStateBuilder struct { - model SwitchState - defaultcondition *DefaultConditionBuilder - eventconditions []*EventConditionBuilder - dataconditions []*DataConditionBuilder - timeouts *SwitchStateTimeoutBuilder -} - -func (b *SwitchStateBuilder) DefaultCondition() *DefaultConditionBuilder { - return b.defaultcondition -} - -func (b *SwitchStateBuilder) AddEventConditions() *EventConditionBuilder { - builder := NewEventConditionBuilder() - b.eventconditions = append(b.eventconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveEventConditions(remove *EventConditionBuilder) { - for i, val := range b.eventconditions { - if val == remove { - b.eventconditions[i] = b.eventconditions[len(b.eventconditions)-1] - b.eventconditions = b.eventconditions[:len(b.eventconditions)-1] - } - } -} -func (b *SwitchStateBuilder) AddDataConditions() *DataConditionBuilder { - builder := NewDataConditionBuilder() - b.dataconditions = append(b.dataconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveDataConditions(remove *DataConditionBuilder) { - for i, val := range b.dataconditions { - if val == remove { - b.dataconditions[i] = b.dataconditions[len(b.dataconditions)-1] - b.dataconditions = b.dataconditions[:len(b.dataconditions)-1] - } - } -} -func (b *SwitchStateBuilder) Timeouts() *SwitchStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSwitchStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SwitchStateBuilder) Build() SwitchState { - b.model.DefaultCondition = b.defaultcondition.Build() - b.model.EventConditions = []EventCondition{} - for _, v := range b.eventconditions { - b.model.EventConditions = append(b.model.EventConditions, v.Build()) - } - b.model.DataConditions = []DataCondition{} - for _, v := range b.dataconditions { - b.model.DataConditions = append(b.model.DataConditions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
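// Usage sketch for the SwitchStateBuilder deleted above, written as if inside the model
// package. The default-condition builder is allocated eagerly in NewSwitchStateBuilder,
// while data and event conditions are added on demand through their own nested builders.
func exampleSwitchState() SwitchState {
	b := NewSwitchStateBuilder()
	_ = b.DefaultCondition()  // configure the mandatory default branch here
	_ = b.AddDataConditions() // nested *DataConditionBuilder for one data condition
	b.Timeouts().StateExecTimeout().Total("PT1H")
	return b.Build()
}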
-func NewSwitchStateTimeoutBuilder() *SwitchStateTimeoutBuilder { - builder := &SwitchStateTimeoutBuilder{} - builder.model = SwitchStateTimeout{} - return builder -} - -type SwitchStateTimeoutBuilder struct { - model SwitchStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SwitchStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SwitchStateTimeoutBuilder) EventTimeout(input string) *SwitchStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *SwitchStateTimeoutBuilder) Build() SwitchStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewTimeoutsBuilder() *TimeoutsBuilder { - builder := &TimeoutsBuilder{} - builder.model = Timeouts{} - return builder -} - -type TimeoutsBuilder struct { - model Timeouts - workflowexectimeout *WorkflowExecTimeoutBuilder - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *TimeoutsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - if b.workflowexectimeout == nil { - b.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - } - return b.workflowexectimeout -} - -func (b *TimeoutsBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *TimeoutsBuilder) ActionExecTimeout(input string) *TimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) BranchExecTimeout(input string) *TimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) EventTimeout(input string) *TimeoutsBuilder { - b.model.EventTimeout = input - return b -} - -func (b *TimeoutsBuilder) Build() Timeouts { - if b.workflowexectimeout != nil { - workflowexectimeout := b.workflowexectimeout.Build() - b.model.WorkflowExecTimeout = &workflowexectimeout - } - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewTransitionBuilder() *TransitionBuilder { - builder := &TransitionBuilder{} - builder.model = Transition{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type TransitionBuilder struct { - model Transition - stateparent *StateBuilder - produceevents []*ProduceEventBuilder -} - -func (b *TransitionBuilder) stateParent() *StateBuilder { - if b.stateparent == nil { - b.stateparent = NewStateBuilder() - } - return b.stateparent -} - -func (b *TransitionBuilder) NextState(input string) *TransitionBuilder { - b.model.NextState = input - return b -} - -func (b *TransitionBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *TransitionBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *TransitionBuilder) Compensate(input bool) *TransitionBuilder { - b.model.Compensate = input - return b -} - -func (b *TransitionBuilder) Build() Transition { - if b.stateparent != nil { - stateparent := b.stateparent.Build() - b.model.stateParent = &stateparent - } - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowBuilder() *WorkflowBuilder { - builder := &WorkflowBuilder{} - builder.model = Workflow{} - builder.BaseWorkflowBuilder = *NewBaseWorkflowBuilder() - builder.states = []*StateBuilder{} - builder.events = []*EventBuilder{} - builder.functions = []*FunctionBuilder{} - builder.retries = []*RetryBuilder{} - return builder -} - -type WorkflowBuilder struct { - model Workflow - BaseWorkflowBuilder - states []*StateBuilder - events []*EventBuilder - functions []*FunctionBuilder - retries []*RetryBuilder -} - -func (b *WorkflowBuilder) ID(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.ID(input) - return b -} - -func (b *WorkflowBuilder) Key(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Key(input) - return b -} - -func (b *WorkflowBuilder) Name(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Name(input) - return b -} - -func (b *WorkflowBuilder) Description(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Description(input) - return b -} - -func (b *WorkflowBuilder) Version(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Version(input) - return b -} - -func (b *WorkflowBuilder) SpecVersion(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.SpecVersion(input) - return b -} - -func (b *WorkflowBuilder) ExpressionLang(input ExpressionLangType) *WorkflowBuilder { - b.BaseWorkflowBuilder.ExpressionLang(input) - return b -} - -func (b *WorkflowBuilder) KeepActive(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.KeepActive(input) - return b -} - -func (b *WorkflowBuilder) AutoRetries(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.AutoRetries(input) - return b -} - -func (b *WorkflowBuilder) AddStates() *StateBuilder { - builder := NewStateBuilder() - b.states = append(b.states, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveStates(remove *StateBuilder) { - for i, val := range b.states { - if val == remove { - b.states[i] = 
b.states[len(b.states)-1] - b.states = b.states[:len(b.states)-1] - } - } -} -func (b *WorkflowBuilder) AddEvents() *EventBuilder { - builder := NewEventBuilder() - b.events = append(b.events, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveEvents(remove *EventBuilder) { - for i, val := range b.events { - if val == remove { - b.events[i] = b.events[len(b.events)-1] - b.events = b.events[:len(b.events)-1] - } - } -} -func (b *WorkflowBuilder) AddFunctions() *FunctionBuilder { - builder := NewFunctionBuilder() - b.functions = append(b.functions, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveFunctions(remove *FunctionBuilder) { - for i, val := range b.functions { - if val == remove { - b.functions[i] = b.functions[len(b.functions)-1] - b.functions = b.functions[:len(b.functions)-1] - } - } -} -func (b *WorkflowBuilder) AddRetries() *RetryBuilder { - builder := NewRetryBuilder() - b.retries = append(b.retries, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveRetries(remove *RetryBuilder) { - for i, val := range b.retries { - if val == remove { - b.retries[i] = b.retries[len(b.retries)-1] - b.retries = b.retries[:len(b.retries)-1] - } - } -} -func (b *WorkflowBuilder) Build() Workflow { - b.model.BaseWorkflow = b.BaseWorkflowBuilder.Build() - b.model.States = []State{} - for _, v := range b.states { - b.model.States = append(b.model.States, v.Build()) - } - b.model.Events = []Event{} - for _, v := range b.events { - b.model.Events = append(b.model.Events, v.Build()) - } - b.model.Functions = []Function{} - for _, v := range b.functions { - b.model.Functions = append(b.model.Functions, v.Build()) - } - b.model.Retries = []Retry{} - for _, v := range b.retries { - b.model.Retries = append(b.model.Retries, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowExecTimeoutBuilder() *WorkflowExecTimeoutBuilder { - builder := &WorkflowExecTimeoutBuilder{} - builder.model = WorkflowExecTimeout{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowExecTimeoutBuilder struct { - model WorkflowExecTimeout -} - -func (b *WorkflowExecTimeoutBuilder) Duration(input string) *WorkflowExecTimeoutBuilder { - b.model.Duration = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Interrupt(input bool) *WorkflowExecTimeoutBuilder { - b.model.Interrupt = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) RunBefore(input string) *WorkflowExecTimeoutBuilder { - b.model.RunBefore = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Build() WorkflowExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
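// Usage sketch tying the top-level WorkflowBuilder deleted above to the nested builders,
// written as if inside the model package; identifiers and versions are illustrative.
func exampleWorkflow() Workflow {
	b := NewWorkflowBuilder()
	b.ID("order-workflow").
		Name("Order Workflow").
		Version("1.0.0").
		SpecVersion("0.8")

	fn := b.AddFunctions()
	fn.Name("sendEmail").Operation("http://myapis.org/email.json#send")

	st := b.AddStates()
	st.Name("notify")
	st.OperationState() // allocate the embedded OperationStateBuilder for this state
	// Build() assembles BaseWorkflow plus the states, events, functions and retries
	// collected from the nested builders.
	return b.Build()
}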
-func NewWorkflowRefBuilder() *WorkflowRefBuilder { - builder := &WorkflowRefBuilder{} - builder.model = WorkflowRef{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowRefBuilder struct { - model WorkflowRef -} - -func (b *WorkflowRefBuilder) WorkflowID(input string) *WorkflowRefBuilder { - b.model.WorkflowID = input - return b -} - -func (b *WorkflowRefBuilder) Version(input string) *WorkflowRefBuilder { - b.model.Version = input - return b -} - -func (b *WorkflowRefBuilder) Invoke(input InvokeKind) *WorkflowRefBuilder { - b.model.Invoke = input - return b -} - -func (b *WorkflowRefBuilder) OnParentComplete(input OnParentCompleteType) *WorkflowRefBuilder { - b.model.OnParentComplete = input - return b -} - -func (b *WorkflowRefBuilder) Build() WorkflowRef { - return b.model -} diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go deleted file mode 100644 index 0fb2566..0000000 --- a/model/zz_generated.deepcopy.go +++ /dev/null @@ -1,1837 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Code generated by deepcopy-gen. DO NOT EDIT. - -package model - -import ( - json "encoding/json" - - floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - intstr "k8s.io/apimachinery/pkg/util/intstr" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Action) DeepCopyInto(out *Action) { - *out = *in - if in.FunctionRef != nil { - in, out := &in.FunctionRef, &out.FunctionRef - *out = new(FunctionRef) - (*in).DeepCopyInto(*out) - } - if in.EventRef != nil { - in, out := &in.EventRef, &out.EventRef - *out = new(EventRef) - (*in).DeepCopyInto(*out) - } - if in.SubFlowRef != nil { - in, out := &in.SubFlowRef, &out.SubFlowRef - *out = new(WorkflowRef) - **out = **in - } - if in.Sleep != nil { - in, out := &in.Sleep, &out.Sleep - *out = new(Sleep) - **out = **in - } - if in.NonRetryableErrors != nil { - in, out := &in.NonRetryableErrors, &out.NonRetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.RetryableErrors != nil { - in, out := &in.RetryableErrors, &out.RetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - out.ActionDataFilter = in.ActionDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Action. -func (in *Action) DeepCopy() *Action { - if in == nil { - return nil - } - out := new(Action) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ActionDataFilter) DeepCopyInto(out *ActionDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ActionDataFilter. 
-func (in *ActionDataFilter) DeepCopy() *ActionDataFilter { - if in == nil { - return nil - } - out := new(ActionDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Auth) DeepCopyInto(out *Auth) { - *out = *in - in.Properties.DeepCopyInto(&out.Properties) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auth. -func (in *Auth) DeepCopy() *Auth { - if in == nil { - return nil - } - out := new(Auth) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AuthProperties) DeepCopyInto(out *AuthProperties) { - *out = *in - if in.Basic != nil { - in, out := &in.Basic, &out.Basic - *out = new(BasicAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.Bearer != nil { - in, out := &in.Bearer, &out.Bearer - *out = new(BearerAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.OAuth2 != nil { - in, out := &in.OAuth2, &out.OAuth2 - *out = new(OAuth2AuthProperties) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthProperties. -func (in *AuthProperties) DeepCopy() *AuthProperties { - if in == nil { - return nil - } - out := new(AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Auths) DeepCopyInto(out *Auths) { - { - in := &in - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. -func (in Auths) DeepCopy() Auths { - if in == nil { - return nil - } - out := new(Auths) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BaseState) DeepCopyInto(out *BaseState) { - *out = *in - if in.OnErrors != nil { - in, out := &in.OnErrors, &out.OnErrors - *out = make([]OnError, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.StateDataFilter != nil { - in, out := &in.StateDataFilter, &out.StateDataFilter - *out = new(StateDataFilter) - **out = **in - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = new(Metadata) - if **in != nil { - in, out := *in, *out - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseState. -func (in *BaseState) DeepCopy() *BaseState { - if in == nil { - return nil - } - out := new(BaseState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
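// Usage sketch for the generated deep-copy helpers being deleted in this file, written
// as if inside the model package: DeepCopy duplicates nested pointers, slices and maps,
// so mutating the clone never aliases the original value.
func exampleDeepCopy(a *Action) *Action {
	clone := a.DeepCopy()   // FunctionRef, EventRef, the error slices, etc. are copied, not shared
	clone.FunctionRef = nil // the original Action keeps its FunctionRef
	return clone
}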
-func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(Start) - (*in).DeepCopyInto(*out) - } - if in.Annotations != nil { - in, out := &in.Annotations, &out.Annotations - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DataInputSchema != nil { - in, out := &in.DataInputSchema, &out.DataInputSchema - *out = new(DataInputSchema) - (*in).DeepCopyInto(*out) - } - if in.Secrets != nil { - in, out := &in.Secrets, &out.Secrets - *out = make(Secrets, len(*in)) - copy(*out, *in) - } - if in.Constants != nil { - in, out := &in.Constants, &out.Constants - *out = new(Constants) - (*in).DeepCopyInto(*out) - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(Timeouts) - (*in).DeepCopyInto(*out) - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(Errors, len(*in)) - copy(*out, *in) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Auth != nil { - in, out := &in.Auth, &out.Auth - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseWorkflow. -func (in *BaseWorkflow) DeepCopy() *BaseWorkflow { - if in == nil { - return nil - } - out := new(BaseWorkflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BasicAuthProperties) DeepCopyInto(out *BasicAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAuthProperties. -func (in *BasicAuthProperties) DeepCopy() *BasicAuthProperties { - if in == nil { - return nil - } - out := new(BasicAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BearerAuthProperties) DeepCopyInto(out *BearerAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BearerAuthProperties. -func (in *BearerAuthProperties) DeepCopy() *BearerAuthProperties { - if in == nil { - return nil - } - out := new(BearerAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Branch) DeepCopyInto(out *Branch) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(BranchTimeouts) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Branch. -func (in *Branch) DeepCopy() *Branch { - if in == nil { - return nil - } - out := new(Branch) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BranchTimeouts) DeepCopyInto(out *BranchTimeouts) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BranchTimeouts. -func (in *BranchTimeouts) DeepCopy() *BranchTimeouts { - if in == nil { - return nil - } - out := new(BranchTimeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackState) DeepCopyInto(out *CallbackState) { - *out = *in - in.Action.DeepCopyInto(&out.Action) - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(CallbackStateTimeout) - (*in).DeepCopyInto(*out) - } - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackState. -func (in *CallbackState) DeepCopy() *CallbackState { - if in == nil { - return nil - } - out := new(CallbackState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackStateTimeout) DeepCopyInto(out *CallbackStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackStateTimeout. -func (in *CallbackStateTimeout) DeepCopy() *CallbackStateTimeout { - if in == nil { - return nil - } - out := new(CallbackStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Common) DeepCopyInto(out *Common) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Common. -func (in *Common) DeepCopy() *Common { - if in == nil { - return nil - } - out := new(Common) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Constants) DeepCopyInto(out *Constants) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Constants. -func (in *Constants) DeepCopy() *Constants { - if in == nil { - return nil - } - out := new(Constants) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in ConstantsData) DeepCopyInto(out *ConstantsData) { - { - in := &in - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConstantsData. -func (in ConstantsData) DeepCopy() ConstantsData { - if in == nil { - return nil - } - out := new(ConstantsData) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ContinueAs) DeepCopyInto(out *ContinueAs) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - out.WorkflowExecTimeout = in.WorkflowExecTimeout - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ContinueAs. -func (in *ContinueAs) DeepCopy() *ContinueAs { - if in == nil { - return nil - } - out := new(ContinueAs) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Correlation) DeepCopyInto(out *Correlation) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation. -func (in *Correlation) DeepCopy() *Correlation { - if in == nil { - return nil - } - out := new(Correlation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Cron) DeepCopyInto(out *Cron) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cron. -func (in *Cron) DeepCopy() *Cron { - if in == nil { - return nil - } - out := new(Cron) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataCondition) DeepCopyInto(out *DataCondition) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCondition. -func (in *DataCondition) DeepCopy() *DataCondition { - if in == nil { - return nil - } - out := new(DataCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { - *out = *in - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(Object) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSchema. 
-func (in *DataInputSchema) DeepCopy() *DataInputSchema { - if in == nil { - return nil - } - out := new(DataInputSchema) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DefaultCondition) DeepCopyInto(out *DefaultCondition) { - *out = *in - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultCondition. -func (in *DefaultCondition) DeepCopy() *DefaultCondition { - if in == nil { - return nil - } - out := new(DefaultCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DelayState) DeepCopyInto(out *DelayState) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DelayState. -func (in *DelayState) DeepCopy() *DelayState { - if in == nil { - return nil - } - out := new(DelayState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *End) DeepCopyInto(out *End) { - *out = *in - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ContinueAs != nil { - in, out := &in.ContinueAs, &out.ContinueAs - *out = new(ContinueAs) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new End. -func (in *End) DeepCopy() *End { - if in == nil { - return nil - } - out := new(End) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Error) DeepCopyInto(out *Error) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Error. -func (in *Error) DeepCopy() *Error { - if in == nil { - return nil - } - out := new(Error) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Errors) DeepCopyInto(out *Errors) { - { - in := &in - *out = make(Errors, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Errors. -func (in Errors) DeepCopy() Errors { - if in == nil { - return nil - } - out := new(Errors) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Event) DeepCopyInto(out *Event) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Correlation != nil { - in, out := &in.Correlation, &out.Correlation - *out = make([]Correlation, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Event. 
-func (in *Event) DeepCopy() *Event { - if in == nil { - return nil - } - out := new(Event) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventCondition) DeepCopyInto(out *EventCondition) { - *out = *in - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventCondition. -func (in *EventCondition) DeepCopy() *EventCondition { - if in == nil { - return nil - } - out := new(EventCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in EventConditions) DeepCopyInto(out *EventConditions) { - { - in := &in - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventConditions. -func (in EventConditions) DeepCopy() EventConditions { - if in == nil { - return nil - } - out := new(EventConditions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventDataFilter) DeepCopyInto(out *EventDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDataFilter. -func (in *EventDataFilter) DeepCopy() *EventDataFilter { - if in == nil { - return nil - } - out := new(EventDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventRef) DeepCopyInto(out *EventRef) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = new(Object) - (*in).DeepCopyInto(*out) - } - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventRef. -func (in *EventRef) DeepCopy() *EventRef { - if in == nil { - return nil - } - out := new(EventRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventState) DeepCopyInto(out *EventState) { - *out = *in - if in.OnEvents != nil { - in, out := &in.OnEvents, &out.OnEvents - *out = make([]OnEvents, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(EventStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventState. 
-func (in *EventState) DeepCopy() *EventState { - if in == nil { - return nil - } - out := new(EventState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventStateTimeout) DeepCopyInto(out *EventStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventStateTimeout. -func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { - if in == nil { - return nil - } - out := new(EventStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Events) DeepCopyInto(out *Events) { - { - in := &in - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Events. -func (in Events) DeepCopy() Events { - if in == nil { - return nil - } - out := new(Events) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachState) DeepCopyInto(out *ForEachState) { - *out = *in - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(intstr.IntOrString) - **out = **in - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ForEachStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachState. -func (in *ForEachState) DeepCopy() *ForEachState { - if in == nil { - return nil - } - out := new(ForEachState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachStateTimeout) DeepCopyInto(out *ForEachStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachStateTimeout. -func (in *ForEachStateTimeout) DeepCopy() *ForEachStateTimeout { - if in == nil { - return nil - } - out := new(ForEachStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Function) DeepCopyInto(out *Function) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Function. -func (in *Function) DeepCopy() *Function { - if in == nil { - return nil - } - out := new(Function) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *FunctionRef) DeepCopyInto(out *FunctionRef) { - *out = *in - if in.Arguments != nil { - in, out := &in.Arguments, &out.Arguments - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FunctionRef. -func (in *FunctionRef) DeepCopy() *FunctionRef { - if in == nil { - return nil - } - out := new(FunctionRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Functions) DeepCopyInto(out *Functions) { - { - in := &in - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Functions. -func (in Functions) DeepCopy() Functions { - if in == nil { - return nil - } - out := new(Functions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectState) DeepCopyInto(out *InjectState) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(InjectStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectState. -func (in *InjectState) DeepCopy() *InjectState { - if in == nil { - return nil - } - out := new(InjectState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectStateTimeout) DeepCopyInto(out *InjectStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectStateTimeout. -func (in *InjectStateTimeout) DeepCopy() *InjectStateTimeout { - if in == nil { - return nil - } - out := new(InjectStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Metadata) DeepCopyInto(out *Metadata) { - { - in := &in - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Metadata. -func (in Metadata) DeepCopy() Metadata { - if in == nil { - return nil - } - out := new(Metadata) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OAuth2AuthProperties) DeepCopyInto(out *OAuth2AuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Scopes != nil { - in, out := &in.Scopes, &out.Scopes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Audiences != nil { - in, out := &in.Audiences, &out.Audiences - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2AuthProperties. -func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { - if in == nil { - return nil - } - out := new(OAuth2AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Object) DeepCopyInto(out *Object) { - *out = *in - if in.MapValue != nil { - in, out := &in.MapValue, &out.MapValue - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.SliceValue != nil { - in, out := &in.SliceValue, &out.SliceValue - *out = make([]Object, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Object. -func (in *Object) DeepCopy() *Object { - if in == nil { - return nil - } - out := new(Object) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnError) DeepCopyInto(out *OnError) { - *out = *in - if in.ErrorRefs != nil { - in, out := &in.ErrorRefs, &out.ErrorRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnError. -func (in *OnError) DeepCopy() *OnError { - if in == nil { - return nil - } - out := new(OnError) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnEvents) DeepCopyInto(out *OnEvents) { - *out = *in - if in.EventRefs != nil { - in, out := &in.EventRefs, &out.EventRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.EventDataFilter = in.EventDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnEvents. -func (in *OnEvents) DeepCopy() *OnEvents { - if in == nil { - return nil - } - out := new(OnEvents) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OperationState) DeepCopyInto(out *OperationState) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(OperationStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationState. -func (in *OperationState) DeepCopy() *OperationState { - if in == nil { - return nil - } - out := new(OperationState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OperationStateTimeout) DeepCopyInto(out *OperationStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationStateTimeout. -func (in *OperationStateTimeout) DeepCopy() *OperationStateTimeout { - if in == nil { - return nil - } - out := new(OperationStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelState) DeepCopyInto(out *ParallelState) { - *out = *in - if in.Branches != nil { - in, out := &in.Branches, &out.Branches - *out = make([]Branch, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.NumCompleted = in.NumCompleted - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ParallelStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelState. -func (in *ParallelState) DeepCopy() *ParallelState { - if in == nil { - return nil - } - out := new(ParallelState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelStateTimeout) DeepCopyInto(out *ParallelStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelStateTimeout. -func (in *ParallelStateTimeout) DeepCopy() *ParallelStateTimeout { - if in == nil { - return nil - } - out := new(ParallelStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ProduceEvent) DeepCopyInto(out *ProduceEvent) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProduceEvent. -func (in *ProduceEvent) DeepCopy() *ProduceEvent { - if in == nil { - return nil - } - out := new(ProduceEvent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func (in Retries) DeepCopyInto(out *Retries) { - { - in := &in - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retries. -func (in Retries) DeepCopy() Retries { - if in == nil { - return nil - } - out := new(Retries) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Retry) DeepCopyInto(out *Retry) { - *out = *in - if in.Multiplier != nil { - in, out := &in.Multiplier, &out.Multiplier - *out = new(floatstr.Float32OrString) - **out = **in - } - out.MaxAttempts = in.MaxAttempts - out.Jitter = in.Jitter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retry. -func (in *Retry) DeepCopy() *Retry { - if in == nil { - return nil - } - out := new(Retry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Schedule) DeepCopyInto(out *Schedule) { - *out = *in - if in.Cron != nil { - in, out := &in.Cron, &out.Cron - *out = new(Cron) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schedule. -func (in *Schedule) DeepCopy() *Schedule { - if in == nil { - return nil - } - out := new(Schedule) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Secrets) DeepCopyInto(out *Secrets) { - { - in := &in - *out = make(Secrets, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Secrets. -func (in Secrets) DeepCopy() Secrets { - if in == nil { - return nil - } - out := new(Secrets) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Sleep) DeepCopyInto(out *Sleep) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Sleep. -func (in *Sleep) DeepCopy() *Sleep { - if in == nil { - return nil - } - out := new(Sleep) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepState) DeepCopyInto(out *SleepState) { - *out = *in - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SleepStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepState. -func (in *SleepState) DeepCopy() *SleepState { - if in == nil { - return nil - } - out := new(SleepState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepStateTimeout) DeepCopyInto(out *SleepStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepStateTimeout. 
-func (in *SleepStateTimeout) DeepCopy() *SleepStateTimeout { - if in == nil { - return nil - } - out := new(SleepStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Start) DeepCopyInto(out *Start) { - *out = *in - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(Schedule) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Start. -func (in *Start) DeepCopy() *Start { - if in == nil { - return nil - } - out := new(Start) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *State) DeepCopyInto(out *State) { - *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) - if in.DelayState != nil { - in, out := &in.DelayState, &out.DelayState - *out = new(DelayState) - **out = **in - } - if in.EventState != nil { - in, out := &in.EventState, &out.EventState - *out = new(EventState) - (*in).DeepCopyInto(*out) - } - if in.OperationState != nil { - in, out := &in.OperationState, &out.OperationState - *out = new(OperationState) - (*in).DeepCopyInto(*out) - } - if in.ParallelState != nil { - in, out := &in.ParallelState, &out.ParallelState - *out = new(ParallelState) - (*in).DeepCopyInto(*out) - } - if in.SwitchState != nil { - in, out := &in.SwitchState, &out.SwitchState - *out = new(SwitchState) - (*in).DeepCopyInto(*out) - } - if in.ForEachState != nil { - in, out := &in.ForEachState, &out.ForEachState - *out = new(ForEachState) - (*in).DeepCopyInto(*out) - } - if in.InjectState != nil { - in, out := &in.InjectState, &out.InjectState - *out = new(InjectState) - (*in).DeepCopyInto(*out) - } - if in.CallbackState != nil { - in, out := &in.CallbackState, &out.CallbackState - *out = new(CallbackState) - (*in).DeepCopyInto(*out) - } - if in.SleepState != nil { - in, out := &in.SleepState, &out.SleepState - *out = new(SleepState) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new State. -func (in *State) DeepCopy() *State { - if in == nil { - return nil - } - out := new(State) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateDataFilter) DeepCopyInto(out *StateDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateDataFilter. -func (in *StateDataFilter) DeepCopy() *StateDataFilter { - if in == nil { - return nil - } - out := new(StateDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateExecTimeout) DeepCopyInto(out *StateExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateExecTimeout. -func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { - if in == nil { - return nil - } - out := new(StateExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in States) DeepCopyInto(out *States) { - { - in := &in - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new States. -func (in States) DeepCopy() States { - if in == nil { - return nil - } - out := new(States) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchState) DeepCopyInto(out *SwitchState) { - *out = *in - in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) - if in.EventConditions != nil { - in, out := &in.EventConditions, &out.EventConditions - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.DataConditions != nil { - in, out := &in.DataConditions, &out.DataConditions - *out = make([]DataCondition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SwitchStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchState. -func (in *SwitchState) DeepCopy() *SwitchState { - if in == nil { - return nil - } - out := new(SwitchState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchStateTimeout) DeepCopyInto(out *SwitchStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchStateTimeout. -func (in *SwitchStateTimeout) DeepCopy() *SwitchStateTimeout { - if in == nil { - return nil - } - out := new(SwitchStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Timeouts) DeepCopyInto(out *Timeouts) { - *out = *in - if in.WorkflowExecTimeout != nil { - in, out := &in.WorkflowExecTimeout, &out.WorkflowExecTimeout - *out = new(WorkflowExecTimeout) - **out = **in - } - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Timeouts. -func (in *Timeouts) DeepCopy() *Timeouts { - if in == nil { - return nil - } - out := new(Timeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Transition) DeepCopyInto(out *Transition) { - *out = *in - if in.stateParent != nil { - in, out := &in.stateParent, &out.stateParent - *out = new(State) - (*in).DeepCopyInto(*out) - } - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Transition. 
-func (in *Transition) DeepCopy() *Transition { - if in == nil { - return nil - } - out := new(Transition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ValidatorContext) DeepCopyInto(out *ValidatorContext) { - *out = *in - if in.States != nil { - in, out := &in.States, &out.States - *out = make(map[string]State, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(map[string]Function, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(map[string]Event, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(map[string]Retry, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(map[string]Error, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidatorContext. -func (in *ValidatorContext) DeepCopy() *ValidatorContext { - if in == nil { - return nil - } - out := new(ValidatorContext) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Workflow) DeepCopyInto(out *Workflow) { - *out = *in - in.BaseWorkflow.DeepCopyInto(&out.BaseWorkflow) - if in.States != nil { - in, out := &in.States, &out.States - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Workflow. -func (in *Workflow) DeepCopy() *Workflow { - if in == nil { - return nil - } - out := new(Workflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowExecTimeout) DeepCopyInto(out *WorkflowExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowExecTimeout. -func (in *WorkflowExecTimeout) DeepCopy() *WorkflowExecTimeout { - if in == nil { - return nil - } - out := new(WorkflowExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowRef) DeepCopyInto(out *WorkflowRef) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowRef. 
-func (in *WorkflowRef) DeepCopy() *WorkflowRef { - if in == nil { - return nil - } - out := new(WorkflowRef) - in.DeepCopyInto(out) - return out -} diff --git a/parser/cmd/main.go b/parser/cmd/main.go new file mode 100644 index 0000000..e811696 --- /dev/null +++ b/parser/cmd/main.go @@ -0,0 +1,67 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/parser" + "os" + "path/filepath" +) + +func main() { + if len(os.Args) < 2 { + fmt.Println("Usage: go run main.go ") + os.Exit(1) + } + + baseDir := os.Args[1] + supportedExt := []string{".json", ".yaml", ".yml"} + errCount := 0 + + err := filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + for _, ext := range supportedExt { + if filepath.Ext(path) == ext { + fmt.Printf("Validating: %s\n", path) + _, err := parser.FromFile(path) + if err != nil { + fmt.Printf("Validation failed for %s: %v\n", path, err) + errCount++ + } else { + fmt.Printf("Validation succeeded for %s\n", path) + } + break + } + } + } + return nil + }) + + if err != nil { + fmt.Printf("Error walking the path %s: %v\n", baseDir, err) + os.Exit(1) + } + + if errCount > 0 { + fmt.Printf("Validation failed for %d file(s).\n", errCount) + os.Exit(1) + } + + fmt.Println("All workflows validated successfully.") +} diff --git a/parser/parser.go b/parser/parser.go index 7b7ad93..3707132 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -21,10 +21,9 @@ import ( "path/filepath" "strings" - "sigs.k8s.io/yaml" + "github.com/serverlessworkflow/sdk-go/v3/model" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" ) const ( @@ -51,9 +50,9 @@ func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { return nil, err } - ctx := model.NewValidatorContext(workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, val.WorkflowError(err) + err = model.GetValidator().Struct(workflow) + if err != nil { + return nil, err } return workflow, nil } diff --git a/parser/parser_test.go b/parser/parser_test.go index daf6608..9852d5f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -15,1078 +15,131 @@ package parser import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/test" - "github.com/serverlessworkflow/sdk-go/v2/util" ) -func TestBasicValidation(t *testing.T) { - rootPath := "./testdata/workflows" - files, err := os.ReadDir(rootPath) +func TestFromYAMLSource(t *testing.T) { + source := []byte(` +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + 
endpoint: http://example.com +`) + workflow, err := FromYAMLSource(source) assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) +} - util.SetIncludePaths(append(util.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) - - for _, file := range files { - if !file.IsDir() { - path := filepath.Join(rootPath, file.Name()) - workflow, err := FromFile(path) - - if assert.NoError(t, err, "Test File %s", path) { - assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) - assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) +func TestFromJSONSource(t *testing.T) { + source := []byte(`{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } } } - } -} - -func TestCustomValidators(t *testing.T) { - rootPath := "./testdata/workflows/witherrors" - files, err := os.ReadDir(rootPath) + ] +}`) + workflow, err := FromJSONSource(source) assert.NoError(t, err) - for _, file := range files { - if !file.IsDir() { - _, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.Error(t, err, "Test File %s", file.Name()) - } - } + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) } func TestFromFile(t *testing.T) { - files := []struct { - name string - f func(*testing.T, *model.Workflow) + tests := []struct { + name string + filePath string + expectError bool }{ { - "./testdata/workflows/greetings.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0].OperationState) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/actiondata-defaultvalue.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "greeting", w.ID) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.Equal(t, true, w.States[0].OperationState.Actions[0].ActionDataFilter.UseResults) - assert.Equal(t, "greeting", w.States[0].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/greetings.sw.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.IsType(t, "idx", w.States[0].ID) - assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.NotNil(t, w.States[0].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbaseddataandswitch.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.Equal(t, "Start", w.States[0].Name) - assert.Equal(t, "CheckVisaStatus", w.States[1].Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].SwitchState) - assert.Equal(t, "PT1H", w.States[1].SwitchState.Timeouts.EventTimeout) - assert.Nil(t, w.States[1].End) - assert.NotNil(t, 
w.States[2].End) - assert.True(t, w.States[2].End.Terminate) - }, - }, { - "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { - operationState := w.States[0].OperationState - assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreetingexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) - assert.Equal(t, false, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.p.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - }, - }, { - "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotEmpty(t, w.States[0].EventConditions) - assert.Equal(t, "CheckVisaStatus", w.States[0].Name) - assert.IsType(t, model.EventCondition{}, w.States[0].EventConditions[0]) - }, - }, { - "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, 
"CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 1) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - }, - }, { - "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 2) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", auth[1].Name) - assert.Equal(t, model.AuthTypeBasic, auth[1].Scheme) - basicProperties := auth[1].Properties.Basic - assert.Equal(t, "test_user", basicProperties.Username) - assert.Equal(t, "test_pwd", basicProperties.Password) - // metadata - assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) - assert.Equal(t, model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.Bearer.Metadata) - assert.Equal(t, &model.Metadata{"metadataState": model.FromString("state info")}, w.States[0].Metadata) - }, - }, { - "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/checkinbox.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.NotNil(t, w.States[0]) - 
assert.NotNil(t, w.States[0].OperationState) - operationState := w.States[0].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, w.States, 2) - }, - }, { - // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/workflows/provisionorders.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Provision Orders", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.Len(t, w.States[0].OnErrors, 3) - assert.Equal(t, "Missing order id", w.States[0].OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", w.States[0].OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", w.States[0].OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", w.States[0].OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", w.States[0].OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", w.States[0].OnErrors[2].Transition.NextState) - }, - }, { - "./testdata/workflows/checkinbox.cron-test.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].Transition.NextState) - assert.Nil(t, w.States[0].End) - assert.NotNil(t, w.States[1].End) - assert.True(t, w.States[1].End.Terminate) - }, - }, { - "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].Name) - }, - }, { - // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/workflows/patientonboarding.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Patient Onboarding Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].EventState) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, w.Retries) - assert.Len(t, w.Retries, 1) - assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) - assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) - }, - }, { - "./testdata/workflows/greetings-secret.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 1) - }, - }, { - "./testdata/workflows/greetings-secret-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 3) - }, - }, { - "./testdata/workflows/greetings-constants-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotEmpty(t, w.Constants) - assert.NotEmpty(t, w.Constants.Data["Translations"]) - }, - }, { - "./testdata/workflows/roomreadings.timeouts.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - 
"./testdata/workflows/roomreadings.timeouts.file.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/purchaseorderworkflow.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Purchase Order Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "P30D", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Notify Customer", w.Name) - switchState := w.States[1].SwitchState - - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - - endDataCondition := switchState.DataConditions[0] - assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) - assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) - assert.Equal(t, model.FromString("${ del(.customerCount) }"), endDataCondition.End.ContinueAs.Data) - assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) - assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) - assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) - }, - }, { - name: "./testdata/workflows/greetings-v08-spec.sw.yaml", - f: func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "custom.greeting", w.ID) - assert.Equal(t, "1.0", w.Version) - assert.Equal(t, "0.8", w.SpecVersion) - - // Workflow "name" no longer a required property - assert.Empty(t, w.Name) - - // Functions: - assert.NotEmpty(t, w.Functions[0]) - assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) - assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) - assert.Equal(t, "/path/to/my/script/greeting.ts#CustomGreeting", w.Functions[0].Operation) - - assert.NotEmpty(t, w.Functions[1]) - assert.Equal(t, "sendTextFunction", w.Functions[1].Name) - assert.Equal(t, model.FunctionTypeGraphQL, w.Functions[1].Type) - assert.Equal(t, "http://myapis.org/inboxapi.json#sendText", w.Functions[1].Operation) - - assert.NotEmpty(t, w.Functions[2]) - assert.Equal(t, "greetingFunction", w.Functions[2].Name) - assert.Equal(t, model.FunctionTypeREST, w.Functions[2].Type) - assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) - - // Delay state - assert.NotEmpty(t, w.States[0].DelayState.TimeDelay) - assert.Equal(t, "GreetDelay", w.States[0].Name) - assert.Equal(t, model.StateTypeDelay, w.States[0].Type) - assert.Equal(t, "StoreCarAuctionBid", w.States[0].Transition.NextState) - - // Event state - assert.NotEmpty(t, w.States[1].EventState.OnEvents) - assert.Equal(t, "StoreCarAuctionBid", w.States[1].Name) - assert.Equal(t, model.StateTypeEvent, w.States[1].Type) - assert.Equal(t, true, w.States[1].EventState.Exclusive) - assert.NotEmpty(t, true, w.States[1].EventState.OnEvents[0]) - assert.Equal(t, []string{"CarBidEvent"}, w.States[1].EventState.OnEvents[0].EventRefs) - assert.Equal(t, true, w.States[1].EventState.OnEvents[0].EventDataFilter.UseData) - assert.Equal(t, "test", w.States[1].EventState.OnEvents[0].EventDataFilter.Data) - assert.Equal(t, "testing", 
w.States[1].EventState.OnEvents[0].EventDataFilter.ToStateData) - assert.Equal(t, model.ActionModeParallel, w.States[1].EventState.OnEvents[0].ActionMode) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.RefName) - assert.Equal(t, "funcref1", w.States[1].EventState.OnEvents[0].Actions[0].Name) - assert.Equal(t, map[string]model.Object{"bid": model.FromString("${ .bid }")}, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.Arguments) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[1].EventRef) - assert.Equal(t, "eventRefName", w.States[1].EventState.OnEvents[0].Actions[1].Name) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ResultEventRef) - - data := model.FromString("${ .patientInfo }") - assert.Equal(t, &data, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.Data) - assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ContextAttributes) - - assert.Equal(t, "PT1S", w.States[1].EventState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[1].EventState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, "PT1H", w.States[1].EventState.Timeouts.EventTimeout) - assert.Equal(t, "PT3S", w.States[1].EventState.Timeouts.ActionExecTimeout) - - // Parallel state - assert.NotEmpty(t, w.States[2].ParallelState.Branches) - assert.Equal(t, "ShortDelayBranch", w.States[2].ParallelState.Branches[0].Name) - assert.Equal(t, "shortdelayworkflowid", w.States[2].ParallelState.Branches[0].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "PT5H", w.States[2].ParallelState.Branches[0].Timeouts.ActionExecTimeout) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Branches[0].Timeouts.BranchExecTimeout) - assert.Equal(t, "LongDelayBranch", w.States[2].ParallelState.Branches[1].Name) - assert.Equal(t, "longdelayworkflowid", w.States[2].ParallelState.Branches[1].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "ParallelExec", w.States[2].Name) - assert.Equal(t, model.StateTypeParallel, w.States[2].Type) - assert.Equal(t, model.CompletionTypeAtLeast, w.States[2].ParallelState.CompletionType) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Timeouts.BranchExecTimeout) - assert.Equal(t, "PT1S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, intstr.IntOrString{IntVal: 13}, w.States[2].ParallelState.NumCompleted) - - // Switch state - assert.NotEmpty(t, w.States[3].SwitchState.EventConditions) - assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].Name) - assert.Equal(t, model.StateTypeSwitch, w.States[3].Type) - assert.Equal(t, "visaApprovedEvent", w.States[3].EventConditions[0].Name) - assert.Equal(t, "visaApprovedEventRef", w.States[3].EventConditions[0].EventRef) - assert.Equal(t, "HandleApprovedVisa", w.States[3].EventConditions[0].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "mastercard": model.FromString("disallowed"), - "visa": model.FromString("allowed"), - }, - w.States[3].EventConditions[0].Metadata, - ) - assert.Equal(t, "visaRejectedEvent", w.States[3].EventConditions[1].EventRef) - assert.Equal(t, "HandleRejectedVisa", w.States[3].EventConditions[1].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "test": 
model.FromString("tested"), - }, - w.States[3].EventConditions[1].Metadata, - ) - assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) - assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, "HandleNoVisaDecision", w.States[3].SwitchState.DefaultCondition.Transition.NextState) - - // DataBasedSwitchState - dataBased := w.States[4].SwitchState - assert.NotEmpty(t, dataBased.DataConditions) - assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].Name) - dataCondition := dataBased.DataConditions[0] - assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) - assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) - assert.Equal(t, "RejectApplication", w.States[4].DefaultCondition.Transition.NextState) - assert.Equal(t, "PT1S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Single) - - // operation state - assert.NotEmpty(t, w.States[5].OperationState.Actions) - assert.Equal(t, "GreetSequential", w.States[5].Name) - assert.Equal(t, model.StateTypeOperation, w.States[5].Type) - assert.Equal(t, model.ActionModeSequential, w.States[5].OperationState.ActionMode) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[5].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].FunctionRef.RefName) - - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) - assert.Equal(t, "PT1H", w.States[5].OperationState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT1S", w.States[5].OperationState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[5].OperationState.Timeouts.StateExecTimeout.Single) - - // forEach state - assert.NotEmpty(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "SendTextForHighPriority", w.States[6].Name) - assert.Equal(t, model.ForEachModeTypeSequential, w.States[6].ForEachState.Mode) - assert.Equal(t, model.StateTypeForEach, w.States[6].Type) - assert.Equal(t, "${ .messages }", w.States[6].ForEachState.InputCollection) - assert.Equal(t, "${ .outputMessages }", w.States[6].ForEachState.OutputCollection) - assert.Equal(t, "${ .this }", w.States[6].ForEachState.IterationParam) - - batchSize := intstr.FromInt(45) - assert.Equal(t, &batchSize, w.States[6].ForEachState.BatchSize) - - assert.NotNil(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "test", w.States[6].ForEachState.Actions[0].Name) - assert.NotNil(t, w.States[6].ForEachState.Actions[0].FunctionRef) - assert.Equal(t, "sendTextFunction", w.States[6].ForEachState.Actions[0].FunctionRef.RefName) - assert.Equal(t, map[string]model.Object{"message": model.FromString("${ .singlemessage }")}, w.States[6].ForEachState.Actions[0].FunctionRef.Arguments) - - // assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT12H", 
w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) - - assert.Equal(t, "PT11H", w.States[6].ForEachState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT11S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Single) - - // Inject state - assert.Equal(t, "HelloInject", w.States[7].Name) - assert.Equal(t, model.StateTypeInject, w.States[7].Type) - assert.Equal(t, model.FromString("Hello World, last state!"), w.States[7].InjectState.Data["result"]) - assert.Equal(t, model.FromBool(false), w.States[7].InjectState.Data["boolValue"]) - assert.Equal(t, "PT11M", w.States[7].InjectState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[7].InjectState.Timeouts.StateExecTimeout.Single) - - // callback state - assert.NotEmpty(t, w.States[8].CallbackState.Action) - assert.Equal(t, "CheckCreditCallback", w.States[8].Name) - assert.Equal(t, model.StateTypeCallback, w.States[8].Type) - assert.Equal(t, "callCreditCheckMicroservice", w.States[8].CallbackState.Action.FunctionRef.RefName) - assert.Equal(t, - map[string]model.Object{ - "argsObj": model.FromMap(map[string]interface{}{"age": 10, "name": "hi"}), - "customer": model.FromString("${ .customer }"), - "time": model.FromInt(48), - }, - w.States[8].CallbackState.Action.FunctionRef.Arguments, - ) - assert.Equal(t, "PT10S", w.States[8].CallbackState.Action.Sleep.Before) - assert.Equal(t, "PT20S", w.States[8].CallbackState.Action.Sleep.After) - assert.Equal(t, "PT150M", w.States[8].CallbackState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT34S", w.States[8].CallbackState.Timeouts.EventTimeout) - assert.Equal(t, "PT115M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Single) - - assert.Equal(t, true, w.States[8].CallbackState.EventDataFilter.UseData) - assert.Equal(t, "test data", w.States[8].CallbackState.EventDataFilter.Data) - assert.Equal(t, "${ .customer }", w.States[8].CallbackState.EventDataFilter.ToStateData) - - // sleepState - assert.NotEmpty(t, w.States[9].SleepState.Duration) - assert.Equal(t, "WaitForCompletionSleep", w.States[9].Name) - assert.Equal(t, model.StateTypeSleep, w.States[9].Type) - assert.Equal(t, "PT5S", w.States[9].SleepState.Duration) - assert.NotNil(t, w.States[9].SleepState.Timeouts) - assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, true, w.States[9].End.Terminate) - - // switch state with DefaultCondition as string - assert.NotEmpty(t, w.States[10].SwitchState) - assert.Equal(t, "HelloStateWithDefaultConditionString", w.States[10].Name) - assert.Equal(t, "${ true }", w.States[10].SwitchState.DataConditions[0].Condition) - assert.Equal(t, "HandleApprovedVisa", w.States[10].SwitchState.DataConditions[0].Transition.NextState) - assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) - assert.Equal(t, true, w.States[10].End.Terminate) - }, - }, { - "./testdata/workflows/dataInputSchemaObject.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.DataInputSchema) - expected := model.Object{} - err := json.Unmarshal([]byte("{\"title\": \"Hello World Schema\", \"properties\": {\"person\": "+ - "{\"type\": \"object\",\"properties\": {\"name\": {\"type\": \"string\"}},\"required\": "+ - "[\"name\"]}}, \"required\": 
[\"person\"]}"), - &expected) - fmt.Printf("err: %s\n", err) - fmt.Printf("schema: %+v\n", expected) - assert.Equal(t, &expected, w.DataInputSchema.Schema) - assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) - }, + name: "Valid YAML File", + filePath: "testdata/valid_workflow.yaml", + expectError: false, + }, + { + name: "Invalid YAML File", + filePath: "testdata/invalid_workflow.yaml", + expectError: true, + }, + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Non-existent File", + filePath: "testdata/nonexistent_workflow.yaml", + expectError: true, }, } - for _, file := range files { - t.Run( - file.name, func(t *testing.T) { - workflow, err := FromFile(file.name) - if assert.NoError(t, err, "Test File %s", file.name) { - assert.NotNil(t, workflow, "Test File %s", file.name) - file.f(t, workflow) - } - }, - ) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + workflow, err := FromFile(tt.filePath) + if tt.expectError { + assert.Error(t, err) + assert.Nil(t, workflow) + } else { + assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) + } + }) } } -func TestUnmarshalWorkflowBasicTests(t *testing.T) { - t.Run("BasicWorkflowYamlNoAuthDefs", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: TestUnmarshalWorkflowBasicTests -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: inject - data: - result: Hello World! - end: true -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow) - - b, err := json.Marshal(workflow) - assert.Nil(t, err) - assert.True(t, !strings.Contains(string(b), "auth")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) - - t.Run("BasicWorkflowBasicAuthJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ], - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "transition": "Next Hello State" - }, - { - "name": "Next Hello State", - "type": "inject", - "data": { - "result": "Next Hello World!" 
+func TestCheckFilePath(t *testing.T) { + tests := []struct { + name string + filePath string + expectError bool + }{ + { + name: "Valid YAML File Path", + filePath: "testdata/valid_workflow.yaml", + expectError: false, }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowBasicAuthStringJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": "testdata/workflows/urifiles/auth.json", - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowInteger", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.7", - "auth": 123, - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" 
- }, - "end": true - } - ] -} -`)) - - assert.NotNil(t, err) - assert.Equal(t, "auth must be string or array", err.Error()) - assert.Nil(t, workflow) - }) -} - -func TestUnmarshalWorkflowSwitchState(t *testing.T) { - t.Run("WorkflowStatesTest", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowStatesTest -description: Inject Hello World -start: GreetDelay -metadata: - metadata1: metadata1 - metadata2: metadata2 -auth: -- name: testAuth - scheme: bearer - properties: - token: test_token - metadata: - auth1: auth1 - auth2: auth2 -events: -- name: StoreBidFunction - type: store -- name: CarBidEvent - type: store -- name: visaRejectedEvent - type: store -- name: visaApprovedEventRef - type: store -functions: -- name: callCreditCheckMicroservice - operation: http://myapis.org/creditcheck.json#checkCredit -- name: StoreBidFunction - operation: http://myapis.org/storebid.json#storeBid -- name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: -- name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid -- name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: bidFunctionRef - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .thatBid }" - time: 32 - name: bidEventRef - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec -- name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid - timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased -- name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: visaApprovedEventRef - transition: - nextState: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: - nextState: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT10H - stateExecTimeout: - total: PT10S - single: PT20S - defaultCondition: - transition: - nextState: HelloStateWithDefaultConditionString -- name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority -- name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages }" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject -- name: HelloInject - type: inject - data: - result: Hello World, another state! 
- timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: WaitForCompletionSleep -- name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true -- name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - time: 48 - argsObj: { - "name" : "hi", - "age": { - "initial": 10, - "final": 32 - } - } - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT199M - eventTimeout: PT348S - stateExecTimeout: - total: PT115M - single: PT22M - transition: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 50 - name: eventRefName - timeouts: - actionExecTimeout: PT777S - stateExecTimeout: - total: PT33M - single: PT123M - transition: HandleApprovedVisaSubFlow -- name: HandleApprovedVisaSubFlow - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -`)) - assert.NoError(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.NoError(t, err) - - // workflow and auth metadata - assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) - assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) - - // Callback state - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"},\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) - - // Operation State - assert.True(t, strings.Contains(string(b), `{"name":"HandleApprovedVisa","type":"operation","transition":{"nextState":"HandleApprovedVisaSubFlow"},"actionMode":"sequential","actions":[{"name":"eventRefName","eventRef":{"triggerEventRef":"StoreBidFunction","resultEventRef":"StoreBidFunction","data":"${ .patientInfo }","contextAttributes":{"customer":"${ .customer }","time":50},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"timeouts":{"stateExecTimeout":{"single":"PT123M","total":"PT33M"},"actionExecTimeout":"PT777S"}}`)) - - // Delay State - assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"StoreCarAuctionBid\"},\"timeDelay\":\"PT5S\"}")) - - // Event State - assert.True(t, strings.Contains(string(b), 
"{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"transition\":{\"nextState\":\"ParallelExec\"},\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) - - // Parallel State - assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) - - // Switch State - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) - - // Switch State with string DefaultCondition - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"dataConditions\":[{\"condition\":\"${ true }\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) - - // Foreach State - assert.True(t, strings.Contains(string(b), `{"name":"SendTextForHighPriority","type":"foreach","transition":{"nextState":"HelloInject"},"inputCollection":"${ .messages }","outputCollection":"${ .outputMessages }","iterationParam":"${ .this }","batchSize":45,"actions":[{"name":"test","functionRef":{"refName":"sendTextFunction","arguments":{"message":"${ .singlemessage }"},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"mode":"sequential","timeouts":{"stateExecTimeout":{"single":"PT22S","total":"PT11S"},"actionExecTimeout":"PT11H"}}`)) - - // Inject State - assert.True(t, strings.Contains(string(b), 
"{\"name\":\"HelloInject\",\"type\":\"inject\",\"transition\":{\"nextState\":\"WaitForCompletionSleep\"},\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) - - // Sleep State - assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - // Make sure that the Action FunctionRef is unmarshalled correctly - assert.Equal(t, model.FromString("${ .singlemessage }"), workflow.States[5].ForEachState.Actions[0].FunctionRef.Arguments["message"]) - assert.Equal(t, "sendTextFunction", workflow.States[5].ForEachState.Actions[0].FunctionRef.RefName) - assert.NoError(t, err) - - }) - - t.Run("WorkflowSwitchStateDataConditions with wrong field name", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowSwitchStateDataConditions with wrong field name -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: switch - dataCondition: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: - transition: - nextState: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleRejectedVisaWorkflowID - end: - terminate: true -- name: HandleNoVisaDecision - type: operation - actions: - - subFlowRef: - workflowId: handleNoVisaDecisionWorkflowId - end: - terminate: true -`)) - if assert.Error(t, err) { - assert.Equal(t, `workflow.states[0].switchState.dataConditions is required`, err.Error()) - } - assert.Nil(t, workflow) - }) - - t.Run("Test complex workflow with compensate transitions", func(t *testing.T) { - workflow, err := FromFile("./testdata/workflows/compensate.sw.json") - - assert.Nil(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.Nil(t, err) + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Directory Path", + filePath: "testdata", + expectError: true, + }, + } - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := checkFilePath(tt.filePath) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } } diff --git a/parser/testdata/applicationrequestfunctions.json b/parser/testdata/applicationrequestfunctions.json deleted file mode 100644 index bafc861..0000000 --- a/parser/testdata/applicationrequestfunctions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/application.json#emailRejection" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/applicationrequestretries.json b/parser/testdata/applicationrequestretries.json deleted file mode 100644 index 40f83b5..0000000 --- a/parser/testdata/applicationrequestretries.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } 
- ] -} \ No newline at end of file diff --git a/parser/testdata/constantsDogs.json b/parser/testdata/constantsDogs.json deleted file mode 100644 index cd3f101..0000000 --- a/parser/testdata/constantsDogs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Translations": { - "Dog": { - "Serbian": "pas", - "Spanish": "perro", - "French": "chien" - } - } -} \ No newline at end of file diff --git a/parser/testdata/datainputschema.json b/parser/testdata/datainputschema.json deleted file mode 100644 index bace233..0000000 --- a/parser/testdata/datainputschema.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - } -} \ No newline at end of file diff --git a/parser/testdata/errors.json b/parser/testdata/errors.json deleted file mode 100644 index 099e14d..0000000 --- a/parser/testdata/errors.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventbasedgreetingevents.json b/parser/testdata/eventbasedgreetingevents.json deleted file mode 100644 index b63f2bf..0000000 --- a/parser/testdata/eventbasedgreetingevents.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/functiondefs.json b/parser/testdata/functiondefs.json deleted file mode 100644 index fc7dd94..0000000 --- a/parser/testdata/functiondefs.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "functions": [ - { - "name": "checkFundsAvailability", - "operation": "file://myapis/billingapis.json#checkFunds" - }, - { - "name": "sendSuccessEmail", - "operation": "file://myapis/emailapis.json#paymentSuccess" - }, - { - "name": "sendInsufficientFundsEmail", - "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventdefs.yml b/parser/testdata/invalid_workflow.yaml similarity index 59% rename from parser/testdata/eventdefs.yml rename to parser/testdata/invalid_workflow.yaml index dd2c3b7..32e25a9 100644 --- a/parser/testdata/eventdefs.yml +++ b/parser/testdata/invalid_workflow.yaml @@ -1,22 +1,25 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-events: -- name: PaymentReceivedEvent - type: payment.receive - source: paymentEventSource - correlation: - - contextAttributeName: accountId -- name: ConfirmationCompletedEvent - type: payment.confirmation - kind: produced \ No newline at end of file + +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + # Missing "endpoint" field, making it invalid \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json deleted file mode 100644 index e5316d9..0000000 --- a/parser/testdata/secrets.json +++ /dev/null @@ -1,6 +0,0 @@ - -[ - "SECRET1", - "SECRET2", - "SECRET3" -] \ No newline at end of file diff --git a/parser/testdata/timeouts.json b/parser/testdata/timeouts.json deleted file mode 100644 index c3586bd..0000000 --- a/parser/testdata/timeouts.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } -} \ No newline at end of file diff --git a/parser/testdata/valid_workflow.json b/parser/testdata/valid_workflow.json new file mode 100644 index 0000000..204e917 --- /dev/null +++ b/parser/testdata/valid_workflow.json @@ -0,0 +1,19 @@ +{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/urifiles/auth.yaml b/parser/testdata/valid_workflow.yaml similarity index 62% rename from parser/testdata/workflows/urifiles/auth.yaml rename to parser/testdata/valid_workflow.yaml index 14ba4e2..19df6c4 100644 --- a/parser/testdata/workflows/urifiles/auth.yaml +++ b/parser/testdata/valid_workflow.yaml @@ -1,10 +1,10 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-- name: testAuth - properties: - token: test_token - scheme: bearer -- name: testAuth2 - properties: - password: test_pwd - username: test_user - scheme: basic +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + endpoint: http://example.com \ No newline at end of file diff --git a/parser/testdata/workflows/VetAppointmentWorkflow.json b/parser/testdata/workflows/VetAppointmentWorkflow.json deleted file mode 100644 index f6c0d43..0000000 --- a/parser/testdata/workflows/VetAppointmentWorkflow.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "id": "VetAppointmentWorkflow", - "name": "Vet Appointment Workflow", - "description": "Vet service call via events", - "version": "1.0", - "specVersion": "0.8", - "start": "MakeVetAppointmentState", - "events": [ - { - "name": "MakeVetAppointment", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "produced" - }, - { - "name": "VetAppointmentInfo", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "consumed" - } - ], - "states": [ - { - "name": "MakeVetAppointmentState", - "type": "operation", - "actions": [ - { - "name": "MakeAppointmentAction", - "eventRef": { - "triggerEventRef": "MakeVetAppointment", - "data": "${ .patientInfo }", - "resultEventRef": "VetAppointmentInfo" - }, - "actionDataFilter": { - "results": "${ .appointmentInfo }" - } - } - ], - "timeouts": { - "actionExecTimeout": "PT15M" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/actiondata-defaultvalue.yaml b/parser/testdata/workflows/actiondata-defaultvalue.yaml deleted file mode 100644 index 6b1628d..0000000 --- a/parser/testdata/workflows/actiondata-defaultvalue.yaml +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - id: greetingId - name: Greet - type: operation - actions: - - name: greeting - functionRef: - refName: greetingFunction - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue103.json b/parser/testdata/workflows/applicationrequest-issue103.json deleted file mode 100644 index 48b71fc..0000000 --- a/parser/testdata/workflows/applicationrequest-issue103.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": "./testdata/workflows/urifiles/auth.yaml", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml deleted file mode 100644 index 395ac8b..0000000 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: applicantrequest -version: '1.0' -name: Applicant Request Decision Workflow -description: Determine if applicant request is valid -start: CheckApplication -specVersion: "0.8" -functions: - - name: sendRejectionEmailFunction - operation: http://myapis.org/applicationapi.json#emailRejection -states: - - name: CheckApplication - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: StartApplication - - condition: "${ .applicants | .age < 18 }" - transition: RejectApplication - defaultCondition: - transition: RejectApplication - - name: StartApplication - type: operation - actions: - - subFlowRef: - workflowId: startApplicationWorkflowId - end: true - - name: RejectApplication - type: operation - actionMode: sequential - actions: - - functionRef: - refName: sendRejectionEmailFunction - arguments: - applicant: "${ .applicant }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json deleted file mode 100644 index 99b373c..0000000 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": "file://testdata/workflows/urifiles/auth.json", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json deleted file mode 100644 index 674532a..0000000 --- a/parser/testdata/workflows/applicationrequest.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "specVersion": "0.8", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": 
"${ .applicants | .age >= 18 }", - "transition": "StartApplication" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json deleted file mode 100644 index 0bdfe5f..0000000 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "metadata":{ - "metadata1": "metadata1", - "metadata2": "metadata2" - }, - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token", - "metadata":{ - "auth1": "auth1", - "auth2": "auth2" - } - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "metadata": { - "metadataState": "state info" - }, - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json deleted file mode 100644 index 309cf8f..0000000 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "start": { - "stateName": "CheckApplication" - }, - "functions": "file://testdata/applicationrequestfunctions.json", - "retries": "file://testdata/applicationrequestretries.json", - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - 
"nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json deleted file mode 100644 index c7c341d..0000000 --- a/parser/testdata/workflows/applicationrequest.url.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestfunctions.json", - "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestretries.json", - "start": { - "stateName": "CheckApplication" - }, - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkInbox.json b/parser/testdata/workflows/checkInbox.json deleted file mode 100644 index 0256a8e..0000000 --- a/parser/testdata/workflows/checkInbox.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "checkInbox", - "name": "Check Inbox Workflow", - "version": "1.0", - "specVersion": "0.8", - "description": "Periodically Check Inbox", - "start": { - "stateName": "CheckInbox", - "schedule": { - "cron": "0 0/15 * * * ?" 
- } - }, - "functions": [ - { - "name": "checkInboxFunction", - "operation": "http://myapis.org/inboxapi.json#checkNewMessages" - }, - { - "name": "sendTextFunction", - "operation": "http://myapis.org/inboxapi.json#sendText" - } - ], - "states": [ - { - "name": "CheckInbox", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": "checkInboxFunction" - } - ], - "transition": "SendTextForHighPriority" - }, - { - "name": "SendTextForHighPriority", - "type": "foreach", - "inputCollection": "${ .messages }", - "iterationParam": "singlemessage", - "actions": [ - { - "functionRef": { - "refName": "sendTextFunction", - "arguments": { - "message": "${ .singlemessage }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkcarvitals.json b/parser/testdata/workflows/checkcarvitals.json deleted file mode 100644 index a0f14ef..0000000 --- a/parser/testdata/workflows/checkcarvitals.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "id": "checkcarvitals", - "name": "Check Car Vitals Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "WhenCarIsOn", - "states": [ - { - "name": "WhenCarIsOn", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "CarTurnedOnEvent" - ] - } - ], - "transition": "DoCarVitalChecks" - }, - { - "name": "DoCarVitalChecks", - "type": "operation", - "actions": [ - { - "subFlowRef": "vitalscheck", - "sleep": { - "after": "PT1S" - } - } - ], - "transition": "CheckContinueVitalChecks" - }, - { - "name": "CheckContinueVitalChecks", - "type": "switch", - "eventConditions": [ - { - "name": "Car Turned Off Condition", - "eventRef": "CarTurnedOffEvent", - "end": true - } - ], - "defaultCondition": { - "transition": "DoCarVitalChecks" - } - } - ], - "events": [ - { - "name": "CarTurnedOnEvent", - "type": "car.events", - "source": "my/car" - }, - { - "name": "CarTurnedOffEvent", - "type": "car.events", - "source": "my/car" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml deleted file mode 100644 index 0729e80..0000000 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: 0 0/15 * * * ? 
-functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: checkInboxFunction - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml deleted file mode 100644 index e42d9a2..0000000 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: - expression: 0 0/15 * * * ? -functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: - refName: checkInboxFunction - transition: - nextState: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/compensate.sw.json b/parser/testdata/workflows/compensate.sw.json deleted file mode 100644 index 9f6ab1f..0000000 --- a/parser/testdata/workflows/compensate.sw.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "id": "compensation", - "version": "1.0", - "name": "Workflow Error example", - "description": "An example of how compensation works", - "specVersion": "0.8", - "start": "printStatus", - "functions": [ - { - "name": "PrintOutput", - "type": "custom", - "operation": "sysout" - } - ], - "states": [ - { - "name": "printStatus", - "type": "inject", - "data": { - "compensated": false - }, - "compensatedBy": "compensating", - "transition": "branch" - }, - { - "name": "branch", - "type": "switch", - "dataConditions": [ - { - "condition": ".shouldCompensate==true", - "transition": { - "nextState": "finish_compensate", - "compensate": true - } - }, - { - "condition": ".shouldCompensate==false", - "transition": { - "nextState": "finish_not_compensate", - "compensate": false - } - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "compensating", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensated": true - }, - "transition": "compensating_more" - }, - { - "name": 
"compensating_more", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensating_more": "Real Betis Balompie" - }, - "end": true - }, - { - "name": "finish_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - }, - { - "name": "finish_not_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_not_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/compensation.sw.json b/parser/testdata/workflows/compensation.sw.json deleted file mode 100644 index 567a501..0000000 --- a/parser/testdata/workflows/compensation.sw.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "id": "compensation", - "version": "1.0", - "name": "Workflow Error example", - "description": "An example of how compensation works", - "start": "printStatus", - "states": [ - { - "name": "printStatus", - "type": "inject", - "data": { - "compensated": false - }, - "compensatedBy" : "compensating", - "transition": "branch" - }, - { - "name": "branch", - "type": "switch", - "dataConditions": [ - { - "condition": ".shouldCompensate==true", - "transition": { - "nextState" : "finish_compensate", - "compensate" : true - } - }, - { - "condition": ".shouldCompensate==false", - "transition": { - "nextState" : "finish_not_compensate", - "compensate" : false - } - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "compensating", - "usedForCompensation" : true, - "type": "inject", - "data": { - "compensated": true - }, - "transition" : "compensating_more" - }, - { - "name": "compensating_more", - "usedForCompensation" : true, - "type": "inject", - "data": { - "compensating_more": "Real Betis Balompie" - } - }, - { - "name": "finish_compensate", - "type": "operation", - "actions": [], - "end": { - "compensate": true - } - }, - { - "name": "finish_not_compensate", - "type": "operation", - "actions": [], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/conditionbasedstate.yaml b/parser/testdata/workflows/conditionbasedstate.yaml deleted file mode 100644 index f42b56d..0000000 --- a/parser/testdata/workflows/conditionbasedstate.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - condition: "${ .applicants | .age < 18 }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/continue-as-example.yaml b/parser/testdata/workflows/continue-as-example.yaml deleted file mode 100644 index b5957f5..0000000 --- a/parser/testdata/workflows/continue-as-example.yaml +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: notifycustomerworkflow -name: Notify Customer -version: '1.0' -specVersion: '0.8' -start: WaitForCustomerEvent -states: - - name: WaitForCustomerEvent - type: event - onEvents: - - eventRefs: - - CustomerEvent - eventDataFilter: - data: "${ .customerId }" - toStateData: "${ .eventCustomerId }" - actions: - - functionRef: - refName: NotifyCustomerFunction - arguments: - customerId: "${ .eventCustomerId }" - stateDataFilter: - output: "${ .count = .count + 1 }" - transition: CheckEventQuota - - name: CheckEventQuota - type: switch - dataConditions: - - condition: "${ try(.customerCount) != null and .customerCount > .quota.maxConsumedEvents}" - end: - continueAs: - workflowId: notifycustomerworkflow - version: '1.0' - data: "${ del(.customerCount) }" - workflowExecTimeout: - duration: "PT1H" - runBefore: "GenerateReport" - interrupt: true - defaultCondition: - transition: WaitForCustomerEvent -events: - - name: CustomerEvent - type: org.events.customerEvent - source: customerSource -functions: - - name: NotifyCustomerFunction - operation: http://myapis.org/customerapis.json#notifyCustomer \ No newline at end of file diff --git a/parser/testdata/workflows/customerbankingtransactions.json b/parser/testdata/workflows/customerbankingtransactions.json deleted file mode 100644 index 98fbd34..0000000 --- a/parser/testdata/workflows/customerbankingtransactions.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "id": "customerbankingtransactions", - "name": "Customer Banking Transactions Workflow", - "version": "1.0", - "specVersion": "0.8", - "autoRetries": true, - "constants": { - "largetxamount": 5000 - }, - "states": [ - { - "name": "ProcessTransactions", - "type": "foreach", - "inputCollection": "${ .customer.transactions }", - "iterationParam": "${ .tx }", - "actions": [ - { - "name": "Process Larger Transaction", - "functionRef": "Banking Service - Larger Tx", - "condition": "${ .tx >= $CONST.largetxamount }" - }, - { - "name": "Process Smaller Transaction", - "functionRef": "Banking Service - Smaller Tx", - "condition": "${ .tx < 
$CONST.largetxamount }" - } - ], - "end": true - } - ], - "functions": [ - { - "name": "Banking Service - Larger Tx", - "type": "asyncapi", - "operation": "banking.yaml#largerTransation" - }, - { - "name": "Banking Service - Smaller Tx", - "type": "asyncapi", - "operation": "banking.yaml#smallerTransation" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/customercreditcheck.json b/parser/testdata/workflows/customercreditcheck.json deleted file mode 100644 index 8a3914f..0000000 --- a/parser/testdata/workflows/customercreditcheck.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "customercreditcheck", - "version": "1.0", - "specVersion": "0.8", - "name": "Customer Credit Check Workflow", - "description": "Perform Customer Credit Check", - "start": "CheckCredit", - "functions": [ - { - "name": "creditCheckFunction", - "operation": "http://myapis.org/creditcheckapi.json#doCreditCheck" - }, - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/creditcheckapi.json#rejectionEmail" - }, - { - "name": "callCreditCheckMicroservice", - "operation": "http://myapis.org/creditcheckapi.json#creditCheckMicroservice" - } - ], - "events": [ - { - "name": "CreditCheckCompletedEvent", - "type": "creditCheckCompleteType", - "source": "creditCheckSource", - "correlation": [ - { - "contextAttributeName": "customerId" - } - ] - } - ], - "states": [ - { - "name": "CheckCredit", - "type": "callback", - "action": { - "functionRef": { - "refName": "callCreditCheckMicroservice", - "arguments": { - "customer": "${ .customer }" - } - } - }, - "eventRef": "CreditCheckCompletedEvent", - "timeouts": { - "stateExecTimeout": "PT15M" - }, - "transition": "EvaluateDecision" - }, - { - "name": "EvaluateDecision", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .creditCheck | .decision == \"Approved\" }", - "transition": "StartApplication" - }, - { - "condition": "${ .creditCheck | .decision == \"Denied\" }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaNotExists.yaml b/parser/testdata/workflows/dataInputSchemaNotExists.yaml deleted file mode 100644 index 7aa3712..0000000 --- a/parser/testdata/workflows/dataInputSchemaNotExists.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2024 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: orderprocessing -version: '1.0' -specVersion: '0.8' -start: ChooseOnLanguage -dataInputSchema: - schema: doesnexist.json - failOnValidationErrors: true -functions: - - name: greetFunction - type: custom - operation: sysout -states: - - name: ChooseOnLanguage - type: switch - dataConditions: - - condition: "${ .language == \"English\" }" - transition: GreetInEnglish - - condition: "${ .language == \"Spanish\" }" - transition: GreetInSpanish - defaultCondition: GreetInEnglish - - name: GreetInEnglish - type: inject - data: - greeting: "Hello from JSON Workflow, " - transition: GreetPerson - - name: GreetInSpanish - type: inject - data: - greeting: "Saludos desde JSON Workflow, " - transition: GreetPerson - - name: GreetPerson - type: operation - actions: - - name: greetAction - functionRef: - refName: greetFunction - arguments: - message: ".greeting+.name" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaObject.json b/parser/testdata/workflows/dataInputSchemaObject.json deleted file mode 100644 index 7b50c0d..0000000 --- a/parser/testdata/workflows/dataInputSchemaObject.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "id": "greeting", - "version": "1.0.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "dataInputSchema": { - "failOnValidationErrors": false, - "schema": { - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - }, - "required": [ - "person" - ] - } - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ {greeting: .greeting} }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaValidation.yaml b/parser/testdata/workflows/dataInputSchemaValidation.yaml deleted file mode 100644 index 4bc1e11..0000000 --- a/parser/testdata/workflows/dataInputSchemaValidation.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: Valid DataInputSchema -version: '1.0' -specVersion: '0.8' -start: Start -dataInputSchema: - failOnValidationErrors: false - schema: "file://testdata/datainputschema.json" -states: -- name: Start - type: inject - data: - done: true - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json deleted file mode 100644 index bdf80d6..0000000 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "id": "eventbaseddataandswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions with Event Database Condition", - "specVersion": "0.8", - "start": { - "stateName": "Start" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "Start", - "type": "switch", - "dataConditions": [ - { - "condition": "${ true }", - "transition": "CheckVisaStatus" - } - ], - "defaultCondition": { - "transition": { - "nextState": "CheckVisaStatus" - } - } - }, - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json deleted file mode 100644 index 3510d11..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "start": "Greet", - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .greet }", - "toStateData": "${ .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet.name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload.greeting }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json 
b/parser/testdata/workflows/eventbasedgreeting.sw.p.json deleted file mode 100644 index 589ad36..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": "file://testdata/eventbasedgreetingevents.json", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json deleted file mode 100644 index 80e81b0..0000000 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "id": "eventbasedgreetingexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - }, - { - "name": "greetingFunction2", - "operation": "file://myapis/greetingapis.json#greeting2" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - }, - { - "eventRefs": [ - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "{{ $.data.greet2 }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction2", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json deleted file mode 100644 index 946aa39..0000000 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "eventbasedgreetingnonexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - 
"name": "Greet", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent", - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json deleted file mode 100644 index 3d0075f..0000000 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "id": "eventbasedswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "specVersion": "0.8", - "start": { - "stateName": "CheckVisaStatus" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedswitchstate.json b/parser/testdata/workflows/eventbasedswitchstate.json deleted file mode 100644 index c1b48b0..0000000 --- a/parser/testdata/workflows/eventbasedswitchstate.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "id": "eventbasedswitchstate", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "start": "CheckVisaStatus", - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": "HandleApprovedVisa" - }, - { - "eventRef": "visaRejectedEvent", - "transition": "HandleRejectedVisa" - } - ], - "eventTimeout": "PT1H", - "defaultCondition": { - "transition": "HandleNoVisaDecision" - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleApprovedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleRejectedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": 
"HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleNoVisaDecisionWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/fillglassofwater.json b/parser/testdata/workflows/fillglassofwater.json deleted file mode 100644 index b45d84e..0000000 --- a/parser/testdata/workflows/fillglassofwater.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "id": "fillglassofwater", - "name": "Fill glass of water workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "Check if full", - "functions": [ - { - "name": "Increment Current Count Function", - "type": "expression", - "operation": ".counts.current += 1 | .counts.current" - } - ], - "states": [ - { - "name": "Check if full", - "type": "switch", - "dataConditions": [ - { - "name": "Need to fill more", - "condition": "${ .counts.current < .counts.max }", - "transition": "Add Water" - }, - { - "name": "Glass full", - "condition": ".counts.current >= .counts.max", - "end": true - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "Add Water", - "type": "operation", - "actions": [ - { - "functionRef": "Increment Current Count Function", - "actionDataFilter": { - "toStateData": ".counts.current" - } - } - ], - "transition": "Check if full" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/finalizeCollegeApplication.json b/parser/testdata/workflows/finalizeCollegeApplication.json deleted file mode 100644 index 9c93616..0000000 --- a/parser/testdata/workflows/finalizeCollegeApplication.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "id": "finalizeCollegeApplication", - "name": "Finalize College Application", - "version": "1.0", - "specVersion": "0.8", - "start": "FinalizeApplication", - "events": [ - { - "name": "ApplicationSubmitted", - "type": "org.application.submitted", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "SATScoresReceived", - "type": "org.application.satscores", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "RecommendationLetterReceived", - "type": "org.application.recommendationLetter", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - } - ], - "functions": [ - { - "name": "finalizeApplicationFunction", - "operation": "http://myapis.org/collegeapplicationapi.json#finalize" - } - ], - "states": [ - { - "name": "FinalizeApplication", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "ApplicationSubmitted", - "SATScoresReceived", - "RecommendationLetterReceived" - ], - "actions": [ - { - "functionRef": { - "refName": "finalizeApplicationFunction", - "arguments": { - "student": "${ .applicantId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml deleted file mode 100644 index 00f04f3..0000000 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -constants: "file://testdata/constantsDogs.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml deleted file mode 100644 index 27d00e1..0000000 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: "file://testdata/secrets.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml deleted file mode 100644 index 2f64a98..0000000 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: - - NAME -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .NAME }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml deleted file mode 100644 index 015a711..0000000 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: custom.greeting -version: '1.0' -specVersion: "0.8" -description: Greet Someone -# name: Greeting example #can be empty -# start: Greet #can be empty -functions: - - name: greetingCustomFunction - operation: /path/to/my/script/greeting.ts#CustomGreeting - # Support custom function type definition - type: custom - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText - type: graphql - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting - - name: StoreBidFunction - operation: http://myapis.org/inboxapi.json#storeBidFunction - - name: callCreditCheckMicroservice - operation: http://myapis.org/inboxapi.json#callCreditCheckMicroservice -events: - - name: StoreBidFunction - type: StoreBidFunction - source: StoreBidFunction - - name: CarBidEvent - type: typeCarBidEvent - source: sourceCarBidEvent - - name: visaApprovedEventRef - type: typeVisaApprovedEventRef - source: sourceVisaApprovedEventRef - - name: visaRejectedEvent - type: typeVisaRejectedEvent - source: sourceVisaRejectedEvent -states: - - name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid - - name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: funcref1 - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 48 - name: eventRefName - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec - - name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid 
- timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased - - name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: visaApprovedEventRef - transition: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - defaultCondition: - transition: HandleNoVisaDecision - - name: CheckApplicationSwitchDataBased - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: - nextState: StartApplication - defaultCondition: - transition: RejectApplication - timeouts: - stateExecTimeout: - total: PT1S - single: PT2S - - name: GreetSequential - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" - timeouts: - actionExecTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - stateDataFilter: - dataOutputPath: "${ .greeting }" - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages }" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject - - name: HelloInject - type: inject - data: - result: Hello World, last state! 
- boolValue: false - timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: CheckCreditCallback - - name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - argsObj: { - "name" : "hi", - "age": 10 - } - time: 48 - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT150M - eventTimeout: PT34S - stateExecTimeout: - total: PT115M - single: PT22M - transition: WaitForCompletionSleep - - name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true - - name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: RejectApplication - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: HandleNoVisaDecision - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: StartApplication - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleApprovedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleRejectedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json deleted file mode 100644 index 8adeeb6..0000000 --- a/parser/testdata/workflows/greetings.sw.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ .greeting }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml deleted file mode 100644 index 8f5447b..0000000 --- a/parser/testdata/workflows/greetings.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - id: idx - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json deleted file mode 100644 index 9a434d4..0000000 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "name": "Greeting Workflow", - "description": "Greet Someone", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "SleepHere", - "type": "sleep", - "timeouts": { - "stateExecTimeout": "PT10S" - }, - "duration": "PT40S", - "transition": "Greet" - }, - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "parameters": { - "name": "${ .person | .name }" - } - }, - "actionDataFilter": { - "toStateData": "${ .greeting }" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/handleCarAuctionBid.json b/parser/testdata/workflows/handleCarAuctionBid.json deleted file mode 100644 index 6df46b0..0000000 --- a/parser/testdata/workflows/handleCarAuctionBid.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "handleCarAuctionBid", - "version": "1.0", - "specVersion": "0.8", - "name": "Car Auction Bidding Workflow", - "description": "Store a single bid whole the car auction is active", - "start": { - "stateName": "StoreCarAuctionBid", - "schedule": "R/PT2H" - }, - "functions": [ - { - "name": "StoreBidFunction", - "operation": "http://myapis.org/carauctionapi.json#storeBid" - } - ], - "events": [ - { - "name": "CarBidEvent", - "type": "carBidMadeType", - "source": "carBidEventSource" - } - ], - "states": [ - { - "name": "StoreCarAuctionBid", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "CarBidEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "StoreBidFunction", - "arguments": { - "bid": "${ .bid }" - } - } - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/helloworld.json b/parser/testdata/workflows/helloworld.json deleted file mode 100644 index 707b6ef..0000000 --- a/parser/testdata/workflows/helloworld.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "id": "helloworld", - "version": "1.0", - "specVersion": "0.8", - "name": "Hello World Workflow", - "description": "Inject Hello World", - "start": "Hello State", - "states": [ - { - "name": "Hello State", - "type": 
"inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/jobmonitoring.json b/parser/testdata/workflows/jobmonitoring.json deleted file mode 100644 index a11282b..0000000 --- a/parser/testdata/workflows/jobmonitoring.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "id": "jobmonitoring", - "version": "1.0", - "specVersion": "0.8", - "name": "Job Monitoring", - "description": "Monitor finished execution of a submitted job", - "start": "SubmitJob", - "functions": [ - { - "name": "submitJob", - "operation": "http://myapis.org/monitorapi.json#doSubmit" - }, - { - "name": "checkJobStatus", - "operation": "http://myapis.org/monitorapi.json#checkStatus" - }, - { - "name": "reportJobSuceeded", - "operation": "http://myapis.org/monitorapi.json#reportSucceeded" - }, - { - "name": "reportJobFailed", - "operation": "http://myapis.org/monitorapi.json#reportFailure" - } - ], - "states": [ - { - "name": "SubmitJob", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "submitJob", - "arguments": { - "name": "${ .job.name }" - } - }, - "actionDataFilter": { - "results": "${ .jobuid }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobuid }" - }, - "transition": "WaitForCompletion" - }, - { - "name": "WaitForCompletion", - "type": "sleep", - "duration": "PT5S", - "transition": "GetJobStatus" - }, - { - "name": "GetJobStatus", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "checkJobStatus", - "arguments": { - "name": "${ .jobuid }" - } - }, - "actionDataFilter": { - "results": "${ .jobstatus }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobstatus }" - }, - "transition": "DetermineCompletion" - }, - { - "name": "DetermineCompletion", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .jobStatus == \"SUCCEEDED\" }", - "transition": "JobSucceeded" - }, - { - "condition": "${ .jobStatus == \"FAILED\" }", - "transition": "JobFailed" - } - ], - "defaultCondition": { - "transition": "WaitForCompletion" - } - }, - { - "name": "JobSucceeded", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobSuceeded", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - }, - { - "name": "JobFailed", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobFailed", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/onboardcustomer.json b/parser/testdata/workflows/onboardcustomer.json deleted file mode 100644 index 85cb0d6..0000000 --- a/parser/testdata/workflows/onboardcustomer.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "id": "onboardcustomer", - "version": "1.0", - "specVersion": "0.8", - "name": "Onboard Customer", - "description": "Onboard a Customer", - "start": "Onboard", - "states": [ - { - "name": "Onboard", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "invoke": "async", - "onParentComplete": "continue", - "workflowId": "customeronboardingworkflow", - "version": "1.0" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/parallelexec.json b/parser/testdata/workflows/parallelexec.json deleted file mode 100644 index 7e33893..0000000 --- a/parser/testdata/workflows/parallelexec.json +++ /dev/null @@ -1,34 +0,0 
@@ -{ - "id": "parallelexec", - "version": "1.0", - "specVersion": "0.8", - "name": "Parallel Execution Workflow", - "description": "Executes two branches in parallel", - "start": "ParallelExec", - "states": [ - { - "name": "ParallelExec", - "type": "parallel", - "completionType": "allOf", - "branches": [ - { - "name": "ShortDelayBranch", - "actions": [ - { - "subFlowRef": "shortdelayworkflowid" - } - ] - }, - { - "name": "LongDelayBranch", - "actions": [ - { - "subFlowRef": "longdelayworkflowid" - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientVitalsWorkflow.json b/parser/testdata/workflows/patientVitalsWorkflow.json deleted file mode 100644 index a4fd8b5..0000000 --- a/parser/testdata/workflows/patientVitalsWorkflow.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "id": "patientVitalsWorkflow", - "name": "Monitor Patient Vitals", - "version": "1.0", - "specVersion": "0.8", - "start": "MonitorVitals", - "events": [ - { - "name": "HighBodyTemperature", - "type": "org.monitor.highBodyTemp", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighBloodPressure", - "type": "org.monitor.highBloodPressure", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighRespirationRate", - "type": "org.monitor.highRespirationRate", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - } - ], - "functions": [ - { - "name": "callPulmonologist", - "operation": "http://myapis.org/patientapis.json#callPulmonologist" - }, - { - "name": "sendTylenolOrder", - "operation": "http://myapis.org/patientapis.json#tylenolOrder" - }, - { - "name": "callNurse", - "operation": "http://myapis.org/patientapis.json#callNurse" - } - ], - "states": [ - { - "name": "MonitorVitals", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "HighBodyTemperature" - ], - "actions": [ - { - "functionRef": { - "refName": "sendTylenolOrder", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighBloodPressure" - ], - "actions": [ - { - "functionRef": { - "refName": "callNurse", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighRespirationRate" - ], - "actions": [ - { - "functionRef": { - "refName": "callPulmonologist", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml deleted file mode 100644 index 6ceb1a1..0000000 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: patientonboarding -name: Patient Onboarding Workflow -version: "1.0" -specVersion: "0.8" -start: Onboard -states: - - name: Onboard - type: event - onEvents: - - eventRefs: - - NewPatientEvent - actions: - - functionRef: StorePatient - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: AssignDoctor - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: ScheduleAppt - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - onErrors: - - errorRef: ServiceNotAvailable - end: true - end: true -events: - - name: NewPatientEvent - type: new.patients.event - source: newpatient/+ -functions: - - name: StorePatient - operation: api/services.json#storePatient - - name: StoreNewPatientInfo - operation: api/services.json#addPatient - - name: AssignDoctor - operation: api/services.json#assignDoctor - - name: ScheduleAppt - operation: api/services.json#scheduleAppointment -errors: - - name: ServiceNotAvailable - code: "503" -retries: - - name: ServicesNotAvailableRetryStrategy - delay: PT3S - maxAttempts: 10 - jitter: 0.0 - multiplier: 1.1 \ No newline at end of file diff --git a/parser/testdata/workflows/paymentconfirmation.json b/parser/testdata/workflows/paymentconfirmation.json deleted file mode 100644 index 815a73c..0000000 --- a/parser/testdata/workflows/paymentconfirmation.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "paymentconfirmation", - "version": "1.0", - "specVersion": "0.8", - "name": "Payment Confirmation Workflow", - "description": "Performs Payment Confirmation", - "functions": "file://functiondefs.json", - "events": "file://eventdefs.yml", - "states": [ - { - "name": "PaymentReceived", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "PaymentReceivedEvent" - ], - "actions": [ - { - "name": "checkfunds", - "functionRef": { - "refName": "checkFundsAvailability", - "arguments": { - "account": "${ .accountId }", - "paymentamount": "${ .payment.amount }" - } - } - } - ] - } - ], - "transition": "ConfirmBasedOnFunds" - }, - { - "name": "ConfirmBasedOnFunds", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .funds | .available == \"true\" }", - "transition": "SendPaymentSuccess" - }, - { - "condition": "${ .funds | .available == \"false\" }", - "transition": "SendInsufficientResults" - } - ], - "defaultCondition": { - "transition": "SendPaymentSuccess" - } - }, - { - "name": "SendPaymentSuccess", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendSuccessEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - }, - { - "name": "SendInsufficientResults", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendInsufficientFundsEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json deleted file mode 100644 index 7496b32..0000000 --- a/parser/testdata/workflows/provisionorders.sw.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "id": "provisionorders", - "version": "1.0", - "specVersion": "0.8", - "name": "Provision Orders", - "description": "Provision Orders and 
handle errors thrown", - "start": "ProvisionOrder", - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ], - "states": [ - { - "name": "ProvisionOrder", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .order }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .exceptions }" - }, - "transition": "ApplyOrder", - "onErrors": [ - { - "errorRef": "Missing order id", - "transition": "MissingId" - }, - { - "errorRef": "Missing order item", - "transition": "MissingItem" - }, - { - "errorRef": "Missing order quantity", - "transition": "MissingQuantity" - } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingIdExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingItemExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingQuantityExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ - { - "subFlowRef": "applyOrderWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json deleted file mode 100644 index 2596b04..0000000 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "id": "order", - "name": "Purchase Order Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "StartNewOrder", - "timeouts": { - "workflowExecTimeout": { - "duration": "P30D", - "runBefore": "CancelOrder" - } - }, - "states": [ - { - "name": "StartNewOrder", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderCreatedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogNewOrderCreated" - } - } - ] - } - ], - "transition": { - "nextState": "WaitForOrderConfirmation" - } - }, - { - "name": "WaitForOrderConfirmation", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderConfirmedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderConfirmed" - } - } - ] - } - ], - "transition": { - "nextState": "WaitOrderShipped" - } - }, - { - "name": "WaitOrderShipped", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "ShipmentSentEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderShipped" - } - } - ] - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderFinishedEvent" - } - ] - } - }, - { - "name": "CancelOrder", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "CancelOrder" - } - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderCancelledEvent" - } - ] - } - } - ], - "events": [ - { - "name": "OrderCreatedEvent", - "type": "my.company.orders", - "source": "/orders/new", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderConfirmedEvent", - "type": "my.company.orders", - "source": "/orders/confirmed", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "ShipmentSentEvent", - 
"type": "my.company.orders", - "source": "/orders/shipped", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderFinishedEvent", - "type": "my.company.orders", - "kind": "produced" - }, - { - "name": "OrderCancelledEvent", - "type": "my.company.orders", - "kind": "produced" - } - ], - "functions": [ - { - "name": "LogNewOrderCreated", - "operation": "http.myorg.io/ordersservices.json#logcreated" - }, - { - "name": "LogOrderConfirmed", - "operation": "http.myorg.io/ordersservices.json#logconfirmed" - }, - { - "name": "LogOrderShipped", - "operation": "http.myorg.io/ordersservices.json#logshipped" - }, - { - "name": "CancelOrder", - "operation": "http.myorg.io/ordersservices.json#calcelorder" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json deleted file mode 100644 index 9040643..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": "file://testdata/timeouts.json", - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json deleted file mode 100644 index 90c7c62..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.sw.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } - }, - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "TemperatureEvent", - "HumidityEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": 
"/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcloudeventonprovision.json b/parser/testdata/workflows/sendcloudeventonprovision.json deleted file mode 100644 index 7e5bc37..0000000 --- a/parser/testdata/workflows/sendcloudeventonprovision.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "sendcloudeventonprovision", - "version": "1.0", - "specVersion": "0.8", - "name": "Send CloudEvent on provision completion", - "start": "ProvisionOrdersState", - "events": [ - { - "name": "provisioningCompleteEvent", - "type": "provisionCompleteType", - "kind": "produced" - } - ], - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioning.json#doProvision" - } - ], - "states": [ - { - "name": "ProvisionOrdersState", - "type": "foreach", - "inputCollection": "${ .orders }", - "iterationParam": "singleorder", - "outputCollection": "${ .provisionedOrders }", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .singleorder }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "provisioningCompleteEvent", - "data": "${ .provisionedOrders }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcustomeremail.json b/parser/testdata/workflows/sendcustomeremail.json deleted file mode 100644 index 7e8d010..0000000 --- a/parser/testdata/workflows/sendcustomeremail.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id": "sendcustomeremail", - "version": "1.0", - "specVersion": "0.8", - "name": "Send customer email workflow", - "description": "Send email to a customer", - "start": "Send Email", - "functions": [ - { - "name": "emailFunction", - "operation": "file://myapis/emailapis.json#sendEmail" - } - ], - "states": [ - { - "name": "Send Email", - "type": "operation", - "actions": [ - { - "functionRef": { - "invoke": "async", - "refName": "emailFunction", - "arguments": { - "customer": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/solvemathproblems.json b/parser/testdata/workflows/solvemathproblems.json deleted file mode 100644 index a3083d0..0000000 --- a/parser/testdata/workflows/solvemathproblems.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "id": "solvemathproblems", - "version": "1.0", - "specVersion": "0.8", - "name": "Solve Math Problems Workflow", - "description": "Solve math problems", - "start": "Solve", - "functions": [ - { - "name": "solveMathExpressionFunction", - "operation": "http://myapis.org/mapthapis.json#solveExpression" - } - ], - "states": [ - { - "name": "Solve", - "type": "foreach", - "inputCollection": "${ .expressions }", - "iterationParam": "singleexpression", - "outputCollection": "${ .results }", - "actions": [ - { - "functionRef": { - "refName": "solveMathExpressionFunction", - "arguments": { - "expression": "${ .singleexpression }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .results }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git 
a/parser/testdata/workflows/urifiles/auth.json b/parser/testdata/workflows/urifiles/auth.json deleted file mode 100644 index ff211df..0000000 --- a/parser/testdata/workflows/urifiles/auth.json +++ /dev/null @@ -1,17 +0,0 @@ -[ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } -] \ No newline at end of file diff --git a/parser/testdata/workflows/vitalscheck.json b/parser/testdata/workflows/vitalscheck.json deleted file mode 100644 index 3a89b78..0000000 --- a/parser/testdata/workflows/vitalscheck.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "vitalscheck", - "name": "Car Vitals Check", - "version": "1.0", - "specVersion": "0.8", - "start": "CheckVitals", - "states": [ - { - "name": "CheckVitals", - "type": "operation", - "actions": [ - { - "functionRef": "Check Tire Pressure" - }, - { - "functionRef": "Check Oil Pressure" - }, - { - "functionRef": "Check Coolant Level" - }, - { - "functionRef": "Check Battery" - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "DisplayChecksOnDashboard", - "data": "${ .evaluations }" - } - ] - } - } - ], - "functions": [ - { - "name": "Check Tire Pressure", - "operation": "mycarservices.json#checktirepressure" - }, - { - "name": "Check Oil Pressure", - "operation": "mycarservices.json#checkoilpressure" - }, - { - "name": "Check Coolant Level", - "operation": "mycarservices.json#checkcoolantlevel" - }, - { - "name": "Check Battery", - "operation": "mycarservices.json#checkbattery" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json deleted file mode 100644 index c0b72c8..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "P1S", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json 
b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json deleted file mode 100644 index e72712d..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json deleted file mode 100644 index d01c35e..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json 
b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json deleted file mode 100644 index 101b9bf..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/test/path.go b/test/path.go deleted file mode 100644 index 69c7113..0000000 --- a/test/path.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package test - -import ( - "os" - "path/filepath" - "runtime" - "strings" - - "github.com/pkg/errors" -) - -// CurrentProjectPath get the project root path -func CurrentProjectPath() string { - path := currentFilePath() - - ppath, err := filepath.Abs(filepath.Join(filepath.Dir(path), "../")) - if err != nil { - panic(errors.Wrapf(err, "Get current project path with %s failed", path)) - } - - f, err := os.Stat(ppath) - if err != nil { - panic(errors.Wrapf(err, "Stat project path %v failed", ppath)) - } - - if f.Mode()&os.ModeSymlink != 0 { - fpath, err := os.Readlink(ppath) - if err != nil { - panic(errors.Wrapf(err, "Readlink from path %v failed", fpath)) - } - ppath = fpath - } - - return ppath -} - -func currentFilePath() string { - _, file, _, _ := runtime.Caller(1) - if strings.HasSuffix(file, "/") { - return file - } - println("Returning an empty string for currentFilePath since it's not a caller path: " + file) - return "" -} diff --git a/test/path_test.go b/test/path_test.go deleted file mode 100644 index 4ccb672..0000000 --- a/test/path_test.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package test - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestCurrentProjectPath(t *testing.T) { - t.Run("normal test", func(t *testing.T) { - path := CurrentProjectPath() - - // NOTE: the '/code' path is used with code pipeline. - // When code running in the pipeline, the codebase will copy to /home/code directory. - assert.Regexp(t, "(/sdk-go$)|(/code$)", path) - }) -} diff --git a/test/utils.go b/test/utils.go new file mode 100644 index 0000000..d478edc --- /dev/null +++ b/test/utils.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "sigs.k8s.io/yaml" +) + +func AssertYAMLEq(t *testing.T, expected, actual string) { + var expectedMap, actualMap map[string]interface{} + + // Unmarshal the expected YAML + err := yaml.Unmarshal([]byte(expected), &expectedMap) + assert.NoError(t, err, "failed to unmarshal expected YAML") + + // Unmarshal the actual YAML + err = yaml.Unmarshal([]byte(actual), &actualMap) + assert.NoError(t, err, "failed to unmarshal actual YAML") + + // Assert equality of the two maps + assert.Equal(t, expectedMap, actualMap, "YAML structures do not match") +} diff --git a/tools.mod b/tools.mod index 69ff48c..203ee14 100644 --- a/tools.mod +++ b/tools.mod @@ -1,6 +1,6 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v3 -go 1.19 +go 1.22 require ( github.com/google/addlicense v0.0.0-20210428195630-6d92264d7170 // indirect diff --git a/util/floatstr/floatstr.go b/util/floatstr/floatstr.go deleted file mode 100644 index 7481271..0000000 --- a/util/floatstr/floatstr.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "strconv" - "strings" - - "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -// Float32OrString is a type that can hold a float32 or a string. -// implementation borrowed from apimachinary intstr package: https://github.com/kubernetes/apimachinery/blob/master/pkg/util/intstr/intstr.go -type Float32OrString struct { - Type Type `json:"type,omitempty"` - FloatVal float32 `json:"floatVal,omitempty"` - StrVal string `json:"strVal,omitempty"` -} - -// Type represents the stored type of Float32OrString. -type Type int64 - -const ( - // Float ... - Float Type = iota // The Float32OrString holds a float. - // String ... - String // The Float32OrString holds a string. -) - -// FromFloat creates an Float32OrString object with a float32 value. It is -// your responsibility not to call this method with a value greater -// than float32. -func FromFloat(val float32) Float32OrString { - return Float32OrString{Type: Float, FloatVal: val} -} - -// FromString creates a Float32OrString object with a string value. -func FromString(val string) Float32OrString { - return Float32OrString{Type: String, StrVal: val} -} - -// Parse the given string and try to convert it to a float32 before -// setting it as a string value. -func Parse(val string) Float32OrString { - f, err := strconv.ParseFloat(val, 32) - if err != nil { - return FromString(val) - } - return FromFloat(float32(f)) -} - -// UnmarshalJSON implements the json.Unmarshaller interface. 
-func (floatstr *Float32OrString) UnmarshalJSON(value []byte) error { - if value[0] == '"' { - floatstr.Type = String - return json.Unmarshal(value, &floatstr.StrVal) - } - floatstr.Type = Float - return json.Unmarshal(value, &floatstr.FloatVal) -} - -// MarshalJSON implements the json.Marshaller interface. -func (floatstr *Float32OrString) MarshalJSON() ([]byte, error) { - switch floatstr.Type { - case Float: - return json.Marshal(floatstr.FloatVal) - case String: - return json.Marshal(floatstr.StrVal) - default: - return []byte{}, fmt.Errorf("impossible Float32OrString.Type") - } -} - -// String returns the string value, or the float value. -func (floatstr *Float32OrString) String() string { - if floatstr == nil { - return "" - } - if floatstr.Type == String { - return floatstr.StrVal - } - return strconv.FormatFloat(float64(floatstr.FloatValue()), 'E', -1, 32) -} - -// FloatValue returns the FloatVal if type float32, or if -// it is a String, will attempt a conversion to float32, -// returning 0 if a parsing error occurs. -func (floatstr *Float32OrString) FloatValue() float32 { - if floatstr.Type == String { - f, _ := strconv.ParseFloat(floatstr.StrVal, 32) - return float32(f) - } - return floatstr.FloatVal -} - -func init() { - val.GetValidator().RegisterCustomTypeFunc(func(fl reflect.Value) interface{} { - if fl.Kind() != reflect.Struct { - return errors.New("invalid type: expected Float32OrString") - } - - // Get the Float32OrString value - _, ok := fl.Interface().(Float32OrString) - if !ok { - return fmt.Errorf("invalid type: expected Float32OrString") - } - - return nil - }, Float32OrString{}) -} - -func ValidateFloat32OrString(sl validator.StructLevel) { - // Get the current struct being validated. - current := sl.Current() - - for i := 0; i < current.NumField(); i++ { - field := current.Type().Field(i) - value := current.Field(i) - - // Check if the field is a pointer and handle nil pointers. - if value.Kind() == reflect.Ptr { - if value.IsNil() { - continue // Skip nil pointers. - } - value = value.Elem() // Dereference the pointer. - } - - // Check if the field is of type Float32OrString. - if value.Type() == reflect.TypeOf(Float32OrString{}) { - // Extract validation tags from the field. - tags := field.Tag.Get("validate") - - // Split tags and look for min/max. - tagList := strings.Split(tags, ",") - for _, tag := range tagList { - if strings.HasPrefix(tag, "min=") { - minVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "min="), 32) - if err != nil { - sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") - continue - } - - if value.FieldByName("FloatVal").Float() < minVal { - sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") - } - } - - if strings.HasPrefix(tag, "max=") { - maxVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "max="), 32) - if err != nil { - sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") - continue - } - - if value.FieldByName("FloatVal").Float() > maxVal { - sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") - } - } - } - } - } -} diff --git a/util/floatstr/floatstr_test.go b/util/floatstr/floatstr_test.go deleted file mode 100644 index ee25fbe..0000000 --- a/util/floatstr/floatstr_test.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "reflect" - "testing" - - "k8s.io/apimachinery/pkg/util/yaml" -) - -func TestFromFloat(t *testing.T) { - i := FromFloat(93.93) - if i.Type != Float || i.FloatVal != 93.93 { - t.Errorf("Expected FloatVal=93.93, got %+v", i) - } -} - -func TestFromString(t *testing.T) { - i := FromString("76.76") - if i.Type != String || i.StrVal != "76.76" { - t.Errorf("Expected StrVal=\"76.76\", got %+v", i) - } -} - -type FloatOrStringHolder struct { - FOrS Float32OrString `json:"val"` -} - -func TestIntOrStringUnmarshalJSON(t *testing.T) { - cases := []struct { - input string - result Float32OrString - }{ - {"{\"val\": 123.123}", FromFloat(123.123)}, - {"{\"val\": \"123.123\"}", FromString("123.123")}, - } - - for _, c := range cases { - var result FloatOrStringHolder - if err := json.Unmarshal([]byte(c.input), &result); err != nil { - t.Errorf("Failed to unmarshal input '%v': %v", c.input, err) - } - if result.FOrS != c.result { - t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result) - } - } -} - -func TestIntOrStringMarshalJSON(t *testing.T) { - cases := []struct { - input Float32OrString - result string - }{ - {FromFloat(123.123), "{\"val\":123.123}"}, - {FromString("123.123"), "{\"val\":\"123.123\"}"}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - result, err := json.Marshal(&input) - if err != nil { - t.Errorf("Failed to marshal input '%v': %v", input, err) - } - if string(result) != c.result { - t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result)) - } - } -} - -func TestIntOrStringMarshalJSONUnmarshalYAML(t *testing.T) { - cases := []struct { - input Float32OrString - }{ - {FromFloat(123.123)}, - {FromString("123.123")}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - jsonMarshalled, err := json.Marshal(&input) - if err != nil { - t.Errorf("1: Failed to marshal input: '%v': %v", input, err) - } - - var result FloatOrStringHolder - err = yaml.Unmarshal(jsonMarshalled, &result) - if err != nil { - t.Errorf("2: Failed to unmarshal '%+v': %v", string(jsonMarshalled), err) - } - - if !reflect.DeepEqual(input, result) { - t.Errorf("3: Failed to marshal input '%+v': got %+v", input, result) - } - } -} diff --git a/util/unmarshal.go b/util/unmarshal.go deleted file mode 100644 index d00e9d2..0000000 --- a/util/unmarshal.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2020 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package util - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "net/http" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "sync/atomic" - "time" - - "sigs.k8s.io/yaml" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -// Kind ... -// +k8s:deepcopy-gen=false -type Kind interface { - KindValues() []string - String() string -} - -// TODO: Remove global variable -var HttpClient = http.Client{Timeout: time.Duration(1) * time.Second} - -// UnmarshalError ... -// +k8s:deepcopy-gen=false -type UnmarshalError struct { - err error - parameterName string - primitiveType reflect.Kind - objectType reflect.Kind -} - -func (e *UnmarshalError) Error() string { - if e.err == nil { - panic("unmarshalError fail") - } - - var syntaxErr *json.SyntaxError - var unmarshalTypeErr *json.UnmarshalTypeError - if errors.As(e.err, &syntaxErr) { - return fmt.Sprintf("%s has a syntax error %q", e.parameterName, syntaxErr.Error()) - - } else if errors.As(e.err, &unmarshalTypeErr) { - return e.unmarshalMessageError(unmarshalTypeErr) - } - - return e.err.Error() -} - -func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) string { - if err.Struct == "" && err.Field == "" { - primitiveTypeName := e.primitiveType.String() - - // in some cases the e.primitiveType might be invalid, one of the reasons is because it is nil - // default to string in that case - if e.primitiveType == reflect.Invalid { - primitiveTypeName = "string" - } - - var objectTypeName string - if e.objectType != reflect.Invalid { - switch e.objectType { - case reflect.Struct: - objectTypeName = "object" - case reflect.Map: - objectTypeName = "object" - case reflect.Slice: - objectTypeName = "array" - default: - objectTypeName = e.objectType.String() - } - } - return fmt.Sprintf("%s must be %s or %s", e.parameterName, primitiveTypeName, objectTypeName) - - } else if err.Struct != "" && err.Field != "" { - var primitiveTypeName string - value := reflect.New(err.Type) - if valKinds, ok := value.Elem().Interface().(val.Kind); ok { - values := valKinds.KindValues() - if len(values) <= 2 { - primitiveTypeName = strings.Join(values, " or ") - } else { - primitiveTypeName = fmt.Sprintf("%s, %s", strings.Join(values[:len(values)-2], ", "), strings.Join(values[len(values)-2:], " or ")) - } - } else { - primitiveTypeName = err.Type.Name() - } - - return fmt.Sprintf("%s.%s must be %s", e.parameterName, err.Field, primitiveTypeName) - } - - return err.Error() -} - -func LoadExternalResource(url string) (b []byte, err error) { - index := strings.Index(url, "://") - if index == -1 { - b, err = getBytesFromFile(url) - } else { - scheme := url[:index] - switch scheme { - case "http", "https": - b, err = getBytesFromHttp(url) - case "file": - b, err = getBytesFromFile(url[index+3:]) - default: - return nil, fmt.Errorf("unsupported scheme: %q", scheme) - } - } - if err != nil { - return - } - - // TODO: optimize this - // NOTE: In specification, we can declare independent definitions with another file format, so - // we must convert independently yaml source to json format data before unmarshal. 
- if !json.Valid(b) { - b, err = yaml.YAMLToJSON(b) - if err != nil { - return nil, err - } - return b, nil - } - - return b, nil -} - -func getBytesFromFile(path string) ([]byte, error) { - if WebAssembly() { - return nil, fmt.Errorf("unsupported open file") - } - - // if path is relative, search in include paths - if !filepath.IsAbs(path) { - paths := IncludePaths() - pathFound := false - for i := 0; i < len(paths) && !pathFound; i++ { - sn := filepath.Join(paths[i], path) - _, err := os.Stat(sn) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - return nil, err - } - } else { - path = sn - pathFound = true - } - } - if !pathFound { - return nil, fmt.Errorf("file not found: %q", path) - } - } - - return os.ReadFile(filepath.Clean(path)) -} - -func getBytesFromHttp(url string) ([]byte, error) { - req, err := http.NewRequest(http.MethodGet, url, nil) - if err != nil { - return nil, err - } - - resp, err := HttpClient.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - buf := new(bytes.Buffer) - if _, err = buf.ReadFrom(resp.Body); err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -// +k8s:deepcopy-gen=false -func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { - var valString string - err := UnmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) - if err != nil || valString == "" { - return err - } - - // Assumes that the value inside `data` is a path to a known location. - // Returns the content of the file or a not nil error reference. - data, err = LoadExternalResource(valString) - if err != nil { - return err - } - - data = bytes.TrimSpace(data) - if data[0] != '{' && data[0] != '[' { - return errors.New("invalid external resource definition") - } - - if data[0] == '[' && parameterName != "auth" && parameterName != "secrets" { - return errors.New("invalid external resource definition") - } - - data = bytes.TrimSpace(data) - if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" && parameterName != "schema" { - extractData := map[string]json.RawMessage{} - err = json.Unmarshal(data, &extractData) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: reflect.TypeOf(*valObject).Kind(), - } - } - - var ok bool - if data, ok = extractData[parameterName]; !ok { - return fmt.Errorf("external resource parameter not found: %q", parameterName) - } - } - - return UnmarshalObject(parameterName, data, valObject) -} - -func UnmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("%s no bytes to unmarshal", parameterName) - } - - isObject := data[0] == '{' || data[0] == '[' - var err error - if isObject { - err = UnmarshalObject(parameterName, data, valStruct) - } else { - err = unmarshalPrimitive(parameterName, data, valPrimitive) - } - - var unmarshalError *UnmarshalError - if errors.As(err, &unmarshalError) { - unmarshalError.objectType = reflect.TypeOf(*valStruct).Kind() - unmarshalError.primitiveType = reflect.TypeOf(*valPrimitive).Kind() - } - - return err -} - -func unmarshalPrimitive[T string | bool](parameterName string, data []byte, value *T) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: 
reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -func UnmarshalObject[U any](parameterName string, data []byte, value *U) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - objectType: reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -var defaultIncludePaths atomic.Value - -func init() { - // No execute set include path to suport webassembly - if WebAssembly() { - return - } - - wd, err := os.Getwd() - if err != nil { - panic(err) - } - SetIncludePaths([]string{wd}) -} - -// IncludePaths will return the search path for non-absolute import file -func IncludePaths() []string { - return defaultIncludePaths.Load().([]string) -} - -// SetIncludePaths will update the search path for non-absolute import file -func SetIncludePaths(paths []string) { - for _, path := range paths { - if !filepath.IsAbs(path) { - panic(fmt.Errorf("%s must be an absolute file path", path)) - } - } - - defaultIncludePaths.Store(paths) -} - -func WebAssembly() bool { - return runtime.GOOS == "js" && runtime.GOARCH == "wasm" -} diff --git a/util/unmarshal_benchmark_test.go b/util/unmarshal_benchmark_test.go deleted file mode 100644 index 1a81b41..0000000 --- a/util/unmarshal_benchmark_test.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "fmt" - "testing" -) - -func Benchmark_IncludePaths_Parallel(b *testing.B) { - b.RunParallel(func(p *testing.PB) { - i := 0 - for p.Next() { - IncludePaths() - SetIncludePaths([]string{fmt.Sprintf("%v", i)}) - i++ - } - }) -} diff --git a/util/unmarshal_test.go b/util/unmarshal_test.go deleted file mode 100644 index f7051fb..0000000 --- a/util/unmarshal_test.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package util - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/serverlessworkflow/sdk-go/v2/test" -) - -func TestIncludePaths(t *testing.T) { - assert.NotNil(t, IncludePaths()) - assert.True(t, len(IncludePaths()) > 0) - - // update include paths - initialPaths := IncludePaths() - paths := []string{"/root", "/path"} - SetIncludePaths(paths) - assert.Equal(t, IncludePaths(), paths) - - assert.PanicsWithError(t, "1 must be an absolute file path", assert.PanicTestFunc(func() { - SetIncludePaths([]string{"1"}) - })) - - SetIncludePaths(initialPaths) -} - -func Test_loadExternalResource(t *testing.T) { - SetIncludePaths(append(IncludePaths(), filepath.Join(test.CurrentProjectPath()))) - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte("{}")) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - data, err := LoadExternalResource(server.URL + "/test.json") - assert.NoError(t, err) - assert.Equal(t, "{}", string(data)) - - data, err = LoadExternalResource("parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("file://../parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("./parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - _, err = LoadExternalResource("ftp://test.yml") - assert.ErrorContains(t, err, "unsupported scheme: \"ftp\"") -} - -func Test_unmarshalObjectOrFile(t *testing.T) { - t.Run("httptest", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - type listStructString []structString - - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"listStructString":[{"fieldValue": "value"}]}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - structValue := &structString{} - data := []byte(`"fieldValue": "value"`) - err := UnmarshalObjectOrFile("structString", data, structValue) - assert.Error(t, err) - assert.Equal(t, &structString{}, structValue) - - listStructValue := &listStructString{} - data = []byte(`[{"fieldValue": "value"}]`) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, 
*listStructValue) - - listStructValue = &listStructString{} - data = []byte(fmt.Sprintf(`"%s/test.json"`, server.URL)) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) - }) -} - -func Test_primitiveOrMapType(t *testing.T) { - type dataMap map[string]json.RawMessage - - t.Run("unmarshal", func(t *testing.T) { - var valBool bool - valMap := &dataMap{} - data := []byte(`"value":true`) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`{value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`true`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.True(t, valBool) - - valString := "" - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.Equal(t, `true`, valString) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) - assert.False(t, valBool) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value": "true"}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) - assert.False(t, valBool) - }) - - t.Run("test personalized syntaxError error message", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - - var valString string - valStruct := &structString{} - data := []byte(`{"fieldValue": "value"`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"unexpected end of JSON input\"", err.Error()) - - data = []byte(`{\n "fieldValue": value\n}`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"invalid character '\\\\\\\\' looking for beginning of object key string\"", err.Error()) - // assert.Equal(t, `structBool value '{"fieldValue": value}' is not supported, it has a syntax error "invalid character 'v' looking for beginning of value"`, err.Error()) - }) - - t.Run("test personalized unmarshalTypeError error message", func(t *testing.T) { - type structBool struct { - FieldValue bool `json:"fieldValue"` - } - - var valBool bool - valStruct := &structBool{} - data := []byte(`{ - "fieldValue": "true" -}`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool.fieldValue must be bool", err.Error()) - - valBool = false - valStruct = &structBool{} - data = 
[]byte(`"true"`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool must be bool or object", err.Error()) - }) - - t.Run("check json with spaces", func(t *testing.T) { - var valBool bool - valStruct := &dataMap{} - data := []byte(` {"value": "true"} `) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valBool = false - valStruct = &dataMap{} - data = []byte(` true `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valString := "" - valStruct = &dataMap{} - data = []byte(` "true" `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - }) - - t.Run("check tabs", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\t') + `"true"` + string('\t')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\t') + `true` + string('\t')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("check breakline", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\n') + `"true"` + string('\n')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\n') + `true` + string('\n')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("test recursivity and default value", func(t *testing.T) { - valStruct := &structBool{} - data := []byte(`{"fieldValue": false}`) - err := json.Unmarshal(data, valStruct) - assert.NoError(t, err) - assert.False(t, valStruct.FieldValue) - }) -} - -type structBool struct { - FieldValue bool `json:"fieldValue"` -} - -type structBoolUnmarshal structBool - -func (s *structBool) UnmarshalJSON(data []byte) error { - s.FieldValue = true - return UnmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) -} diff --git a/validator/tags.go b/validator/tags.go deleted file mode 100644 index e568aba..0000000 --- a/validator/tags.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -const ( - // TagISO8601Duration is the validate tag for iso8601 time duration format - TagISO8601Duration = "iso8601duration" -) diff --git a/validator/validator.go b/validator/validator.go deleted file mode 100644 index f241f84..0000000 --- a/validator/validator.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "context" - "errors" - "strconv" - - "github.com/relvacode/iso8601" - "github.com/sosodev/duration" - "k8s.io/apimachinery/pkg/util/intstr" - - validator "github.com/go-playground/validator/v10" -) - -// TODO: expose a better validation message. See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation - -type Kind interface { - KindValues() []string - String() string -} - -var validate *validator.Validate - -func init() { - validate = validator.New() - - err := validate.RegisterValidationCtx("iso8601duration", validateISO8601TimeDurationFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidationCtx("iso8601datetime", validateISO8601DatetimeFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidation("oneofkind", oneOfKind) - if err != nil { - panic(err) - } -} - -// GetValidator gets the default validator.Validate reference -func GetValidator() *validator.Validate { - return validate -} - -// ValidateISO8601TimeDuration validate the string is iso8601 duration format -func ValidateISO8601TimeDuration(s string) error { - if s == "" { - return errors.New("could not parse duration string") - } - _, err := duration.Parse(s) - if err != nil { - return errors.New("could not parse duration string") - } - return err -} - -func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601TimeDuration(fl.Field().String()) - return err == nil -} - -// ValidateISO8601Datetime validate the string is iso8601 Datetime format -func ValidateISO8601Datetime(s string) error { - _, err := iso8601.ParseString(s) - return err -} - -func validateISO8601DatetimeFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601Datetime(fl.Field().String()) - return err == nil -} - -func oneOfKind(fl validator.FieldLevel) bool { - if val, ok := fl.Field().Interface().(Kind); ok { - for _, value := range val.KindValues() { - if value == val.String() { - return true - } - } - } - - return false -} - -func ValidateGt0IntStr(value *intstr.IntOrString) bool { - switch value.Type { - case intstr.Int: - if value.IntVal <= 0 { - return false - } - case intstr.String: - v, err := strconv.Atoi(value.StrVal) - if err != nil { - return false - } - - if v <= 0 { - return false - } - } - - return true -} diff --git a/validator/validator_test.go b/validator/validator_test.go deleted file mode 100644 index daab56a..0000000 --- a/validator/validator_test.go +++ /dev/null @@ -1,228 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestValidateISO8601TimeDuration(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "normal_all_designator", - s: "P3Y6M4DT12H30M5S", - err: ``, - }, - { - desp: "normal_second_designator", - s: "PT5S", - err: ``, - }, - { - desp: "fractional_second_designator", - s: "PT0.5S", - err: ``, - }, - { - desp: "empty value", - s: "", - err: `could not parse duration string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601TimeDuration(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestValidateISO8601Timestamp(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "workflow_spec_example", - s: "2021-11-05T08:15:30-05:00", - err: ``, - }, - { - desp: "datetime", - s: "2023-09-08T20:15:46+00:00", - err: ``, - }, - { - desp: "date", - s: "2023-09-08", - err: ``, - }, - { - desp: "time", - s: "13:15:33.074-07:00", - err: "iso8601: Unexpected character `:`", - }, - { - desp: "empty value", - s: "", - err: `iso8601: Cannot parse "": month 0 is not in range 1-12`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601Datetime(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -type testKind string - -func (k testKind) KindValues() []string { - return []string{"test1", "test2"} -} - -func (k testKind) String() string { - return string(k) -} - -type testKindInvalid string - -func (k testKindInvalid) AllValuesInvalid() []string { - return []string{"test1", "test2"} -} - -func (k testKindInvalid) String() string { - return string(k) -} - -func Test_oneOfKind(t *testing.T) { - validate := GetValidator() - - t.Run("kind without kindInvalid", func(t *testing.T) { - spec := struct { - f interface{} - t string - }{ - f: testKindInvalid("test1"), t: "oneofkind", - } - - errs := validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) - - t.Run("kind", func(t *testing.T) { - spec := struct { - f testKind - t string - }{ - f: testKind("test1"), t: "oneofkind", - } - errs := validate.Var(spec.f, spec.t) - assert.NoError(t, errs) - - spec = struct { - f testKind - t string - }{ - f: testKind("test3"), t: "oneofkind", - } - errs = validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) -} - -func TestValidateIntStr(t *testing.T) { - - testCase := []struct { - Desp string - Test *intstr.IntOrString - Return bool - }{ - { - Desp: "success int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - }, - Return: true, - }, - { - Desp: "success string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", - }, - Return: true, - }, - { - Desp: "fail int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - }, - Return: false, - }, - { - Desp: "fail string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", - }, - Return: false, - }, - { - Desp: "fail invalid string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "aa", - }, - Return: false, - }, - } - - for _, c := range testCase { - 
t.Run(c.Desp, func(t *testing.T) { - valid := ValidateGt0IntStr(c.Test) - assert.Equal(t, c.Return, valid) - }) - } -} diff --git a/validator/workflow.go b/validator/workflow.go deleted file mode 100644 index d5be7b5..0000000 --- a/validator/workflow.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "errors" - "fmt" - "reflect" - "strings" - - validator "github.com/go-playground/validator/v10" -) - -const ( - TagExists string = "exists" - TagRequired string = "required" - TagExclusive string = "exclusive" - - TagRecursiveState string = "recursivestate" - - // States referenced by compensatedBy (as well as any other states that they transition to) must obey following rules: - TagTransitionMainWorkflow string = "transtionmainworkflow" // They should not have any incoming transitions (should not be part of the main workflow control-flow logic) - TagCompensatedbyEventState string = "compensatedbyeventstate" // They cannot be an event state - TagRecursiveCompensation string = "recursivecompensation" // They cannot themselves set their compensatedBy property to true (compensation is not recursive) - TagCompensatedby string = "compensatedby" // They must define the usedForCompensation property and set it to true - TagTransitionUseForCompensation string = "transitionusedforcompensation" // They can transition only to states which also have their usedForCompensation property and set to true -) - -type WorkflowErrors []error - -func (e WorkflowErrors) Error() string { - errors := []string{} - for _, err := range []error(e) { - errors = append(errors, err.Error()) - } - return strings.Join(errors, "\n") -} - -func WorkflowError(err error) error { - if err == nil { - return nil - } - - var invalidErr *validator.InvalidValidationError - if errors.As(err, &invalidErr) { - return err - } - - var validationErrors validator.ValidationErrors - if !errors.As(err, &validationErrors) { - return err - } - - removeNamespace := []string{ - "BaseWorkflow", - "BaseState", - "OperationState", - } - - workflowErrors := []error{} - for _, err := range validationErrors { - // normalize namespace - namespaceList := strings.Split(err.Namespace(), ".") - normalizedNamespaceList := []string{} - for i := range namespaceList { - part := namespaceList[i] - if !contains(removeNamespace, part) { - part := strings.ToLower(part[:1]) + part[1:] - normalizedNamespaceList = append(normalizedNamespaceList, part) - } - } - namespace := strings.Join(normalizedNamespaceList, ".") - - switch err.Tag() { - case "unique": - if err.Param() == "" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate value", namespace)) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate %q", namespace, strings.ToLower(err.Param()))) - } - case "min": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must have the minimum %s", namespace, err.Param())) - case 
"required_without": - if namespace == "workflow.iD" { - workflowErrors = append(workflowErrors, errors.New("workflow.id required when \"workflow.key\" is not defined")) - } else if namespace == "workflow.key" { - workflowErrors = append(workflowErrors, errors.New("workflow.key required when \"workflow.id\" is not defined")) - } else if err.StructField() == "FunctionRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s required when \"eventRef\" or \"subFlowRef\" is not defined", namespace)) - } else { - workflowErrors = append(workflowErrors, err) - } - case "oneofkind": - value := reflect.New(err.Type()).Elem().Interface().(Kind) - workflowErrors = append(workflowErrors, fmt.Errorf("%s need by one of %s", namespace, value.KindValues())) - case "gt0": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must be greater than 0", namespace)) - case TagExists: - workflowErrors = append(workflowErrors, fmt.Errorf("%s don't exist %q", namespace, err.Value())) - case TagRequired: - workflowErrors = append(workflowErrors, fmt.Errorf("%s is required", namespace)) - case TagExclusive: - if err.StructField() == "ErrorRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s or %s are exclusive", namespace, replaceLastNamespace(namespace, "errorRefs"))) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s exclusive", namespace)) - } - case TagCompensatedby: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is not defined as usedForCompensation", namespace, err.Value())) - case TagCompensatedbyEventState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation and cannot be an event state", namespace, err.Value())) - case TagRecursiveCompensation: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation (cannot themselves set their compensatedBy)", namespace, err.Value())) - case TagRecursiveState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s can't no be recursive %q", namespace, strings.ToLower(err.Param()))) - case TagISO8601Duration: - workflowErrors = append(workflowErrors, fmt.Errorf("%s invalid iso8601 duration %q", namespace, err.Value())) - default: - workflowErrors = append(workflowErrors, err) - } - } - - return WorkflowErrors(workflowErrors) -} - -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} - -func replaceLastNamespace(namespace, replace string) string { - index := strings.LastIndex(namespace, ".") - if index == -1 { - return namespace - } - - return fmt.Sprintf("%s.%s", namespace[:index], replace) -} pFad - Phonifier reborn