From 25c489487adf599b77e4fad7544a89d5d9e13fbc Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 29 Aug 2022 12:56:02 -0300 Subject: [PATCH 001/110] Update CNCF Slack info --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 9c8ef9c..658a254 100644 --- a/README.md +++ b/README.md @@ -61,3 +61,7 @@ func ParseWorkflow(filePath string) (*model.Workflow, error) { ``` The `Workflow` structure then can be used in your application. + +## Slack Channel + +Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. From ddc15cc290f5e4c3ef996aea67893f01bca988ad Mon Sep 17 00:00:00 2001 From: Songlin Yang Date: Mon, 5 Sep 2022 19:55:34 +0800 Subject: [PATCH 002/110] test(*): fix unittest assert.NoError checks and msg (#67) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- parser/parser_test.go | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/parser/parser_test.go b/parser/parser_test.go index 2bdbfa6..22049a2 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -30,10 +30,11 @@ func TestBasicValidation(t *testing.T) { for _, file := range files { if !file.IsDir() { workflow, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.NoError(t, err) - assert.NotEmpty(t, workflow.Name) - assert.NotEmpty(t, workflow.ID) - assert.NotEmpty(t, workflow.States) + if assert.NoError(t, err) { + assert.NotEmpty(t, workflow.Name) + assert.NotEmpty(t, workflow.ID) + assert.NotEmpty(t, workflow.States) + } } } } @@ -251,8 +252,9 @@ func TestFromFile(t *testing.T) { } for file, f := range files { workflow, err := FromFile(file) - assert.NoError(t, err, "Test File", file) - assert.NotNil(t, workflow, "Test File", file) - f(t, workflow) + if assert.NoError(t, err, "Test File %s", file) { + assert.NotNil(t, workflow, "Test File %s", file) + f(t, workflow) + } } } From 073c3224a943cfbe218c37278d14166357d75575 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Tue, 6 Sep 2022 18:51:29 +0800 Subject: [PATCH 003/110] refactor(model): remove unnecessary code for slice length check (#68) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- go.sum | 1 - model/auth.go | 13 +++---- model/auth_test.go | 94 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 100 insertions(+), 8 deletions(-) create mode 100644 model/auth_test.go diff --git a/go.sum b/go.sum index 830ed38..f63a306 100644 --- a/go.sum +++ b/go.sum @@ -44,7 +44,6 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3 h1:0es+/5331RGQPcXlMfP+WrnIIS6dNnNRe0WB02W0F4M= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d h1:3qF+Z8Hkrw9sOhrFHti9TlB1Hkac1x+DNRkv0XQiFjo= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= diff --git a/model/auth.go b/model/auth.go index 
60e9aa1..f5c2712 100644 --- a/model/auth.go +++ b/model/auth.go @@ -31,13 +31,12 @@ func init() { func AuthDefinitionsStructLevelValidation(structLevel validator.StructLevel) { authDefs := structLevel.Current().Interface().(AuthDefinitions) dict := map[string]bool{} - if authDefs.Defs != nil && len(authDefs.Defs) > 1 { - for _, a := range authDefs.Defs { - if !dict[a.Name] { - dict[a.Name] = true - } else { - structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") - } + + for _, a := range authDefs.Defs { + if !dict[a.Name] { + dict[a.Name] = true + } else { + structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") } } } diff --git a/model/auth_test.go b/model/auth_test.go new file mode 100644 index 0000000..3a04504 --- /dev/null +++ b/model/auth_test.go @@ -0,0 +1,94 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestAuthDefinitionsStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + authDefs AuthDefinitions + err string + } + testCases := []testCase{ + { + desp: "nil defs", + authDefs: AuthDefinitions{ + Defs: nil, + }, + err: ``, + }, + { + desp: "zero length defs", + authDefs: AuthDefinitions{ + Defs: []Auth{}, + }, + err: ``, + }, + { + desp: "multi unique defs", + authDefs: AuthDefinitions{ + Defs: []Auth{ + { + Name: "1", + }, + { + Name: "2", + }, + { + Name: "3", + }, + }, + }, + err: ``, + }, + { + desp: "multi non-unique defs", + authDefs: AuthDefinitions{ + Defs: []Auth{ + { + Name: "1", + }, + { + Name: "2", + }, + { + Name: "1", + }, + }, + }, + err: `Key: 'AuthDefinitions.Name' Error:Field validation for 'Name' failed on the 'reqnameunique' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.authDefs) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} From 0ec04ff3bb08b62c1b104e82838de3aa77b78abf Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Thu, 8 Sep 2022 22:56:22 +0800 Subject: [PATCH 004/110] fix(*): update auth type to array or string (#71) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/auth.go | 17 ++-- .../workflows/applicationrequest-issue69.json | 79 +++++++++++++++++ .../workflows/applicationrequest.json | 4 +- parser/testdata/workflows/urifiles/auth.json | 17 ++++ ...pplicationrequest.auth.invalid.format.json | 85 +++++++++++++++++++ 5 files changed, 192 insertions(+), 10 deletions(-) create mode 100644 parser/testdata/workflows/applicationrequest-issue69.json create mode 100644 parser/testdata/workflows/urifiles/auth.json create mode 100644 parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json diff --git 
a/model/auth.go b/model/auth.go index f5c2712..b9a1166 100644 --- a/model/auth.go +++ b/model/auth.go @@ -92,24 +92,25 @@ func (a *AuthDefinitions) UnmarshalJSON(b []byte) error { if len(b) == 0 { return fmt.Errorf("no bytes to unmarshal") } + // See if we can guess based on the first character switch b[0] { - case '{': - return a.unmarshalSingle(b) + case '"': + return a.unmarshalFile(b) case '[': return a.unmarshalMany(b) } - return nil + + return fmt.Errorf("auth value '%s' not support, it must be an array or string", string(b)) } -func (a *AuthDefinitions) unmarshalSingle(data []byte) error { - var auth Auth - err := json.Unmarshal(data, &auth) +func (a *AuthDefinitions) unmarshalFile(data []byte) error { + b, err := unmarshalFile(data) if err != nil { return err } - a.Defs = []Auth{auth} - return nil + + return a.unmarshalMany(b) } func (a *AuthDefinitions) unmarshalMany(data []byte) error { diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json new file mode 100644 index 0000000..b0a9a78 --- /dev/null +++ b/parser/testdata/workflows/applicationrequest-issue69.json @@ -0,0 +1,79 @@ +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": "./testdata/workflows/urifiles/auth.json", + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" + } + ], + "retries": [ + { + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" + } + ], + "states": [ + { + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "transition": { + "nextState": "StartApplication" + } + }, + { + "condition": "{{ $.applicants[?(@.age < 18)] }}", + "transition": { + "nextState": "RejectApplication" + } + } + ], + "default": { + "transition": { + "nextState": "RejectApplication" + } + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "startApplicationWorkflowId" + } + } + ], + "end": { + "terminate": true + } + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "parameters": { + "applicant": "{{ $.applicant }}" + } + } + } + ], + "end": { + "terminate": true + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json index b5fc7da..e2bca99 100644 --- a/parser/testdata/workflows/applicationrequest.json +++ b/parser/testdata/workflows/applicationrequest.json @@ -5,13 +5,13 @@ "description": "Determine if applicant request is valid", "start": "CheckApplication", "specVersion": "0.7", - "auth": { + "auth": [{ "name": "testAuth", "scheme": "bearer", "properties": { "token": "test_token" } - }, + }], "functions": [ { "name": "sendRejectionEmailFunction", diff --git a/parser/testdata/workflows/urifiles/auth.json b/parser/testdata/workflows/urifiles/auth.json new file mode 100644 index 0000000..a3a62aa --- /dev/null +++ b/parser/testdata/workflows/urifiles/auth.json @@ -0,0 +1,17 @@ +[ + { + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } + }, + { + "name": "testAuth2", + "scheme": "basic", + 
"properties": { + "username": "test_user", + "password": "test_pwd" + } + } +] \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json new file mode 100644 index 0000000..ffbe5be --- /dev/null +++ b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json @@ -0,0 +1,85 @@ +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": { + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } + }, + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" + } + ], + "retries": [ + { + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" + } + ], + "states": [ + { + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "transition": { + "nextState": "StartApplication" + } + }, + { + "condition": "{{ $.applicants[?(@.age < 18)] }}", + "transition": { + "nextState": "RejectApplication" + } + } + ], + "default": { + "transition": { + "nextState": "RejectApplication" + } + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "startApplicationWorkflowId" + } + } + ], + "end": { + "terminate": true + } + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "parameters": { + "applicant": "{{ $.applicant }}" + } + } + } + ], + "end": { + "terminate": true + } + } + ] + } \ No newline at end of file From 49155eb2c9ea072712abd4384896385c1b30bb83 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Fri, 9 Sep 2022 20:33:12 +0800 Subject: [PATCH 005/110] fix(44): add validate for ISO8601 time duration field (#72) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- go.mod | 1 + go.sum | 2 + model/delay_state.go | 47 ++++++++ model/delay_state_test.go | 79 ++++++++++++ model/retry.go | 47 +++++++- model/retry_test.go | 114 ++++++++++++++++++ model/states.go | 7 -- model/util.go | 7 ++ model/util_test.go | 59 +++++++++ model/workflow.go | 2 +- parser/parser_test.go | 2 +- .../applicationrequest-issue44.json | 85 +++++++++++++ 12 files changed, 442 insertions(+), 10 deletions(-) create mode 100644 model/delay_state.go create mode 100644 model/delay_state_test.go create mode 100644 model/retry_test.go create mode 100644 model/util_test.go create mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue44.json diff --git a/go.mod b/go.mod index 88d4311..e8121ee 100644 --- a/go.mod +++ b/go.mod @@ -4,6 +4,7 @@ go 1.19 require ( github.com/go-playground/validator/v10 v10.11.0 + github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.7.0 k8s.io/apimachinery v0.25.0 sigs.k8s.io/yaml v1.3.0 diff --git a/go.sum b/go.sum index f63a306..79dea06 100644 --- a/go.sum +++ b/go.sum @@ -35,6 +35,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal 
v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 h1:Dz0HrI1AtNSGCE8LXLLqoZU4iuOJXPWndenCsZfstA8= +github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46/go.mod h1:is8FVkzSi7PYLWEXT5MgWhglFsyyiW8ffxAoJqfuFZo= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= diff --git a/model/delay_state.go b/model/delay_state.go new file mode 100644 index 0000000..b327211 --- /dev/null +++ b/model/delay_state.go @@ -0,0 +1,47 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "reflect" + + "github.com/go-playground/validator/v10" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation( + DelayStateStructLevelValidation, + DelayState{}, + ) +} + +// DelayState Causes the workflow execution to delay for a specified duration +type DelayState struct { + BaseState + // Amount of time (ISO 8601 format) to delay + TimeDelay string `json:"timeDelay" validate:"required"` +} + +// DelayStateStructLevelValidation custom validator for DelayState Struct +func DelayStateStructLevelValidation(structLevel validator.StructLevel) { + delayStateObj := structLevel.Current().Interface().(DelayState) + + err := validateISO8601TimeDuration(delayStateObj.TimeDelay) + if err != nil { + structLevel.ReportError(reflect.ValueOf(delayStateObj.TimeDelay), "TimeDelay", "timeDelay", "reqiso8601duration", "") + } +} diff --git a/model/delay_state_test.go b/model/delay_state_test.go new file mode 100644 index 0000000..51a1395 --- /dev/null +++ b/model/delay_state_test.go @@ -0,0 +1,79 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestDelayStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + delayStateObj DelayState + err string + } + testCases := []testCase{ + { + desp: "normal", + delayStateObj: DelayState{ + BaseState: BaseState{ + Name: "1", + Type: "delay", + }, + TimeDelay: "PT5S", + }, + err: ``, + }, + { + desp: "missing required timeDelay", + delayStateObj: DelayState{ + BaseState: BaseState{ + Name: "1", + Type: "delay", + }, + TimeDelay: "", + }, + err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag\nKey: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invalid timeDelay duration", + delayStateObj: DelayState{ + BaseState: BaseState{ + Name: "1", + Type: "delay", + }, + TimeDelay: "P5S", + }, + err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'reqiso8601duration' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.delayStateObj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/retry.go b/model/retry.go index 5f232af..a0ce3c9 100644 --- a/model/retry.go +++ b/model/retry.go @@ -15,10 +15,22 @@ package model import ( - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + "reflect" + + "github.com/go-playground/validator/v10" "k8s.io/apimachinery/pkg/util/intstr" + + "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) +func init() { + val.GetValidator().RegisterStructValidation( + RetryStructLevelValidation, + Retry{}, + ) +} + // Retry ... type Retry struct { // Unique retry strategy name @@ -36,3 +48,36 @@ type Retry struct { // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` } + +// RetryStructLevelValidation custom validator for Retry Struct +func RetryStructLevelValidation(structLevel validator.StructLevel) { + retryObj := structLevel.Current().Interface().(Retry) + + if retryObj.Delay != "" { + err := validateISO8601TimeDuration(retryObj.Delay) + if err != nil { + structLevel.ReportError(reflect.ValueOf(retryObj.Delay), "Delay", "delay", "reqiso8601duration", "") + } + } + + if retryObj.MaxDelay != "" { + err := validateISO8601TimeDuration(retryObj.MaxDelay) + if err != nil { + structLevel.ReportError(reflect.ValueOf(retryObj.MaxDelay), "MaxDelay", "maxDelay", "reqiso8601duration", "") + } + } + + if retryObj.Increment != "" { + err := validateISO8601TimeDuration(retryObj.Increment) + if err != nil { + structLevel.ReportError(reflect.ValueOf(retryObj.Increment), "Increment", "increment", "reqiso8601duration", "") + } + } + + if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { + err := validateISO8601TimeDuration(retryObj.Jitter.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "reqiso8601duration", "") + } + } +} diff --git a/model/retry_test.go b/model/retry_test.go new file mode 100644 index 0000000..18eddac --- /dev/null +++ b/model/retry_test.go @@ -0,0 +1,114 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestRetryStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + retryObj Retry + err string + } + testCases := []testCase{ + { + desp: "normal", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: ``, + }, + { + desp: "missing required name", + retryObj: Retry{ + Name: "", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Name' Error:Field validation for 'Name' failed on the 'required' tag`, + }, + { + desp: "invalid delay duration", + retryObj: Retry{ + Name: "1", + Delay: "P5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invdalid max delay duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "P5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invalid increment duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "P5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invalid jitter duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("P5S"), + }, + err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'reqiso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.retryObj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/states.go b/model/states.go index 13272ae..a717960 100644 --- a/model/states.go +++ b/model/states.go @@ -127,13 +127,6 @@ func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFi // GetMetadata ... 
func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } -// DelayState Causes the workflow execution to delay for a specified duration -type DelayState struct { - BaseState - // Amount of time (ISO 8601 format) to delay - TimeDelay string `json:"timeDelay" validate:"required"` -} - // EventState This state is used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel type EventState struct { BaseState diff --git a/model/util.go b/model/util.go index 1cfd08b..54f4a62 100644 --- a/model/util.go +++ b/model/util.go @@ -21,6 +21,8 @@ import ( "os" "path/filepath" "strings" + + "github.com/senseyeio/duration" ) const prefix = "file:/" @@ -90,3 +92,8 @@ func unmarshalFile(data []byte) (b []byte, err error) { } return file, nil } + +func validateISO8601TimeDuration(s string) error { + _, err := duration.ParseISO8601(s) + return err +} diff --git a/model/util_test.go b/model/util_test.go new file mode 100644 index 0000000..e6959c8 --- /dev/null +++ b/model/util_test.go @@ -0,0 +1,59 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValidateISO8601TimeDuration(t *testing.T) { + type testCase struct { + desp string + s string + err string + } + testCases := []testCase{ + { + desp: "normal_all_designator", + s: "P3Y6M4DT12H30M5S", + err: ``, + }, + { + desp: "normal_second_designator", + s: "PT5S", + err: ``, + }, + { + desp: "empty value", + s: "", + err: `could not parse duration string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := validateISO8601TimeDuration(tc.s) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 986e497..1e306c0 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -98,7 +98,7 @@ type Workflow struct { States []State `json:"states" validate:"required,min=1"` Events []Event `json:"events,omitempty"` Functions []Function `json:"functions,omitempty"` - Retries []Retry `json:"retries,omitempty"` + Retries []Retry `json:"retries,omitempty" validate:"dive"` } // UnmarshalJSON implementation for json Unmarshal function for the Workflow type diff --git a/parser/parser_test.go b/parser/parser_test.go index 22049a2..b1f588d 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -46,7 +46,7 @@ func TestCustomValidators(t *testing.T) { for _, file := range files { if !file.IsDir() { _, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.Error(t, err) + assert.Error(t, err, "Test File %s", file.Name()) } } } diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json new file mode 100644 index 0000000..fb519d5 --- /dev/null +++ 
b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json @@ -0,0 +1,85 @@ +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": [{ + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } + }], + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" + } + ], + "retries": [ + { + "name": "TimeoutRetryStrategy", + "delay": "P1S", + "maxAttempts": "5" + } + ], + "states": [ + { + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "transition": { + "nextState": "StartApplication" + } + }, + { + "condition": "{{ $.applicants[?(@.age < 18)] }}", + "transition": { + "nextState": "RejectApplication" + } + } + ], + "default": { + "transition": { + "nextState": "RejectApplication" + } + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "startApplicationWorkflowId" + } + } + ], + "end": { + "terminate": true + } + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "parameters": { + "applicant": "{{ $.applicant }}" + } + } + } + ], + "end": { + "terminate": true + } + } + ] + } \ No newline at end of file From cc7e1a363232e46405934dd2a23de3e33d0c0113 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Tue, 13 Sep 2022 21:01:57 +0800 Subject: [PATCH 006/110] feat(*): change test resources expression to jq (#73) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- .../testdata/workflows/applicationrequest-issue16.sw.yaml | 6 +++--- parser/testdata/workflows/applicationrequest-issue69.json | 6 +++--- parser/testdata/workflows/applicationrequest.json | 6 +++--- .../testdata/workflows/applicationrequest.multiauth.json | 6 +++--- parser/testdata/workflows/applicationrequest.rp.json | 6 +++--- parser/testdata/workflows/applicationrequest.url.json | 6 +++--- parser/testdata/workflows/checkinbox.cron-test.sw.yaml | 4 ++-- parser/testdata/workflows/checkinbox.sw.yaml | 4 ++-- parser/testdata/workflows/eventbaseddataandswitch.sw.json | 2 +- parser/testdata/workflows/eventbasedgreeting.sw.json | 6 +++--- parser/testdata/workflows/eventbasedgreeting.sw.p.json | 6 +++--- .../workflows/eventbasedgreetingexclusive.sw.json | 8 ++++---- .../workflows/eventbasedgreetingnonexclusive.sw.json | 6 +++--- .../testdata/workflows/greetings-constants-file.sw.yaml | 6 +++--- parser/testdata/workflows/greetings-secret-file.sw.yaml | 6 +++--- parser/testdata/workflows/greetings-secret.sw.yaml | 6 +++--- parser/testdata/workflows/greetings.sw.json | 4 ++-- parser/testdata/workflows/greetings.sw.yaml | 6 +++--- .../workflows/witherrors/applicationrequest-issue44.json | 6 +++--- .../applicationrequest.auth.invalid.format.json | 6 +++--- .../workflows/witherrors/applicationrequest.authdupl.json | 6 +++--- 21 files changed, 59 insertions(+), 59 deletions(-) diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml index a8e77ff..a2e19a5 100644 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ 
b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml @@ -25,9 +25,9 @@ states: - name: CheckApplication type: switch dataConditions: - - condition: "{{ $.applicants[?(@.age >= 18)] }}" + - condition: "${ .applicants | .age >= 18 }" transition: StartApplication - - condition: "{{ $.applicants[?(@.age < 18)] }}" + - condition: "${ .applicants | .age < 18 }" transition: RejectApplication defaultCondition: transition: RejectApplication @@ -44,5 +44,5 @@ states: - functionRef: refName: sendRejectionEmailFunction arguments: - applicant: "{{ $.applicant }}" + applicant: "${ .applicant }" end: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json index b0a9a78..2b2cec2 100644 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ b/parser/testdata/workflows/applicationrequest-issue69.json @@ -25,13 +25,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -66,7 +66,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json index e2bca99..ee43120 100644 --- a/parser/testdata/workflows/applicationrequest.json +++ b/parser/testdata/workflows/applicationrequest.json @@ -31,13 +31,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -72,7 +72,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json index b1bf69c..958c473 100644 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ b/parser/testdata/workflows/applicationrequest.multiauth.json @@ -42,13 +42,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -83,7 +83,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json index ad2acce..bc71b19 100644 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ b/parser/testdata/workflows/applicationrequest.rp.json @@ -15,13 +15,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } 
}, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -56,7 +56,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "arguments": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json index 64e62c6..3dde238 100644 --- a/parser/testdata/workflows/applicationrequest.url.json +++ b/parser/testdata/workflows/applicationrequest.url.json @@ -15,13 +15,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -56,7 +56,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml index cd548fe..8992626 100644 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml @@ -35,11 +35,11 @@ states: transition: SendTextForHighPriority - name: SendTextForHighPriority type: foreach - inputCollection: "{{ $.messages }}" + inputCollection: "${ .messages }" iterationParam: singlemessage actions: - functionRef: refName: sendTextFunction arguments: - message: "{{ $.singlemessage }}" + message: "${ .singlemessage }" end: true \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml index cdcec1e..b6be17f 100644 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ b/parser/testdata/workflows/checkinbox.sw.yaml @@ -38,12 +38,12 @@ states: nextState: SendTextForHighPriority - name: SendTextForHighPriority type: foreach - inputCollection: "{{ $.messages }}" + inputCollection: "${ .messages }" iterationParam: singlemessage actions: - functionRef: refName: sendTextFunction arguments: - message: "{{ $.singlemessage }}" + message: "${ .singlemessage }" end: terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json index 58482be..8da1692 100644 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ b/parser/testdata/workflows/eventbaseddataandswitch.sw.json @@ -25,7 +25,7 @@ "type": "switch", "dataConditions": [ { - "condition": "${ true }", + "condition": "${ true }", "transition": "CheckVisaStatus" } ] diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json index a62a865..e0d66a6 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.json @@ -30,14 +30,14 @@ "GreetingEvent" ], "eventDataFilter": { - "data": "{{ $.data.greet }}" + "data": "${ .data | .greet }" }, "actions": [ { "functionRef": { "refName": "greetingFunction", "arguments": { - "name": "{{ $.greet.name }}" + "name": "${ .greet | .name }" } } } @@ -45,7 +45,7 @@ } ], "stateDataFilter": { - "output": "{{ $.payload.greeting }}" + "output": "${ .payload | .greeting }" }, "end": { "terminate": true diff 
--git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json index f9beefa..d504bc9 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.p.json @@ -24,14 +24,14 @@ "GreetingEvent" ], "eventDataFilter": { - "data": "{{ $.data.greet }}" + "data": "${ .data | .greet }" }, "actions": [ { "functionRef": { "refName": "greetingFunction", "arguments": { - "name": "{{ $.greet.name }}" + "name": "${ .greet | .name }" } } } @@ -39,7 +39,7 @@ } ], "stateDataFilter": { - "output": "{{ $.payload.greeting }}" + "output": "${ .payload | .greeting }" }, "end": { "terminate": true diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json index 660e3ef..e685e0d 100644 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json @@ -36,14 +36,14 @@ "GreetingEvent" ], "eventDataFilter": { - "data": "{{ $.data.greet }}" + "data": "${ .data | .greet }" }, "actions": [ { "functionRef": { "refName": "greetingFunction", "arguments": { - "name": "{{ $.greet.name }}" + "name": "${ .greet | .name }" } } } @@ -61,7 +61,7 @@ "functionRef": { "refName": "greetingFunction2", "arguments": { - "name": "{{ $.greet.name }}" + "name": "${ .greet | .name }" } } } @@ -69,7 +69,7 @@ } ], "stateDataFilter": { - "output": "{{ $.payload.greeting }}" + "output": "${ .payload | .greeting }" }, "end": { "terminate": true diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json index 30f6354..8a3c78a 100644 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json @@ -37,14 +37,14 @@ "GreetingEvent2" ], "eventDataFilter": { - "data": "{{ $.data.greet }}" + "data": "${ .data | .greet }" }, "actions": [ { "functionRef": { "refName": "greetingFunction", "arguments": { - "name": "{{ $.greet.name }}" + "name": "${ .greet | .name }" } } } @@ -52,7 +52,7 @@ } ], "stateDataFilter": { - "output": "{{ $.payload.greeting }}" + "output": "${ .payload | .greeting }" }, "end": { "terminate": true diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml index 060ace9..a512c33 100644 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ b/parser/testdata/workflows/greetings-constants-file.sw.yaml @@ -31,10 +31,10 @@ states: - functionRef: refName: greetingFunction parameters: - name: "${ $SECRETS.SECRET1 }" + name: "${ SECRETS | .SECRET1 }" actionDataFilter: - dataResultsPath: "$.payload.greeting" + dataResultsPath: "${ .payload | .greeting }" stateDataFilter: - dataOutputPath: "$.greeting" + dataOutputPath: "${ .greeting }" end: terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml index a7afbe9..510632d 100644 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ b/parser/testdata/workflows/greetings-secret-file.sw.yaml @@ -31,10 +31,10 @@ states: - functionRef: refName: greetingFunction parameters: - name: "${ $SECRETS.SECRET1 }" + name: "${ .SECRETS | .SECRET1 }" actionDataFilter: - dataResultsPath: "$.payload.greeting" + dataResultsPath: "${ .payload | .greeting }" stateDataFilter: - 
dataOutputPath: "$.greeting" + dataOutputPath: "${ .greeting }" end: terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml index 1e6b71f..6d21849 100644 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ b/parser/testdata/workflows/greetings-secret.sw.yaml @@ -32,10 +32,10 @@ states: - functionRef: refName: greetingFunction parameters: - name: "${ $SECRETS.NAME }" + name: "${ .SECRETS | .NAME }" actionDataFilter: - dataResultsPath: "$.payload.greeting" + dataResultsPath: "${ .payload | .greeting }" stateDataFilter: - dataOutputPath: "$.greeting" + dataOutputPath: "${ .greeting }" end: terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json index ba56384..b43eab0 100644 --- a/parser/testdata/workflows/greetings.sw.json +++ b/parser/testdata/workflows/greetings.sw.json @@ -22,11 +22,11 @@ "functionRef": { "refName": "greetingFunction", "parameters": { - "name": "{{ $.person.name }}" + "name": "${ .person | .name }" } }, "actionDataFilter": { - "dataResultsPath": "{{ $.greeting }}" + "dataResultsPath": "${ .greeting }" } } ], diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml index de92c77..9280ca3 100644 --- a/parser/testdata/workflows/greetings.sw.yaml +++ b/parser/testdata/workflows/greetings.sw.yaml @@ -30,10 +30,10 @@ states: - functionRef: refName: greetingFunction parameters: - name: "$.greet.name" + name: "${ .greet | .name }" actionDataFilter: - dataResultsPath: "$.payload.greeting" + dataResultsPath: "${ .payload | .greeting }" stateDataFilter: - dataOutputPath: "$.greeting" + dataOutputPath: "${ .greeting }" end: terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json index fb519d5..8bc0eb5 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json +++ b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json @@ -31,13 +31,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -72,7 +72,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json index ffbe5be..4db741b 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json +++ b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json @@ -31,13 +31,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -72,7 +72,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { 
- "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } diff --git a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json index 30f75cd..8beb050 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json @@ -42,13 +42,13 @@ "type": "switch", "dataConditions": [ { - "condition": "{{ $.applicants[?(@.age >= 18)] }}", + "condition": "${ .applicants | .age >= 18 }", "transition": { "nextState": "StartApplication" } }, { - "condition": "{{ $.applicants[?(@.age < 18)] }}", + "condition": "${ .applicants | .age < 18 }", "transition": { "nextState": "RejectApplication" } @@ -83,7 +83,7 @@ "functionRef": { "refName": "sendRejectionEmailFunction", "parameters": { - "applicant": "{{ $.applicant }}" + "applicant": "${ .applicant }" } } } From b5f89fa00b7d01e27a3a6477836c5a4c48efb714 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Fri, 16 Sep 2022 20:12:25 +0800 Subject: [PATCH 007/110] fix(*): validate failed when transition is missing (#75) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/states.go | 18 ++-- model/workflow.go | 18 ++-- parser/parser_test.go | 8 +- .../applicationrequest-issue74.json | 82 +++++++++++++++++++ 4 files changed, 104 insertions(+), 22 deletions(-) create mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue74.json diff --git a/model/states.go b/model/states.go index a717960..f6b8be3 100644 --- a/model/states.go +++ b/model/states.go @@ -135,7 +135,7 @@ type EventState struct { // Define the events to be consumed and optional actions to be performed OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` // State specific timeouts - Timeout EventStateTimeout `json:"timeouts,omitempty"` + Timeout *EventStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON ... @@ -187,7 +187,7 @@ type OperationState struct { // Actions to be performed Actions []Action `json:"actions" validate:"required,min=1,dive"` // State specific timeouts - Timeouts OperationStateTimeout `json:"timeouts,omitempty"` + Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` } // OperationStateTimeout ... @@ -206,7 +206,7 @@ type ParallelState struct { // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` // State specific timeouts - Timeouts ParallelStateTimeout `json:"timeouts,omitempty"` + Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` } // ParallelStateTimeout ... @@ -221,7 +221,7 @@ type InjectState struct { // JSON object which can be set as states data input and can be manipulated via filters Data map[string]interface{} `json:"data" validate:"required,min=1"` // State specific timeouts - Timeouts InjectStateTimeout `json:"timeouts,omitempty"` + Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` } // InjectStateTimeout ... 
@@ -243,7 +243,7 @@ type ForEachState struct { // Actions to be executed for each of the elements of inputCollection Actions []Action `json:"actions,omitempty"` // State specific timeout - Timeouts ForEachStateTimeout `json:"timeouts,omitempty"` + Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` // Mode Specifies how iterations are to be performed (sequentially or in parallel) Mode ForEachModeType `json:"mode,omitempty"` } @@ -280,7 +280,7 @@ type SleepState struct { // Duration (ISO 8601 duration format) to sleep Duration string `json:"duration" validate:"required"` // Timeouts State specific timeouts - Timeouts SleepStateTimeout `json:"timeouts,omitempty"` + Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` } // SleepStateTimeout ... @@ -301,7 +301,7 @@ type EventBasedSwitchState struct { // Defines conditions evaluated against events EventConditions []EventCondition `json:"eventConditions" validate:"required,min=1,dive"` // State specific timeouts - Timeouts EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` + Timeouts *EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON implementation for json Unmarshal function for the Eventbasedswitch type @@ -395,8 +395,8 @@ type EndEventCondition struct { // DataBasedSwitchState Permits transitions to other states based on data conditions type DataBasedSwitchState struct { BaseSwitchState - DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` - Timeouts DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` + DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` + Timeouts *DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON implementation for json Unmarshal function for the Databasedswitch type diff --git a/model/workflow.go b/model/workflow.go index 1e306c0..0d2f0ef 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -95,7 +95,7 @@ type BaseWorkflow struct { // Workflow base definition type Workflow struct { BaseWorkflow - States []State `json:"states" validate:"required,min=1"` + States []State `json:"states" validate:"required,min=1,dive"` Events []Event `json:"events,omitempty"` Functions []Function `json:"functions,omitempty"` Retries []Retry `json:"retries,omitempty" validate:"dive"` @@ -381,8 +381,8 @@ func (s *Start) UnmarshalJSON(data []byte) error { // DefaultCondition Can be either a transition or end definition type DefaultCondition struct { - Transition Transition `json:"transition,omitempty"` - End End `json:"end,omitempty"` + Transition *Transition `json:"transition,omitempty"` + End *End `json:"end,omitempty"` } // Schedule ... @@ -504,12 +504,12 @@ type OnEvents struct { // Action ... type Action struct { // Unique action definition name - Name string `json:"name,omitempty"` - FunctionRef FunctionRef `json:"functionRef,omitempty"` + Name string `json:"name,omitempty"` + FunctionRef *FunctionRef `json:"functionRef,omitempty"` // References a 'trigger' and 'result' reusable event definitions - EventRef EventRef `json:"eventRef,omitempty"` + EventRef *EventRef `json:"eventRef,omitempty"` // References a sub-workflow to be executed - SubFlowRef WorkflowRef `json:"subFlowRef,omitempty"` + SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` // Sleep Defines time period workflow execution should sleep before / after function execution Sleep Sleep `json:"sleep,omitempty"` // RetryRef References a defined workflow retry definition. 
If not defined the default retry policy is assumed @@ -529,8 +529,8 @@ type End struct { // Defines events that should be produced ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` // If set to true, triggers workflow compensation. Default is false - Compensate bool `json:"compensate,omitempty"` - ContinueAs ContinueAs `json:"continueAs,omitempty"` + Compensate bool `json:"compensate,omitempty"` + ContinueAs *ContinueAs `json:"continueAs,omitempty"` } // UnmarshalJSON ... diff --git a/parser/parser_test.go b/parser/parser_test.go index b1f588d..eea8bb4 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -30,10 +30,10 @@ func TestBasicValidation(t *testing.T) { for _, file := range files { if !file.IsDir() { workflow, err := FromFile(filepath.Join(rootPath, file.Name())) - if assert.NoError(t, err) { - assert.NotEmpty(t, workflow.Name) - assert.NotEmpty(t, workflow.ID) - assert.NotEmpty(t, workflow.States) + if assert.NoError(t, err, "Test File %s", file.Name()) { + assert.NotEmpty(t, workflow.Name, "Test File %s", file.Name()) + assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) + assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) } } } diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json new file mode 100644 index 0000000..2a712c6 --- /dev/null +++ b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json @@ -0,0 +1,82 @@ +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": [{ + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } + }], + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" + } + ], + "retries": [ + { + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" + } + ], + "states": [ + { + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .applicants | .age >= 18 }" + }, + { + "condition": "${ .applicants | .age < 18 }", + "transition": { + "nextState": "RejectApplication" + } + } + ], + "default": { + "transition": { + "nextState": "RejectApplication" + } + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "startApplicationWorkflowId" + } + } + ], + "end": { + "terminate": true + } + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "parameters": { + "applicant": "${ .applicant }" + } + } + } + ], + "end": { + "terminate": true + } + } + ] + } \ No newline at end of file From e72f6dbf9ecbe8c98939fbb259eac6dbaf6cf4b1 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Fri, 23 Sep 2022 19:48:55 +0800 Subject: [PATCH 008/110] fix(*): validate stateExecTimeout iso8601 (#76) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- go.mod | 2 +- go.sum | 4 +- model/auth.go | 1 + model/sleep_state.go | 55 ++++++ model/sleep_state_test.go | 69 +++++++ model/state_exec_timeout.go | 91 +++++++++ model/state_exec_timeout_test.go | 177 ++++++++++++++++++ model/states.go | 18 +- model/workflow.go | 27 --- .../workflows/greetings_sleep.sw.json | 46 +++++ 
10 files changed, 444 insertions(+), 46 deletions(-) create mode 100644 model/sleep_state.go create mode 100644 model/sleep_state_test.go create mode 100644 model/state_exec_timeout.go create mode 100644 model/state_exec_timeout_test.go create mode 100644 parser/testdata/workflows/greetings_sleep.sw.json diff --git a/go.mod b/go.mod index e8121ee..20b0ffd 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/go-playground/validator/v10 v10.11.0 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.7.0 - k8s.io/apimachinery v0.25.0 + k8s.io/apimachinery v0.25.1 sigs.k8s.io/yaml v1.3.0 ) diff --git a/go.sum b/go.sum index 79dea06..4dbb5e1 100644 --- a/go.sum +++ b/go.sum @@ -93,8 +93,8 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/apimachinery v0.25.0 h1:MlP0r6+3XbkUG2itd6vp3oxbtdQLQI94fD5gCS+gnoU= -k8s.io/apimachinery v0.25.0/go.mod h1:qMx9eAk0sZQGsXGu86fab8tZdffHbwUfsvzqKn4mfB0= +k8s.io/apimachinery v0.25.1 h1:t0XrnmCEHVgJlR2arwO8Awp9ylluDic706WePaYCBTI= +k8s.io/apimachinery v0.25.1/go.mod h1:hqqA1X0bsgsxI6dXsJ4HnNTBOmJNxyPp8dw3u2fSHwA= k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= diff --git a/model/auth.go b/model/auth.go index b9a1166..4c0120a 100644 --- a/model/auth.go +++ b/model/auth.go @@ -90,6 +90,7 @@ type Auth struct { // UnmarshalJSON implements json.Unmarshaler func (a *AuthDefinitions) UnmarshalJSON(b []byte) error { if len(b) == 0 { + // TODO: Normalize error messages return fmt.Errorf("no bytes to unmarshal") } diff --git a/model/sleep_state.go b/model/sleep_state.go new file mode 100644 index 0000000..907df8a --- /dev/null +++ b/model/sleep_state.go @@ -0,0 +1,55 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "context" + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +// SleepState suspends workflow execution for a given time duration. 
+type SleepState struct { + BaseState + + // Duration (ISO 8601 duration format) to sleep + Duration string `json:"duration" validate:"required"` + // Timeouts State specific timeouts + Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` +} + +// SleepStateTimeout defines timeout settings for sleep state +type SleepStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` +} + +// SleepStateStructLevelValidation custom validator for SleepState +func SleepStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + sleepState := structLevel.Current().Interface().(SleepState) + + err := validateISO8601TimeDuration(sleepState.Duration) + if err != nil { + structLevel.ReportError(reflect.ValueOf(sleepState.Duration), "Duration", "duration", "reqiso8601duration", "") + } +} + +func init() { + val.GetValidator().RegisterStructValidationCtx( + SleepStateStructLevelValidation, + SleepState{}, + ) +} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go new file mode 100644 index 0000000..34e39cc --- /dev/null +++ b/model/sleep_state_test.go @@ -0,0 +1,69 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestSleepStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + state SleepState + err string + } + testCases := []testCase{ + { + desp: "normal duration", + state: SleepState{ + BaseState: BaseState{ + Name: "1", + Type: "sleep", + }, + Duration: "PT10S", + }, + err: ``, + }, + { + desp: "invalid duration", + state: SleepState{ + BaseState: BaseState{ + Name: "1", + Type: "sleep", + }, + Duration: "T10S", + }, + err: `Key: 'SleepState.Duration' Error:Field validation for 'Duration' failed on the 'reqiso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.state) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go new file mode 100644 index 0000000..81c9a0f --- /dev/null +++ b/model/state_exec_timeout.go @@ -0,0 +1,91 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +// StateExecTimeout defines workflow state execution timeout +type StateExecTimeout struct { + // Single state execution timeout, not including retries (ISO 8601 duration format) + Single string `json:"single,omitempty"` + // Total state execution timeout, including retries (ISO 8601 duration format) + Total string `json:"total" validate:"required"` +} + +// just define another type to unmarshal object, so the UnmarshalJSON will not called recursively +type stateExecTimeoutForUnmarshal StateExecTimeout + +// UnmarshalJSON unmarshal StateExecTimeout object from json bytes +func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { + // We must trim the leading space, because we use first byte to detect data's type + data = bytes.TrimSpace(data) + if len(data) == 0 { + // TODO: Normalize error messages + return fmt.Errorf("no bytes to unmarshal") + } + + var err error + switch data[0] { + case '"': + s.Total, err = unmarshalString(data) + return err + case '{': + var v stateExecTimeoutForUnmarshal + err = json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + + *s = StateExecTimeout(v) + return nil + } + + return fmt.Errorf("stateExecTimeout value '%s' not support, it must be an object or string", string(data)) +} + +// StateExecTimeoutStructLevelValidation custom validator for StateExecTimeout +func StateExecTimeoutStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + timeoutObj := structLevel.Current().Interface().(StateExecTimeout) + + // TODO: use Custom Validation Functions tags for iso8601duration + err := validateISO8601TimeDuration(timeoutObj.Total) + if err != nil { + structLevel.ReportError(reflect.ValueOf(timeoutObj.Total), "Total", "total", "reqiso8601duration", "") + } + + if timeoutObj.Single != "" { + err = validateISO8601TimeDuration(timeoutObj.Single) + if err != nil { + structLevel.ReportError(reflect.ValueOf(timeoutObj.Single), "Single", "single", "reqiso8601duration", "") + } + } +} + +func init() { + val.GetValidator().RegisterStructValidationCtx( + StateExecTimeoutStructLevelValidation, + StateExecTimeout{}, + ) +} diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go new file mode 100644 index 0000000..fd84174 --- /dev/null +++ b/model/state_exec_timeout_test.go @@ -0,0 +1,177 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + + expect *StateExecTimeout + err string + } + testCases := []testCase{ + { + desp: "normal string", + data: `"PT10S"`, + + expect: &StateExecTimeout{ + Single: "", + Total: "PT10S", + }, + err: ``, + }, + { + desp: "normal object with total", + data: `{ + "total": "PT10S" + }`, + + expect: &StateExecTimeout{ + Single: "", + Total: "PT10S", + }, + err: ``, + }, + { + desp: "normal object with total & single", + data: `{ + "single": "PT1S", + "total": "PT10S" + }`, + + expect: &StateExecTimeout{ + Single: "PT1S", + Total: "PT10S", + }, + err: ``, + }, + { + desp: "invalid string or object", + data: `PT10S`, + + expect: &StateExecTimeout{}, + err: `stateExecTimeout value 'PT10S' not support, it must be an object or string`, + }, + { + desp: "invalid total type", + data: `{ + "single": "PT1S", + "total": 10 + }`, + + expect: &StateExecTimeout{}, + err: `json: cannot unmarshal number into Go struct field stateExecTimeoutForUnmarshal.total of type string`, + }, + { + desp: "invalid single type", + data: `{ + "single": 1, + "total": "PT10S" + }`, + + expect: &StateExecTimeout{ + Single: "", + Total: "PT10S", + }, + err: `json: cannot unmarshal number into Go struct field stateExecTimeoutForUnmarshal.single of type string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + actual := &StateExecTimeout{} + err := actual.UnmarshalJSON([]byte(tc.data)) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, actual) + }) + } +} + +func TestStateExecTimeoutStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + timeout StateExecTimeout + err string + } + testCases := []testCase{ + { + desp: "normal total", + timeout: StateExecTimeout{ + Total: "PT10S", + }, + err: ``, + }, + { + desp: "normal total & single", + timeout: StateExecTimeout{ + Single: "PT10S", + Total: "PT10S", + }, + err: ``, + }, + { + desp: "missing total", + timeout: StateExecTimeout{ + Single: "PT10S", + Total: "", + }, + err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'required' tag\nKey: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invalid total duration", + timeout: StateExecTimeout{ + Single: "PT10S", + Total: "T10S", + }, + err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'reqiso8601duration' tag`, + }, + { + desp: "invalid single duration", + timeout: StateExecTimeout{ + Single: "T10S", + Total: "PT10S", + }, + err: `Key: 'StateExecTimeout.Single' Error:Field validation for 'Single' failed on the 'reqiso8601duration' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.timeout) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/states.go b/model/states.go index f6b8be3..b11c788 100644 --- a/model/states.go +++ b/model/states.go @@ -77,7 +77,7 @@ type State interface { // BaseState ... 
type BaseState struct { // Unique State id - ID string `json:"id,omitempty" validate:"omitempty,min=1"` + ID string `json:"id,omitempty"` // State name Name string `json:"name" validate:"required"` // State type @@ -89,7 +89,7 @@ type BaseState struct { // State data filter StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` // Unique Name of a workflow state which is responsible for compensation of this state - CompensatedBy string `json:"compensatedBy,omitempty" validate:"omitempty,min=1"` + CompensatedBy string `json:"compensatedBy,omitempty"` // If true, this state is used to compensate another state. Default is false UsedForCompensation bool `json:"usedForCompensation,omitempty"` // State end definition @@ -274,20 +274,6 @@ type CallbackStateTimeout struct { EventTimeout string `json:"eventTimeout,omitempty"` } -// SleepState ... -type SleepState struct { - BaseState - // Duration (ISO 8601 duration format) to sleep - Duration string `json:"duration" validate:"required"` - // Timeouts State specific timeouts - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` -} - -// SleepStateTimeout ... -type SleepStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - // BaseSwitchState ... type BaseSwitchState struct { BaseState diff --git a/model/workflow.go b/model/workflow.go index 0d2f0ef..0a97f86 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -316,33 +316,6 @@ func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { return nil } -// StateExecTimeout ... -type StateExecTimeout struct { - // Single state execution timeout, not including retries (ISO 8601 duration format) - Single string `json:"single,omitempty" validate:"omitempty,min=1"` - // Total state execution timeout, including retries (ISO 8601 duration format) - Total string `json:"total" validate:"required"` -} - -// UnmarshalJSON ... 
-func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - stateTimeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &stateTimeout); err != nil { - s.Total, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("total", stateTimeout, &s.Total); err != nil { - return err - } - if err := unmarshalKey("single", stateTimeout, &s.Single); err != nil { - return err - } - return nil -} - // Error declaration for workflow definitions type Error struct { // Name Domain-specific error name diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json new file mode 100644 index 0000000..ab6d89b --- /dev/null +++ b/parser/testdata/workflows/greetings_sleep.sw.json @@ -0,0 +1,46 @@ +{ + "id": "greeting", + "version": "1.0", + "name": "Greeting Workflow", + "description": "Greet Someone", + "specVersion": "0.7", + "start": { + "stateName": "Greet" + }, + "functions": [ + { + "name": "greetingFunction", + "operation": "file://myapis/greetingapis.json#greeting" + } + ], + "states": [ + { + "name": "SleepHere", + "type": "sleep", + "timeouts": { + "stateExecTimeout": "PT10S" + }, + "duration": "PT40S" + }, + { + "name": "Greet", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "greetingFunction", + "parameters": { + "name": "${ .person | .name }" + } + }, + "actionDataFilter": { + "dataResultsPath": "${ .greeting }" + } + } + ], + "end": { + "terminate": true + } + } + ] +} \ No newline at end of file From 1293519bdb27444a7178a221846cd94f9c2b6c8f Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 26 Sep 2022 20:52:20 +0800 Subject: [PATCH 009/110] refactor(*): add iso8601duration validate tag (#78) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/delay_state.go | 11 ++---- model/delay_state_test.go | 4 +-- model/retry.go | 33 ++++-------------- model/retry_test.go | 15 +++++--- model/sleep_state.go | 27 +-------------- model/sleep_state_test.go | 2 +- model/state_exec_timeout.go | 34 ++---------------- model/state_exec_timeout_test.go | 6 ++-- model/util.go | 7 ---- model/util_test.go | 44 ------------------------ validator/tags.go | 20 +++++++++++ validator/validator.go | 19 ++++++++++ validator/validator_test.go | 59 ++++++++++++++++++++++++++++++++ 13 files changed, 127 insertions(+), 154 deletions(-) create mode 100644 validator/tags.go create mode 100644 validator/validator_test.go diff --git a/model/delay_state.go b/model/delay_state.go index b327211..6f506ff 100644 --- a/model/delay_state.go +++ b/model/delay_state.go @@ -15,8 +15,6 @@ package model import ( - "reflect" - "github.com/go-playground/validator/v10" val "github.com/serverlessworkflow/sdk-go/v2/validator" @@ -33,15 +31,10 @@ func init() { type DelayState struct { BaseState // Amount of time (ISO 8601 format) to delay - TimeDelay string `json:"timeDelay" validate:"required"` + TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` } // DelayStateStructLevelValidation custom validator for DelayState Struct func DelayStateStructLevelValidation(structLevel validator.StructLevel) { - delayStateObj := structLevel.Current().Interface().(DelayState) - - err := validateISO8601TimeDuration(delayStateObj.TimeDelay) - if err != nil { - structLevel.ReportError(reflect.ValueOf(delayStateObj.TimeDelay), "TimeDelay", "timeDelay", "reqiso8601duration", "") - } + // TODO } diff --git a/model/delay_state_test.go 
b/model/delay_state_test.go index 51a1395..258e143 100644 --- a/model/delay_state_test.go +++ b/model/delay_state_test.go @@ -49,7 +49,7 @@ func TestDelayStateStructLevelValidation(t *testing.T) { }, TimeDelay: "", }, - err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag\nKey: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'reqiso8601duration' tag`, + err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag`, }, { desp: "invalid timeDelay duration", @@ -60,7 +60,7 @@ func TestDelayStateStructLevelValidation(t *testing.T) { }, TimeDelay: "P5S", }, - err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'reqiso8601duration' tag`, + err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'iso8601duration' tag`, }, } for _, tc := range testCases { diff --git a/model/retry.go b/model/retry.go index a0ce3c9..c49bce4 100644 --- a/model/retry.go +++ b/model/retry.go @@ -36,16 +36,18 @@ type Retry struct { // Unique retry strategy name Name string `json:"name" validate:"required"` // Time delay between retry attempts (ISO 8601 duration format) - Delay string `json:"delay,omitempty"` + Delay string `json:"delay,omitempty" validate:"omitempty,iso8601duration"` // Maximum time delay between retry attempts (ISO 8601 duration format) - MaxDelay string `json:"maxDelay,omitempty"` + MaxDelay string `json:"maxDelay,omitempty" validate:"omitempty,iso8601duration"` // Static value by which the delay increases during each attempt (ISO 8601 time format) - Increment string `json:"increment,omitempty"` + Increment string `json:"increment,omitempty" validate:"omitempty,iso8601duration"` // Numeric value, if specified the delay between retries is multiplied by this value. Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` // Maximum number of retry attempts. MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` + // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) + // TODO: make this type compatible with iso8601duration Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` } @@ -53,31 +55,10 @@ type Retry struct { func RetryStructLevelValidation(structLevel validator.StructLevel) { retryObj := structLevel.Current().Interface().(Retry) - if retryObj.Delay != "" { - err := validateISO8601TimeDuration(retryObj.Delay) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Delay), "Delay", "delay", "reqiso8601duration", "") - } - } - - if retryObj.MaxDelay != "" { - err := validateISO8601TimeDuration(retryObj.MaxDelay) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.MaxDelay), "MaxDelay", "maxDelay", "reqiso8601duration", "") - } - } - - if retryObj.Increment != "" { - err := validateISO8601TimeDuration(retryObj.Increment) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Increment), "Increment", "increment", "reqiso8601duration", "") - } - } - if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { - err := validateISO8601TimeDuration(retryObj.Jitter.StrVal) + err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "reqiso8601duration", "") + structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") } } } diff --git a/model/retry_test.go b/model/retry_test.go index 18eddac..228345e 100644 --- a/model/retry_test.go +++ b/model/retry_test.go @@ -41,6 +41,13 @@ func TestRetryStructLevelValidation(t *testing.T) { }, err: ``, }, + { + desp: "normal with all optional fields omitted", + retryObj: Retry{ + Name: "1", + }, + err: ``, + }, { desp: "missing required name", retryObj: Retry{ @@ -61,7 +68,7 @@ func TestRetryStructLevelValidation(t *testing.T) { Increment: "PT5S", Jitter: floatstr.FromString("PT5S"), }, - err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'reqiso8601duration' tag`, + err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'iso8601duration' tag`, }, { desp: "invalid max delay duration", @@ -72,7 +79,7 @@ func TestRetryStructLevelValidation(t *testing.T) { Increment: "PT5S", Jitter: floatstr.FromString("PT5S"), }, - err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'reqiso8601duration' tag`, + err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'iso8601duration' tag`, }, { desp: "invalid increment duration", @@ -83,7 +90,7 @@ func TestRetryStructLevelValidation(t *testing.T) { Increment: "P5S", Jitter: floatstr.FromString("PT5S"), }, - err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'reqiso8601duration' tag`, + err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'iso8601duration' tag`, }, { desp: "invalid jitter duration", @@ -94,7 +101,7 @@ func TestRetryStructLevelValidation(t *testing.T) { Increment: "PT5S", Jitter: floatstr.FromString("P5S"), }, - err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'reqiso8601duration' tag`, + err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'iso8601duration' tag`, }, } diff --git a/model/sleep_state.go b/model/sleep_state.go index 907df8a..9ce7a0f 100644 --- a/model/sleep_state.go +++ b/model/sleep_state.go
@@ -14,20 +14,12 @@ package model -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - // SleepState suspends workflow execution for a given time duration. type SleepState struct { BaseState // Duration (ISO 8601 duration format) to sleep - Duration string `json:"duration" validate:"required"` + Duration string `json:"duration" validate:"required,iso8601duration"` // Timeouts State specific timeouts Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` } @@ -36,20 +28,3 @@ type SleepState struct { type SleepStateTimeout struct { StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` } - -// SleepStateStructLevelValidation custom validator for SleepState -func SleepStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - sleepState := structLevel.Current().Interface().(SleepState) - - err := validateISO8601TimeDuration(sleepState.Duration) - if err != nil { - structLevel.ReportError(reflect.ValueOf(sleepState.Duration), "Duration", "duration", "reqiso8601duration", "") - } -} - -func init() { - val.GetValidator().RegisterStructValidationCtx( - SleepStateStructLevelValidation, - SleepState{}, - ) -} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go index 34e39cc..4fc531d 100644 --- a/model/sleep_state_test.go +++ b/model/sleep_state_test.go @@ -49,7 +49,7 @@ func TestSleepStateStructLevelValidation(t *testing.T) { }, Duration: "T10S", }, - err: `Key: 'SleepState.Duration' Error:Field validation for 'Duration' failed on the 'reqiso8601duration' tag`, + err: `Key: 'SleepState.Duration' Error:Field validation for 'Duration' failed on the 'iso8601duration' tag`, }, } diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index 81c9a0f..1799d2e 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -16,21 +16,16 @@ package model import ( "bytes" - "context" "encoding/json" "fmt" - "reflect" - - validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) // StateExecTimeout defines workflow state execution timeout type StateExecTimeout struct { // Single state execution timeout, not including retries (ISO 8601 duration format) - Single string `json:"single,omitempty"` + Single string `json:"single,omitempty" validate:"omitempty,iso8601duration"` // Total state execution timeout, including retries (ISO 8601 duration format) - Total string `json:"total" validate:"required"` + Total string `json:"total" validate:"required,iso8601duration"` } // just define another type to unmarshal object, so the UnmarshalJSON will not called recursively @@ -64,28 +59,3 @@ func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { return fmt.Errorf("stateExecTimeout value '%s' not support, it must be an object or string", string(data)) } - -// StateExecTimeoutStructLevelValidation custom validator for StateExecTimeout -func StateExecTimeoutStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - timeoutObj := structLevel.Current().Interface().(StateExecTimeout) - - // TODO: use Custom Validation Functions tags for iso8601duration - err := validateISO8601TimeDuration(timeoutObj.Total) - if err != nil { - structLevel.ReportError(reflect.ValueOf(timeoutObj.Total), "Total", "total", "reqiso8601duration", "") - } - - if timeoutObj.Single != "" { - err = validateISO8601TimeDuration(timeoutObj.Single) - if err != nil { - 
structLevel.ReportError(reflect.ValueOf(timeoutObj.Single), "Single", "single", "reqiso8601duration", "") - } - } -} - -func init() { - val.GetValidator().RegisterStructValidationCtx( - StateExecTimeoutStructLevelValidation, - StateExecTimeout{}, - ) -} diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go index fd84174..0dd3fe1 100644 --- a/model/state_exec_timeout_test.go +++ b/model/state_exec_timeout_test.go @@ -142,7 +142,7 @@ func TestStateExecTimeoutStructLevelValidation(t *testing.T) { Single: "PT10S", Total: "", }, - err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'required' tag\nKey: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'reqiso8601duration' tag`, + err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'required' tag`, }, { desp: "invalid total duration", @@ -150,7 +150,7 @@ func TestStateExecTimeoutStructLevelValidation(t *testing.T) { Single: "PT10S", Total: "T10S", }, - err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'reqiso8601duration' tag`, + err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'iso8601duration' tag`, }, { desp: "invalid single duration", @@ -158,7 +158,7 @@ func TestStateExecTimeoutStructLevelValidation(t *testing.T) { Single: "T10S", Total: "PT10S", }, - err: `Key: 'StateExecTimeout.Single' Error:Field validation for 'Single' failed on the 'reqiso8601duration' tag`, + err: `Key: 'StateExecTimeout.Single' Error:Field validation for 'Single' failed on the 'iso8601duration' tag`, }, } for _, tc := range testCases { diff --git a/model/util.go b/model/util.go index 54f4a62..1cfd08b 100644 --- a/model/util.go +++ b/model/util.go @@ -21,8 +21,6 @@ import ( "os" "path/filepath" "strings" - - "github.com/senseyeio/duration" ) const prefix = "file:/" @@ -92,8 +90,3 @@ func unmarshalFile(data []byte) (b []byte, err error) { } return file, nil } - -func validateISO8601TimeDuration(s string) error { - _, err := duration.ParseISO8601(s) - return err -} diff --git a/model/util_test.go b/model/util_test.go index e6959c8..c960f3c 100644 --- a/model/util_test.go +++ b/model/util_test.go @@ -13,47 +13,3 @@ // limitations under the License. package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestValidateISO8601TimeDuration(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "normal_all_designator", - s: "P3Y6M4DT12H30M5S", - err: ``, - }, - { - desp: "normal_second_designator", - s: "PT5S", - err: ``, - }, - { - desp: "empty value", - s: "", - err: `could not parse duration string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := validateISO8601TimeDuration(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/validator/tags.go b/validator/tags.go new file mode 100644 index 0000000..e568aba --- /dev/null +++ b/validator/tags.go @@ -0,0 +1,20 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package validator + +const ( + // TagISO8601Duration is the validate tag for iso8601 time duration format + TagISO8601Duration = "iso8601duration" +) diff --git a/validator/validator.go b/validator/validator.go index fbff15f..d83309c 100644 --- a/validator/validator.go +++ b/validator/validator.go @@ -15,7 +15,10 @@ package validator import ( + "context" + validator "github.com/go-playground/validator/v10" + "github.com/senseyeio/duration" ) // TODO: expose a better validation message. See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation @@ -24,9 +27,25 @@ var validate *validator.Validate func init() { validate = validator.New() + + err := validate.RegisterValidationCtx("iso8601duration", validateISO8601TimeDurationFunc) + if err != nil { + panic(err) + } } // GetValidator gets the default validator.Validate reference func GetValidator() *validator.Validate { return validate } + +// ValidateISO8601TimeDuration validate the string is iso8601 duration format +func ValidateISO8601TimeDuration(s string) error { + _, err := duration.ParseISO8601(s) + return err +} + +func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) bool { + err := ValidateISO8601TimeDuration(fl.Field().String()) + return err == nil +} diff --git a/validator/validator_test.go b/validator/validator_test.go new file mode 100644 index 0000000..b81df33 --- /dev/null +++ b/validator/validator_test.go @@ -0,0 +1,59 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package validator + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValidateISO8601TimeDuration(t *testing.T) { + type testCase struct { + desp string + s string + err string + } + testCases := []testCase{ + { + desp: "normal_all_designator", + s: "P3Y6M4DT12H30M5S", + err: ``, + }, + { + desp: "normal_second_designator", + s: "PT5S", + err: ``, + }, + { + desp: "empty value", + s: "", + err: `could not parse duration string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := ValidateISO8601TimeDuration(tc.s) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} From 828391ac1cf101898655df7ad09f8d4501b94302 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Tue, 27 Sep 2022 19:20:42 +0800 Subject: [PATCH 010/110] feat(*): add v0.8 event data filter's useData field (#79) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/event_data_filter.go | 54 +++++++++++++ model/event_data_filter_test.go | 81 +++++++++++++++++++ model/workflow.go | 8 -- .../workflows/eventbasedgreeting.sw.json | 3 +- 4 files changed, 137 insertions(+), 9 deletions(-) create mode 100644 model/event_data_filter.go create mode 100644 model/event_data_filter_test.go diff --git a/model/event_data_filter.go b/model/event_data_filter.go new file mode 100644 index 0000000..5193a4d --- /dev/null +++ b/model/event_data_filter.go @@ -0,0 +1,54 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "bytes" + "encoding/json" + "fmt" +) + +// EventDataFilter used to filter consumed event payloads. +type EventDataFilter struct { + // UseData represent where event payload is added/merged to state data. If it's false, data & toStateData should be ignored. + // Defaults to true. + UseData bool `json:"useData,omitempty"` + + // Workflow expression that filters of the event data (payload) + Data string `json:"data,omitempty"` + // Workflow expression that selects a state data element to which the event payload should be added/merged into. If not specified, denotes, the top-level state data element. 
+ ToStateData string `json:"toStateData,omitempty"` +} + +type eventDataFilterForUnmarshal EventDataFilter + +func (f *EventDataFilter) UnmarshalJSON(data []byte) error { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + v := eventDataFilterForUnmarshal{ + UseData: true, + } + err := json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + + *f = EventDataFilter(v) + return nil +} diff --git a/model/event_data_filter_test.go b/model/event_data_filter_test.go new file mode 100644 index 0000000..1d267ec --- /dev/null +++ b/model/event_data_filter_test.go @@ -0,0 +1,81 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEventDataFilterUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect EventDataFilter + err string + } + testCases := []testCase{ + { + desp: "normal test", + data: `{"data": "1", "toStateData": "2"}`, + expect: EventDataFilter{ + UseData: true, + Data: "1", + ToStateData: "2", + }, + err: ``, + }, + { + desp: "add UseData to false", + data: `{"UseData": false, "data": "1", "toStateData": "2"}`, + expect: EventDataFilter{ + UseData: false, + Data: "1", + ToStateData: "2", + }, + err: ``, + }, + { + desp: "empty data", + data: ` `, + expect: EventDataFilter{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json format", + data: `{"data": 1, "toStateData": "2"}`, + expect: EventDataFilter{}, + err: `json: cannot unmarshal number into Go struct field eventDataFilterForUnmarshal.data of type string`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v EventDataFilter + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 0a97f86..f6cf70f 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -560,14 +560,6 @@ type StateDataFilter struct { Output string `json:"output,omitempty"` } -// EventDataFilter ... -type EventDataFilter struct { - // Workflow expression that filters of the event data (payload) - Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the event payload should be added/merged into. If not specified, denotes, the top-level state data element. 
- ToStateData string `json:"toStateData,omitempty"` -} - // Branch Definition type Branch struct { // Branch name diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json index e0d66a6..a5cff3e 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.json @@ -30,7 +30,8 @@ "GreetingEvent" ], "eventDataFilter": { - "data": "${ .data | .greet }" + "data": "${ .data | .greet }", + "useData": false }, "actions": [ { From eb32b3bbe418a2d7c254bafb56fc1ca41cd23f85 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 27 Sep 2022 16:50:49 -0300 Subject: [PATCH 011/110] v08 spec alignment - Support custom function type definition (#80) Signed-off-by: spolti Signed-off-by: spolti --- go.mod | 2 +- go.sum | 9 +++- model/function.go | 25 +++++++---- parser/parser_test.go | 11 +++++ .../greetings-custom-function.sw.yaml | 42 +++++++++++++++++++ 5 files changed, 78 insertions(+), 11 deletions(-) create mode 100644 parser/testdata/workflows/greetings-custom-function.sw.yaml diff --git a/go.mod b/go.mod index 20b0ffd..af6d842 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/serverlessworkflow/sdk-go/v2 go 1.19 require ( - github.com/go-playground/validator/v10 v10.11.0 + github.com/go-playground/validator/v10 v10.11.1 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.7.0 k8s.io/apimachinery v0.25.1 diff --git a/go.sum b/go.sum index 4dbb5e1..53840d5 100644 --- a/go.sum +++ b/go.sum @@ -11,8 +11,8 @@ github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.11.0 h1:0W+xRM511GY47Yy3bZUbJVitCNg2BOGlCyvTqsp/xIw= -github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= +github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= +github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= @@ -37,6 +37,8 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 h1:Dz0HrI1AtNSGCE8LXLLqoZU4iuOJXPWndenCsZfstA8= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46/go.mod h1:is8FVkzSi7PYLWEXT5MgWhglFsyyiW8ffxAoJqfuFZo= +github.com/serverlessworkflow/sdk-go v1.0.0 h1:XsRtESODZhyvwHYgARH2VuubiZytME3jiJ61zwGj2YQ= +github.com/serverlessworkflow/sdk-go v1.0.0/go.mod h1:y8Va8RTSHEGShsyISobSCKiniKiBKTUne73pY5sux0E= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= @@ -87,6 +89,8 @@ gopkg.in/check.v1 
v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= @@ -99,5 +103,6 @@ k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/model/function.go b/model/function.go index 662b341..74ba1b9 100644 --- a/model/function.go +++ b/model/function.go @@ -17,18 +17,26 @@ package model import "encoding/json" const ( - // FunctionTypeREST ... + // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service + // operation that needs to be invoked, separated by a '#'. FunctionTypeREST FunctionType = "rest" - // FunctionTypeRPC ... + // FunctionTypeRPC a combination of the gRPC proto document URI and the particular service name and service method + // name that needs to be invoked, separated by a '#'. FunctionTypeRPC FunctionType = "rpc" - // FunctionTypeExpression ... + // FunctionTypeExpression defines the expression syntax. FunctionTypeExpression FunctionType = "expression" - // FunctionTypeGraphQL ... + // FunctionTypeGraphQL a combination of the GraphQL schema definition URI and the particular service name and + // service method name that needs to be invoked, separated by a '#' FunctionTypeGraphQL FunctionType = "graphql" - // FunctionTypeAsyncAPI ... + // FunctionTypeAsyncAPI a combination of the AsyncApi definition document URI and the particular service operation + // that needs to be invoked, separated by a '#' FunctionTypeAsyncAPI FunctionType = "asyncapi" - // FunctionTypeOData ... + // FunctionTypeOData a combination of the GraphQL schema definition URI and the particular service name and service + // method name that needs to be invoked, separated by a '#' FunctionTypeOData FunctionType = "odata" + // FunctionTypeCustom property defines a list of function types that are set by the specification. Some runtime + // implementations might support additional function types that extend the ones defined in the specification + FunctionTypeCustom FunctionType = "custom" ) // FunctionType ... @@ -39,9 +47,10 @@ type Function struct { Common // Unique function name Name string `json:"name" validate:"required"` - // If type is `rest`, #. If type is `rpc`, ##. If type is `expression`, defines the workflow expression. + // If type is `rest`, #. If type is `rpc`, ##. 
+ // If type is `expression`, defines the workflow expression. If the type is `custom`, #. Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `rest`, `rpc`, `expression` or `graphql`. Default is `rest` + // Defines the function type. Is either `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `odata` or `custom`. Default is `rest` Type FunctionType `json:"type,omitempty"` // References an auth definition name to be used to access to resource defined in the operation parameter AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` diff --git a/parser/parser_test.go b/parser/parser_test.go index eea8bb4..90cef04 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -65,6 +65,17 @@ func TestFromFile(t *testing.T) { assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, + "./testdata/workflows/greetings-custom-function.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.IsType(t, &model.OperationState{}, w.States[0]) + assert.Equal(t, "custom.greeting", w.ID) + assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) + assert.NotEmpty(t, w.Functions[0]) + assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) + assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) + assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) + assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].Name) + }, "./testdata/workflows/eventbaseddataandswitch.sw.json": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Start", w.States[0].GetName()) assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) diff --git a/parser/testdata/workflows/greetings-custom-function.sw.yaml b/parser/testdata/workflows/greetings-custom-function.sw.yaml new file mode 100644 index 0000000..94f67f3 --- /dev/null +++ b/parser/testdata/workflows/greetings-custom-function.sw.yaml @@ -0,0 +1,42 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +id: custom.greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.8" +start: + stateName: Greet +functions: + - name: greetingCustomFunction + operation: /path/to/my/script/greeting.ts#CustomGreeting + type: custom + +states: + - name: Greet + type: operation + actionMode: sequential + actions: + - name: greetingCustomFunction + functionRef: + refName: greetingCustomFunction + parameters: + name: "${ .greet | .name }" + actionDataFilter: + dataResultsPath: "${ .payload | .greeting }" + stateDataFilter: + dataOutputPath: "${ .greeting }" + end: + terminate: true \ No newline at end of file From beb77e87d79758644b7f3994db22a58b1c2917e7 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 28 Sep 2022 20:03:49 +0800 Subject: [PATCH 012/110] feat(*): add v0.8 action data filter's useResults field (#82) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/action.go | 106 ++++++++++++++++++ model/action_data_filter.go | 57 ++++++++++ model/action_data_filter_test.go | 83 ++++++++++++++ model/action_test.go | 15 +++ model/function.go | 32 ------ model/workflow.go | 67 ----------- .../greetings-constants-file.sw.yaml | 2 +- .../workflows/greetings-secret-file.sw.yaml | 2 +- .../workflows/greetings-secret.sw.yaml | 2 +- parser/testdata/workflows/greetings.sw.json | 2 +- parser/testdata/workflows/greetings.sw.yaml | 2 +- .../workflows/greetings_sleep.sw.json | 2 +- 12 files changed, 267 insertions(+), 105 deletions(-) create mode 100644 model/action.go create mode 100644 model/action_data_filter.go create mode 100644 model/action_data_filter_test.go create mode 100644 model/action_test.go diff --git a/model/action.go b/model/action.go new file mode 100644 index 0000000..d3258d3 --- /dev/null +++ b/model/action.go @@ -0,0 +1,106 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" +) + +// Action ... +type Action struct { + // Unique action definition name + Name string `json:"name,omitempty"` + FunctionRef *FunctionRef `json:"functionRef,omitempty"` + // References a 'trigger' and 'result' reusable event definitions + EventRef *EventRef `json:"eventRef,omitempty"` + // References a sub-workflow to be executed + SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` + // Sleep Defines time period workflow execution should sleep before / after function execution + Sleep Sleep `json:"sleep,omitempty"` + // RetryRef References a defined workflow retry definition. If not defined the default retry policy is assumed + RetryRef string `json:"retryRef,omitempty"` + // List of unique references to defined workflow errors for which the action should not be retried. 
Used only when `autoRetries` is set to `true` + NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` + // List of unique references to defined workflow errors for which the action should be retried. Used only when `autoRetries` is set to `false` + RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` + // Action data filter + ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` +} + +// FunctionRef ... +type FunctionRef struct { + // Name of the referenced function + RefName string `json:"refName" validate:"required"` + // Function arguments + Arguments map[string]interface{} `json:"arguments,omitempty"` + // String containing a valid GraphQL selection set + SelectionSet string `json:"selectionSet,omitempty"` +} + +// UnmarshalJSON ... +func (f *FunctionRef) UnmarshalJSON(data []byte) error { + funcRef := make(map[string]interface{}) + if err := json.Unmarshal(data, &funcRef); err != nil { + f.RefName, err = unmarshalString(data) + if err != nil { + return err + } + return nil + } + + f.RefName = requiresNotNilOrEmpty(funcRef["refName"]) + if _, found := funcRef["arguments"]; found { + f.Arguments = funcRef["arguments"].(map[string]interface{}) + } + f.SelectionSet = requiresNotNilOrEmpty(funcRef["selectionSet"]) + + return nil +} + +// WorkflowRef holds a reference for a workflow definition +type WorkflowRef struct { + // Sub-workflow unique id + WorkflowID string `json:"workflowId" validate:"required"` + // Sub-workflow version + Version string `json:"version,omitempty"` +} + +// UnmarshalJSON ... +func (s *WorkflowRef) UnmarshalJSON(data []byte) error { + subflowRef := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &subflowRef); err != nil { + s.WorkflowID, err = unmarshalString(data) + if err != nil { + return err + } + return nil + } + if err := unmarshalKey("version", subflowRef, &s.Version); err != nil { + return err + } + if err := unmarshalKey("workflowId", subflowRef, &s.WorkflowID); err != nil { + return err + } + + return nil +} + +// Sleep ... +type Sleep struct { + // Before Amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. + Before string `json:"before,omitempty"` + // After Amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. + After string `json:"after,omitempty"` +} diff --git a/model/action_data_filter.go b/model/action_data_filter.go new file mode 100644 index 0000000..7fcda3e --- /dev/null +++ b/model/action_data_filter.go @@ -0,0 +1,57 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "bytes" + "encoding/json" + "fmt" +) + +// ActionDataFilter used to filter action data results. 
+type ActionDataFilter struct { + // Workflow expression that selects state data that the state action can use + FromStateData string `json:"fromStateData,omitempty"` + + // UseResults represent where action data results is added/merged to state data. If it's false, results & toStateData should be ignored. + // Defaults to true. + UseResults bool `json:"useResults,omitempty"` + + // Workflow expression that filters the actions' data results + Results string `json:"results,omitempty"` + // Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element + ToStateData string `json:"toStateData,omitempty"` +} + +type actionDataFilterForUnmarshal ActionDataFilter + +func (f *ActionDataFilter) UnmarshalJSON(data []byte) error { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + v := actionDataFilterForUnmarshal{ + UseResults: true, + } + err := json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + + *f = ActionDataFilter(v) + return nil +} diff --git a/model/action_data_filter_test.go b/model/action_data_filter_test.go new file mode 100644 index 0000000..54811fa --- /dev/null +++ b/model/action_data_filter_test.go @@ -0,0 +1,83 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestActionDataFilterUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect ActionDataFilter + err string + } + testCases := []testCase{ + { + desp: "normal test", + data: `{"fromStateData": "1", "results": "2", "toStateData": "3"}`, + expect: ActionDataFilter{ + FromStateData: "1", + Results: "2", + ToStateData: "3", + UseResults: true, + }, + err: ``, + }, + { + desp: "add UseData to false", + data: `{"fromStateData": "1", "results": "2", "toStateData": "3", "useResults": false}`, + expect: ActionDataFilter{ + FromStateData: "1", + Results: "2", + ToStateData: "3", + UseResults: false, + }, + err: ``, + }, + { + desp: "empty data", + data: ` `, + expect: ActionDataFilter{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json format", + data: `{"fromStateData": 1, "results": "2", "toStateData": "3"}`, + expect: ActionDataFilter{}, + err: `json: cannot unmarshal number into Go struct field actionDataFilterForUnmarshal.fromStateData of type string`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v ActionDataFilter + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/action_test.go b/model/action_test.go new file mode 100644 index 0000000..c960f3c --- /dev/null +++ b/model/action_test.go @@ -0,0 +1,15 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model diff --git a/model/function.go b/model/function.go index 74ba1b9..e3877fe 100644 --- a/model/function.go +++ b/model/function.go @@ -14,8 +14,6 @@ package model -import "encoding/json" - const ( // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service // operation that needs to be invoked, separated by a '#'. @@ -55,33 +53,3 @@ type Function struct { // References an auth definition name to be used to access to resource defined in the operation parameter AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` } - -// FunctionRef ... -type FunctionRef struct { - // Name of the referenced function - RefName string `json:"refName" validate:"required"` - // Function arguments - Arguments map[string]interface{} `json:"arguments,omitempty"` - // String containing a valid GraphQL selection set - SelectionSet string `json:"selectionSet,omitempty"` -} - -// UnmarshalJSON ... 
-func (f *FunctionRef) UnmarshalJSON(data []byte) error { - funcRef := make(map[string]interface{}) - if err := json.Unmarshal(data, &funcRef); err != nil { - f.RefName, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - - f.RefName = requiresNotNilOrEmpty(funcRef["refName"]) - if _, found := funcRef["arguments"]; found { - f.Arguments = funcRef["arguments"].(map[string]interface{}) - } - f.SelectionSet = requiresNotNilOrEmpty(funcRef["selectionSet"]) - - return nil -} diff --git a/model/workflow.go b/model/workflow.go index f6cf70f..b62f260 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -206,34 +206,6 @@ func (w *Workflow) setDefaults() { } } -// WorkflowRef holds a reference for a workflow definition -type WorkflowRef struct { - // Sub-workflow unique id - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - Version string `json:"version,omitempty"` -} - -// UnmarshalJSON ... -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - subflowRef := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &subflowRef); err != nil { - s.WorkflowID, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("version", subflowRef, &s.Version); err != nil { - return err - } - if err := unmarshalKey("workflowId", subflowRef, &s.WorkflowID); err != nil { - return err - } - - return nil -} - // Timeouts ... type Timeouts struct { // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' @@ -474,27 +446,6 @@ type OnEvents struct { EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` } -// Action ... -type Action struct { - // Unique action definition name - Name string `json:"name,omitempty"` - FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions - EventRef *EventRef `json:"eventRef,omitempty"` - // References a sub-workflow to be executed - SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` - // Sleep Defines time period workflow execution should sleep before / after function execution - Sleep Sleep `json:"sleep,omitempty"` - // RetryRef References a defined workflow retry definition. If not defined the default retry policy is assumed - RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. Used only when `autoRetries` is set to `true` - NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. Used only when `autoRetries` is set to `false` - RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Action data filter - ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` -} - // End definition type End struct { // If true, completes all execution flows in the given workflow instance @@ -578,16 +529,6 @@ type BranchTimeouts struct { BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` } -// ActionDataFilter ... 
-type ActionDataFilter struct { - // Workflow expression that selects state data that the state action can use - FromStateData string `json:"fromStateData,omitempty"` - // Workflow expression that filters the actions' data results - Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element - ToStateData string `json:"toStateData,omitempty"` -} - // DataInputSchema ... type DataInputSchema struct { Schema string `json:"schema" validate:"required"` @@ -656,11 +597,3 @@ func (c *Constants) UnmarshalJSON(data []byte) error { c.Data = constantData return nil } - -// Sleep ... -type Sleep struct { - // Before Amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. - Before string `json:"before,omitempty"` - // After Amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. - After string `json:"after,omitempty"` -} diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml index a512c33..383147d 100644 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ b/parser/testdata/workflows/greetings-constants-file.sw.yaml @@ -33,7 +33,7 @@ states: parameters: name: "${ SECRETS | .SECRET1 }" actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" + toStateData: "${ .payload | .greeting }" stateDataFilter: dataOutputPath: "${ .greeting }" end: diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml index 510632d..193c82b 100644 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ b/parser/testdata/workflows/greetings-secret-file.sw.yaml @@ -33,7 +33,7 @@ states: parameters: name: "${ .SECRETS | .SECRET1 }" actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" + toStateData: "${ .payload | .greeting }" stateDataFilter: dataOutputPath: "${ .greeting }" end: diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml index 6d21849..6fbe5e0 100644 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ b/parser/testdata/workflows/greetings-secret.sw.yaml @@ -34,7 +34,7 @@ states: parameters: name: "${ .SECRETS | .NAME }" actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" + toStateData: "${ .payload | .greeting }" stateDataFilter: dataOutputPath: "${ .greeting }" end: diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json index b43eab0..a9ba296 100644 --- a/parser/testdata/workflows/greetings.sw.json +++ b/parser/testdata/workflows/greetings.sw.json @@ -26,7 +26,7 @@ } }, "actionDataFilter": { - "dataResultsPath": "${ .greeting }" + "toStateData": "${ .greeting }" } } ], diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml index 9280ca3..398d56c 100644 --- a/parser/testdata/workflows/greetings.sw.yaml +++ b/parser/testdata/workflows/greetings.sw.yaml @@ -32,7 +32,7 @@ states: parameters: name: "${ .greet | .name }" actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" + toStateData: "${ .payload | .greeting }" stateDataFilter: dataOutputPath: "${ .greeting }" end: diff --git a/parser/testdata/workflows/greetings_sleep.sw.json 
b/parser/testdata/workflows/greetings_sleep.sw.json index ab6d89b..c2a1a1a 100644 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ b/parser/testdata/workflows/greetings_sleep.sw.json @@ -34,7 +34,7 @@ } }, "actionDataFilter": { - "dataResultsPath": "${ .greeting }" + "toStateData": "${ .greeting }" } } ], From 9384e9aec2b856c1e0d4b52fffddd2cd3bf8e2c6 Mon Sep 17 00:00:00 2001 From: Li Heng Date: Thu, 29 Sep 2022 22:13:03 +0800 Subject: [PATCH 013/110] feat(*): add v0.8 action data filter's condition field (#83) Signed-off-by: LiHeng.Teh Signed-off-by: LiHeng.Teh --- model/action.go | 2 + parser/parser_test.go | 4 ++ .../workflows/conditionbasedstate.yaml | 40 +++++++++++++++++++ 3 files changed, 46 insertions(+) create mode 100644 parser/testdata/workflows/conditionbasedstate.yaml diff --git a/model/action.go b/model/action.go index d3258d3..04f9545 100644 --- a/model/action.go +++ b/model/action.go @@ -37,6 +37,8 @@ type Action struct { RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` // Action data filter ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` + // Workflow expression evaluated against state data. Must evaluate to true or false + Condition string `json:"condition,omitempty"` } // FunctionRef ... diff --git a/parser/parser_test.go b/parser/parser_test.go index 90cef04..f8966bf 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -82,6 +82,10 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) }, + "./testdata/workflows/conditionbasedstate.yaml": func(t *testing.T, w *model.Workflow) { + operationState := w.States[0].(*model.OperationState) + assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) + }, "./testdata/workflows/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.IsType(t, &model.EventState{}, w.States[0]) diff --git a/parser/testdata/workflows/conditionbasedstate.yaml b/parser/testdata/workflows/conditionbasedstate.yaml new file mode 100644 index 0000000..f42b56d --- /dev/null +++ b/parser/testdata/workflows/conditionbasedstate.yaml @@ -0,0 +1,40 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
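Once a definition like the conditionbasedstate.yaml file added in this patch is parsed, the new Condition field can be read back as below; a hedged sketch that assumes the parser package import path github.com/serverlessworkflow/sdk-go/v2/parser and execution from the repository root.

```go
package main

import (
	"fmt"
	"log"

	"github.com/serverlessworkflow/sdk-go/v2/model"
	"github.com/serverlessworkflow/sdk-go/v2/parser" // import path assumed from the repository layout
)

func main() {
	// Test data added by this patch.
	w, err := parser.FromFile("parser/testdata/workflows/conditionbasedstate.yaml")
	if err != nil {
		log.Fatal(err)
	}
	op, ok := w.States[0].(*model.OperationState)
	if !ok {
		log.Fatal("expected the first state to be an operation state")
	}
	// Condition is the v0.8 workflow expression guarding the action.
	fmt.Println(op.Actions[0].Condition) // ${ .applicants | .age < 18 }
}
```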
+ +id: greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.8" +start: + stateName: Greet +functions: + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting +states: + - name: Greet + type: operation + actionMode: sequential + actions: + - functionRef: + refName: greetingFunction + parameters: + name: "${ .greet | .name }" + actionDataFilter: + toStateData: "${ .payload | .greeting }" + condition: "${ .applicants | .age < 18 }" + stateDataFilter: + dataOutputPath: "${ .greeting }" + end: + terminate: true \ No newline at end of file From 503471e8c981dff1e0c58651da75f05dd90d5463 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 29 Sep 2022 11:13:47 -0300 Subject: [PATCH 014/110] v08 spec alignment - Workflow name no longer a required property (#81) * v08 spec alignment - Workflow name no longer a required property Signed-off-by: spolti * remove validate Signed-off-by: spolti Signed-off-by: spolti --- model/workflow.go | 2 +- parser/parser_test.go | 33 ++++++++++++++++--- ...ion.sw.yaml => greetings-v08-spec.sw.yaml} | 1 - 3 files changed, 29 insertions(+), 7 deletions(-) rename parser/testdata/workflows/{greetings-custom-function.sw.yaml => greetings-v08-spec.sw.yaml} (98%) diff --git a/model/workflow.go b/model/workflow.go index b62f260..bbaca6c 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -58,7 +58,7 @@ type BaseWorkflow struct { // Key Domain-specific workflow identifier Key string `json:"key,omitempty" validate:"omitempty,min=1"` // Workflow name - Name string `json:"name" validate:"required"` + Name string `json:"name,omitempty"` // Workflow description Description string `json:"description,omitempty"` // Workflow version diff --git a/parser/parser_test.go b/parser/parser_test.go index f8966bf..3c019de 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -31,7 +31,6 @@ func TestBasicValidation(t *testing.T) { if !file.IsDir() { workflow, err := FromFile(filepath.Join(rootPath, file.Name())) if assert.NoError(t, err, "Test File %s", file.Name()) { - assert.NotEmpty(t, workflow.Name, "Test File %s", file.Name()) assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) } @@ -54,18 +53,21 @@ func TestCustomValidators(t *testing.T) { func TestFromFile(t *testing.T) { files := map[string]func(*testing.T, *model.Workflow){ "./testdata/workflows/greetings.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) assert.Equal(t, "greeting", w.ID) assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, "./testdata/workflows/greetings.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "greeting", w.ID) assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, - "./testdata/workflows/greetings-custom-function.sw.yaml": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/greetings-v08-spec.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Empty(t, w.Name) assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "custom.greeting", w.ID) assert.NotEmpty(t, 
w.States[0].(*model.OperationState).Actions) @@ -77,6 +79,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].Name) }, "./testdata/workflows/eventbaseddataandswitch.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Switch Transitions", w.Name) assert.Equal(t, "Start", w.States[0].GetName()) assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) @@ -87,6 +90,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) }, "./testdata/workflows/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) @@ -96,6 +100,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, true, eventState.Exclusive) }, "./testdata/workflows/eventbasedgreetingexclusive.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.Equal(t, "GreetingEvent2", w.Events[1].Name) assert.IsType(t, &model.EventState{}, w.States[0]) @@ -107,6 +112,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, true, eventState.Exclusive) }, "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.Equal(t, "GreetingEvent2", w.Events[1].Name) assert.IsType(t, &model.EventState{}, w.States[0]) @@ -118,6 +124,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, false, eventState.Exclusive) }, "./testdata/workflows/eventbasedgreeting.sw.p.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) @@ -126,6 +133,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) }, "./testdata/workflows/eventbasedswitch.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Switch Transitions", w.Name) assert.IsType(t, &model.EventBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.EventBasedSwitchState) assert.NotNil(t, eventState) @@ -134,6 +142,7 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) }, "./testdata/workflows/applicationrequest.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -155,6 +164,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "test_token", bearerProperties) }, "./testdata/workflows/applicationrequest.multiauth.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -179,9 +189,9 @@ func TestFromFile(t *testing.T) { basicProperties := 
w.Auth.Defs[1].Properties.(*model.BasicAuthProperties) assert.Equal(t, "test_user", basicProperties.Username) assert.Equal(t, "test_pwd", basicProperties.Password) - }, "./testdata/workflows/applicationrequest.rp.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -198,6 +208,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) }, "./testdata/workflows/checkinbox.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Check Inbox Workflow", w.Name) assert.IsType(t, &model.OperationState{}, w.States[0]) operationState := w.States[0].(*model.OperationState) assert.NotNil(t, operationState) @@ -206,6 +217,7 @@ func TestFromFile(t *testing.T) { }, // validates: https://github.com/serverlessworkflow/specification/pull/175/ "./testdata/workflows/provisionorders.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Provision Orders", w.Name) assert.IsType(t, &model.OperationState{}, w.States[0]) operationState := w.States[0].(*model.OperationState) assert.NotNil(t, operationState) @@ -217,12 +229,16 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) - }, "./testdata/workflows/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { + }, + "./testdata/workflows/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Check Inbox Workflow", w.Name) assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) assert.False(t, w.States[1].GetEnd().Terminate) - }, "./testdata/workflows/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { + }, + "./testdata/workflows/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) dataBaseSwitchState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, dataBaseSwitchState) @@ -231,6 +247,7 @@ func TestFromFile(t *testing.T) { }, // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 "./testdata/workflows/patientonboarding.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Patient Onboarding Workflow", w.Name) assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) assert.NotNil(t, eventState) @@ -240,26 +257,32 @@ func TestFromFile(t *testing.T) { assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) }, "./testdata/workflows/greetings-secret.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) assert.Len(t, w.Secrets, 1) }, "./testdata/workflows/greetings-secret-file.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) assert.Len(t, w.Secrets, 3) }, "./testdata/workflows/greetings-constants-file.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting 
Workflow", w.Name) assert.NotEmpty(t, w.Constants) assert.NotEmpty(t, w.Constants.Data["Translations"]) }, "./testdata/workflows/roomreadings.timeouts.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) assert.NotNil(t, w.Timeouts) assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) }, "./testdata/workflows/roomreadings.timeouts.file.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) assert.NotNil(t, w.Timeouts) assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) }, "./testdata/workflows/purchaseorderworkflow.sw.json": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Purchase Order Workflow", w.Name) assert.NotNil(t, w.Timeouts) assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) diff --git a/parser/testdata/workflows/greetings-custom-function.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml similarity index 98% rename from parser/testdata/workflows/greetings-custom-function.sw.yaml rename to parser/testdata/workflows/greetings-v08-spec.sw.yaml index 94f67f3..f5858dd 100644 --- a/parser/testdata/workflows/greetings-custom-function.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -14,7 +14,6 @@ id: custom.greeting version: '1.0' -name: Greeting Workflow description: Greet Someone specVersion: "0.8" start: From 3af7606c263b9568d4527c076d676c5dfb37a068 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Fri, 30 Sep 2022 11:22:14 -0300 Subject: [PATCH 015/110] v08 spec alignment - Workflow start no longer a required property (#84) * v08 spec alignment - Workflow start no longer a required property Signed-off-by: spolti * Update model/workflow.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: spolti Signed-off-by: spolti --- model/workflow.go | 2 +- parser/parser_test.go | 1 + parser/testdata/workflows/greetings-v08-spec.sw.yaml | 2 -- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/model/workflow.go b/model/workflow.go index bbaca6c..ba86143 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -63,7 +63,7 @@ type BaseWorkflow struct { Description string `json:"description,omitempty"` // Workflow version Version string `json:"version" validate:"omitempty,min=1"` - Start *Start `json:"start" validate:"required"` + Start *Start `json:"start,omitempty"` // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important qualities Annotations []string `json:"annotations,omitempty"` // DataInputSchema URI of the JSON Schema used to validate the workflow data input diff --git a/parser/parser_test.go b/parser/parser_test.go index 3c019de..d9d02cd 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -68,6 +68,7 @@ func TestFromFile(t *testing.T) { }, "./testdata/workflows/greetings-v08-spec.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.Empty(t, w.Name) + assert.Empty(t, w.Start) assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "custom.greeting", w.ID) assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml 
b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index f5858dd..756f801 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -16,8 +16,6 @@ id: custom.greeting version: '1.0' description: Greet Someone specVersion: "0.8" -start: - stateName: Greet functions: - name: greetingCustomFunction operation: /path/to/my/script/greeting.ts#CustomGreeting From ddce57902533679f08e55b9110d2be6195282af8 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Fri, 30 Sep 2022 12:42:44 -0300 Subject: [PATCH 016/110] =?UTF-8?q?v08=20spec=20alignment=20-=20ForEach=20?= =?UTF-8?q?state=20iterationParam=20no=20longer=20a=20require=E2=80=A6=20(?= =?UTF-8?q?#85)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * v08 spec alignment - ForEach state iterationParam no longer a required property Signed-off-by: spolti * Update model/states.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: spolti Signed-off-by: spolti --- go.sum | 5 ----- model/states.go | 2 +- .../testdata/workflows/greetings-v08-spec.sw.yaml | 14 ++++++++++++-- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/go.sum b/go.sum index 53840d5..a82bf18 100644 --- a/go.sum +++ b/go.sum @@ -37,8 +37,6 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 h1:Dz0HrI1AtNSGCE8LXLLqoZU4iuOJXPWndenCsZfstA8= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46/go.mod h1:is8FVkzSi7PYLWEXT5MgWhglFsyyiW8ffxAoJqfuFZo= -github.com/serverlessworkflow/sdk-go v1.0.0 h1:XsRtESODZhyvwHYgARH2VuubiZytME3jiJ61zwGj2YQ= -github.com/serverlessworkflow/sdk-go v1.0.0/go.mod h1:y8Va8RTSHEGShsyISobSCKiniKiBKTUne73pY5sux0E= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= @@ -89,8 +87,6 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= @@ -103,6 +99,5 @@ k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= sigs.k8s.io/yaml 
v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/model/states.go b/model/states.go index b11c788..f2c379d 100644 --- a/model/states.go +++ b/model/states.go @@ -237,7 +237,7 @@ type ForEachState struct { // Workflow expression specifying an array element of the states data to add the results of each iteration OutputCollection string `json:"outputCollection,omitempty"` // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array - IterationParam string `json:"iterationParam" validate:"required"` + IterationParam string `json:"iterationParam,omitempty"` // Specifies how upper bound on how many iterations may run in parallel BatchSize intstr.IntOrString `json:"batchSize,omitempty"` // Actions to be executed for each of the elements of inputCollection diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 756f801..c29be6b 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -20,6 +20,8 @@ functions: - name: greetingCustomFunction operation: /path/to/my/script/greeting.ts#CustomGreeting type: custom + - name: sendTextFunction + operation: http://myapis.org/inboxapi.json#sendText states: - name: Greet @@ -35,5 +37,13 @@ states: dataResultsPath: "${ .payload | .greeting }" stateDataFilter: dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file + transition: SendTextForHighPriority + - name: SendTextForHighPriority + type: foreach + inputCollection: "${ .messages }" + actions: + - functionRef: + refName: sendTextFunction + arguments: + message: "${ .singlemessage }" + end: true From ba75ee10f920fb7f20a0a0a6197a13ce075d756d Mon Sep 17 00:00:00 2001 From: Li Heng Date: Mon, 3 Oct 2022 23:19:07 +0800 Subject: [PATCH 017/110] refactor to table test (#89) Signed-off-by: LiHeng Signed-off-by: LiHeng --- parser/parser_test.go | 521 ++++++++++++++++++++++-------------------- 1 file changed, 279 insertions(+), 242 deletions(-) diff --git a/parser/parser_test.go b/parser/parser_test.go index d9d02cd..6a07113 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -51,249 +51,286 @@ func TestCustomValidators(t *testing.T) { } func TestFromFile(t *testing.T) { - files := map[string]func(*testing.T, *model.Workflow){ - "./testdata/workflows/greetings.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - }, - "./testdata/workflows/greetings.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) - assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - }, - "./testdata/workflows/greetings-v08-spec.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Empty(t, w.Name) - assert.Empty(t, w.Start) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "custom.greeting", w.ID) - assert.NotEmpty(t, 
w.States[0].(*model.OperationState).Actions) - assert.NotEmpty(t, w.Functions[0]) - assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) - assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) - assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].Name) - }, - "./testdata/workflows/eventbaseddataandswitch.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.Equal(t, "Start", w.States[0].GetName()) - assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) - }, - "./testdata/workflows/conditionbasedstate.yaml": func(t *testing.T, w *model.Workflow) { - operationState := w.States[0].(*model.OperationState) - assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) - }, - "./testdata/workflows/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - "./testdata/workflows/eventbasedgreetingexclusive.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) - assert.Equal(t, false, eventState.Exclusive) - }, - "./testdata/workflows/eventbasedgreeting.sw.p.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - }, - "./testdata/workflows/eventbasedswitch.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.IsType(t, 
&model.EventBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.EventBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.EventConditions) - assert.NotEmpty(t, eventState.Name) - assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) - }, - "./testdata/workflows/applicationrequest.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 1) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token - assert.Equal(t, "test_token", bearerProperties) - }, - "./testdata/workflows/applicationrequest.multiauth.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 2) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token - assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", w.Auth.Defs[1].Name) - assert.Equal(t, model.AuthTypeBasic, w.Auth.Defs[1].Scheme) - basicProperties := w.Auth.Defs[1].Properties.(*model.BasicAuthProperties) - assert.Equal(t, "test_user", basicProperties.Username) - assert.Equal(t, "test_pwd", basicProperties.Password) - }, - "./testdata/workflows/applicationrequest.rp.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - 
"./testdata/workflows/applicationrequest.url.json": func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - "./testdata/workflows/checkinbox.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.IsType(t, &model.OperationState{}, w.States[0]) - operationState := w.States[0].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, w.States, 2) - }, - // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/workflows/provisionorders.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Provision Orders", w.Name) - assert.IsType(t, &model.OperationState{}, w.States[0]) - operationState := w.States[0].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, operationState.OnErrors, 3) - assert.Equal(t, "Missing order id", operationState.OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", operationState.OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", operationState.OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) - }, - "./testdata/workflows/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) - assert.False(t, w.States[1].GetEnd().Terminate) - }, - "./testdata/workflows/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - dataBaseSwitchState := w.States[0].(*model.DataBasedSwitchState) - assert.NotNil(t, dataBaseSwitchState) - assert.NotEmpty(t, dataBaseSwitchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].GetName()) - }, - // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/workflows/patientonboarding.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Patient Onboarding Workflow", w.Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, w.Retries) - assert.Len(t, w.Retries, 1) - assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) - assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) - }, - "./testdata/workflows/greetings-secret.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 1) - }, - "./testdata/workflows/greetings-secret-file.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 3) 
- }, - "./testdata/workflows/greetings-constants-file.sw.yaml": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotEmpty(t, w.Constants) - assert.NotEmpty(t, w.Constants.Data["Translations"]) - }, - "./testdata/workflows/roomreadings.timeouts.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - "./testdata/workflows/roomreadings.timeouts.file.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - "./testdata/workflows/purchaseorderworkflow.sw.json": func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Purchase Order Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) + files := []struct { + name string + f func(*testing.T, *model.Workflow) + }{ + { + "./testdata/workflows/greetings.sw.json", + func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) + assert.Equal(t, "greeting", w.ID) + assert.IsType(t, &model.OperationState{}, w.States[0]) + assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + }, + }, { + "./testdata/workflows/greetings.sw.yaml", + func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) + assert.IsType(t, &model.OperationState{}, w.States[0]) + assert.Equal(t, "greeting", w.ID) + assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) + assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) + assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + }, + }, { + "./testdata/workflows/greetings-v08-spec.sw.yaml", + func(t *testing.T, w *model.Workflow) { + assert.Empty(t, w.Name) + assert.Empty(t, w.Start) + assert.IsType(t, &model.OperationState{}, w.States[0]) + assert.Equal(t, "custom.greeting", w.ID) + assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) + assert.NotEmpty(t, w.Functions[0]) + assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) + assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) + assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) + assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].Name) + }, + }, { + "./testdata/workflows/eventbaseddataandswitch.sw.json", + func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Switch Transitions", w.Name) + assert.Equal(t, "Start", w.States[0].GetName()) + assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) + }, + }, { + "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { + operationState := w.States[0].(*model.OperationState) + assert.Equal(t, "${ .applicants | .age < 18 }", 
operationState.Actions[0].Condition) + }, + }, { + "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) + assert.Equal(t, "GreetingEvent", w.Events[0].Name) + assert.IsType(t, &model.EventState{}, w.States[0]) + eventState := w.States[0].(*model.EventState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.OnEvents) + assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) + assert.Equal(t, true, eventState.Exclusive) + }, + }, { + "./testdata/workflows/eventbasedgreetingexclusive.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) + assert.Equal(t, "GreetingEvent", w.Events[0].Name) + assert.Equal(t, "GreetingEvent2", w.Events[1].Name) + assert.IsType(t, &model.EventState{}, w.States[0]) + eventState := w.States[0].(*model.EventState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.OnEvents) + assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) + assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) + assert.Equal(t, true, eventState.Exclusive) + }, + }, { + "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) + assert.Equal(t, "GreetingEvent", w.Events[0].Name) + assert.Equal(t, "GreetingEvent2", w.Events[1].Name) + assert.IsType(t, &model.EventState{}, w.States[0]) + eventState := w.States[0].(*model.EventState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.OnEvents) + assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) + assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) + assert.Equal(t, false, eventState.Exclusive) + }, + }, { + "./testdata/workflows/eventbasedgreeting.sw.p.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Greeting Workflow", w.Name) + assert.Equal(t, "GreetingEvent", w.Events[0].Name) + assert.IsType(t, &model.EventState{}, w.States[0]) + eventState := w.States[0].(*model.EventState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.OnEvents) + assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) + }, + }, { + "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Event Based Switch Transitions", w.Name) + assert.IsType(t, &model.EventBasedSwitchState{}, w.States[0]) + eventState := w.States[0].(*model.EventBasedSwitchState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.EventConditions) + assert.NotEmpty(t, eventState.Name) + assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) + }, + }, { + "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + eventState := w.States[0].(*model.DataBasedSwitchState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.DataConditions) + assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) + assert.Equal(t, "CheckApplication", w.Start.StateName) + assert.IsType(t, &model.OperationState{}, w.States[1]) + operationState := w.States[1].(*model.OperationState) + assert.NotNil(t, operationState) + assert.NotEmpty(t, operationState.Actions) 
+ assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) + assert.NotNil(t, w.Auth) + assert.NotNil(t, w.Auth.Defs) + assert.Equal(t, len(w.Auth.Defs), 1) + assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) + assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) + bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token + assert.Equal(t, "test_token", bearerProperties) + }, + }, { + "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + eventState := w.States[0].(*model.DataBasedSwitchState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.DataConditions) + assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) + assert.Equal(t, "CheckApplication", w.Start.StateName) + assert.IsType(t, &model.OperationState{}, w.States[1]) + operationState := w.States[1].(*model.OperationState) + assert.NotNil(t, operationState) + assert.NotEmpty(t, operationState.Actions) + assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) + assert.NotNil(t, w.Auth) + assert.NotNil(t, w.Auth.Defs) + assert.Equal(t, len(w.Auth.Defs), 2) + assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) + assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) + bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token + assert.Equal(t, "test_token", bearerProperties) + assert.Equal(t, "testAuth2", w.Auth.Defs[1].Name) + assert.Equal(t, model.AuthTypeBasic, w.Auth.Defs[1].Scheme) + basicProperties := w.Auth.Defs[1].Properties.(*model.BasicAuthProperties) + assert.Equal(t, "test_user", basicProperties.Username) + assert.Equal(t, "test_pwd", basicProperties.Password) + }, + }, { + "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + eventState := w.States[0].(*model.DataBasedSwitchState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.DataConditions) + assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) + }, + }, { + "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + eventState := w.States[0].(*model.DataBasedSwitchState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.DataConditions) + assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) + }, + }, { + "./testdata/workflows/checkinbox.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Check Inbox Workflow", w.Name) + assert.IsType(t, &model.OperationState{}, w.States[0]) + operationState := w.States[0].(*model.OperationState) + assert.NotNil(t, operationState) + assert.NotEmpty(t, operationState.Actions) + assert.Len(t, w.States, 2) + }, + }, { + // validates: https://github.com/serverlessworkflow/specification/pull/175/ + "./testdata/workflows/provisionorders.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Provision Orders", w.Name) + assert.IsType(t, 
&model.OperationState{}, w.States[0]) + operationState := w.States[0].(*model.OperationState) + assert.NotNil(t, operationState) + assert.NotEmpty(t, operationState.Actions) + assert.Len(t, operationState.OnErrors, 3) + assert.Equal(t, "Missing order id", operationState.OnErrors[0].ErrorRef) + assert.Equal(t, "MissingId", operationState.OnErrors[0].Transition.NextState) + assert.Equal(t, "Missing order item", operationState.OnErrors[1].ErrorRef) + assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) + assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) + assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) + }, + }, { + "./testdata/workflows/checkinbox.cron-test.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Check Inbox Workflow", w.Name) + assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) + assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) + assert.False(t, w.States[1].GetEnd().Terminate) + }, + }, { + "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Applicant Request Decision Workflow", w.Name) + assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) + dataBaseSwitchState := w.States[0].(*model.DataBasedSwitchState) + assert.NotNil(t, dataBaseSwitchState) + assert.NotEmpty(t, dataBaseSwitchState.DataConditions) + assert.Equal(t, "CheckApplication", w.States[0].GetName()) + }, + }, { + // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 + "./testdata/workflows/patientonboarding.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Patient Onboarding Workflow", w.Name) + assert.IsType(t, &model.EventState{}, w.States[0]) + eventState := w.States[0].(*model.EventState) + assert.NotNil(t, eventState) + assert.NotEmpty(t, w.Retries) + assert.Len(t, w.Retries, 1) + assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) + assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) + }, + }, { + "./testdata/workflows/greetings-secret.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) + assert.Len(t, w.Secrets, 1) + }, + }, { + "./testdata/workflows/greetings-secret-file.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) + assert.Len(t, w.Secrets, 3) + }, + }, { + "./testdata/workflows/greetings-constants-file.sw.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Greeting Workflow", w.Name) + assert.NotEmpty(t, w.Constants) + assert.NotEmpty(t, w.Constants.Data["Translations"]) + }, + }, { + "./testdata/workflows/roomreadings.timeouts.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, + }, { + "./testdata/workflows/roomreadings.timeouts.file.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, + }, { + 
"./testdata/workflows/purchaseorderworkflow.sw.json", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Purchase Order Workflow", w.Name) + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, }, } - for file, f := range files { - workflow, err := FromFile(file) - if assert.NoError(t, err, "Test File %s", file) { - assert.NotNil(t, workflow, "Test File %s", file) - f(t, workflow) - } + for _, file := range files { + t.Run( + file.name, func(t *testing.T) { + workflow, err := FromFile(file.name) + if assert.NoError(t, err, "Test File %s", file) { + assert.NotNil(t, workflow, "Test File %s", file) + file.f(t, workflow) + } + }, + ) } } From 4be1862519717d103a059840570c31e52f2b46dd Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Mon, 3 Oct 2022 13:22:38 -0300 Subject: [PATCH 018/110] v08 spec aligment - Added resultEventTimeout for action eventref (#90) * v08 spec aligment - Added resultEventTimeout for action eventref Signed-off-by: spolti * review update Signed-off-by: spolti Signed-off-by: spolti --- README.md | 2 +- model/event.go | 24 ++++++- model/event_test.go | 63 +++++++++++++++++++ .../workflows/greetings-v08-spec.sw.yaml | 4 ++ 4 files changed, 89 insertions(+), 4 deletions(-) create mode 100644 model/event_test.go diff --git a/README.md b/README.md index 658a254..27725ec 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ Current status of features implemented in the SDK is listed in the table below: | Parse workflow JSON and YAML definitions | :heavy_check_mark: | | Programmatically build workflow definitions | :no_entry_sign: | | Validate workflow definitions (Schema) | :heavy_check_mark: | -| Validate workflow definitions (Integrity) | :heavy_check_mark: | +| Validate workflow definitions (Integrity) | :heavy_check_mark: | | Generate workflow diagram (SVG) | :no_entry_sign: | ## Status diff --git a/model/event.go b/model/event.go index cf44d99..1d25ad0 100644 --- a/model/event.go +++ b/model/event.go @@ -15,10 +15,10 @@ package model import ( + val "github.com/serverlessworkflow/sdk-go/v2/validator" "reflect" validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) const ( @@ -30,17 +30,29 @@ const ( func init() { val.GetValidator().RegisterStructValidation(EventStructLevelValidation, Event{}) + val.GetValidator().RegisterStructValidation(EventRefStructLevelValidation, EventRef{}) } // EventStructLevelValidation custom validator for event kind consumed func EventStructLevelValidation(structLevel validator.StructLevel) { event := structLevel.Current().Interface().(Event) - if event.Kind == EventKindConsumed && len(event.Type) == 0 { structLevel.ReportError(reflect.ValueOf(event.Type), "Type", "type", "reqtypeconsumed", "") } } +// EventRefStructLevelValidation custom validator for event kind consumed +func EventRefStructLevelValidation(structLevel validator.StructLevel) { + eventRef := structLevel.Current().Interface().(EventRef) + + if len(eventRef.ResultEventTimeout) > 0 { + err := val.ValidateISO8601TimeDuration(eventRef.ResultEventTimeout) + if err != nil { + structLevel.ReportError(reflect.ValueOf(eventRef.ResultEventTimeout), "ResultEventTimeout", "resultEventTimeout", "reqiso8601duration", "") + } + } +} + // EventKind ... 
type EventKind string @@ -75,9 +87,15 @@ type EventRef struct { TriggerEventRef string `json:"triggerEventRef" validate:"required"` // Reference to the unique name of a 'consumed' event definition ResultEventRef string `json:"resultEventRef" validate:"required"` + // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it will be set to the 'actionExecTimeout' + ResultEventTimeout string `json:"resultEventTimeout,omitempty"` // TODO: create StringOrMap structure - // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. + // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. + // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. Data interface{} `json:"data,omitempty"` // Add additional extension context attributes to the produced event ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` } + +// InvokeKind ... +type InvokeKind string diff --git a/model/event_test.go b/model/event_test.go new file mode 100644 index 0000000..cdc63db --- /dev/null +++ b/model/event_test.go @@ -0,0 +1,63 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestEventRefStructLevelValidation(t *testing.T) { + type testCase struct { + name string + eventRef EventRef + err string + } + + testCases := []testCase{ + { + name: "valid resultEventTimeout", + eventRef: EventRef{ + TriggerEventRef: "example valid", + ResultEventRef: "example valid", + ResultEventTimeout: "PT1H", + }, + err: ``, + }, + { + name: "invalid resultEventTimeout", + eventRef: EventRef{ + TriggerEventRef: "example invalid", + ResultEventRef: "example invalid red", + ResultEventTimeout: "10hs", + }, + err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'reqiso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := val.GetValidator().Struct(tc.eventRef) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index c29be6b..919f153 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -35,6 +35,10 @@ states: name: "${ .greet | .name }" actionDataFilter: dataResultsPath: "${ .payload | .greeting }" + - eventRef: + triggerEventRef: example + resultEventRef: example + resultEventTimeout: PT1H stateDataFilter: dataOutputPath: "${ .greeting }" transition: SendTextForHighPriority From b4eb66fb491c44ea02dbd9f7e28dc975542a1378 Mon Sep 17 00:00:00 2001 From: Calvin McLean Date: Mon, 3 Oct 2022 10:21:22 -0700 Subject: [PATCH 019/110] Remove pointer map used to get AuthProperties (#87) * Remove pointer map used to get AuthProperties Fixes #86 Signed-off-by: Calvin McLean * Improve testing for Unmarshal AuthProperties Signed-off-by: Calvin McLean Signed-off-by: Calvin McLean --- model/auth.go | 22 +++++++++------ model/auth_test.go | 68 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 8 deletions(-) diff --git a/model/auth.go b/model/auth.go index 4c0120a..db528e5 100644 --- a/model/auth.go +++ b/model/auth.go @@ -70,11 +70,16 @@ const ( GrantTypeTokenExchange GrantType = "tokenExchange" ) -// authTypesMapping map to support JSON unmarshalling when guessing the auth scheme -var authTypesMapping = map[AuthType]AuthProperties{ - AuthTypeBasic: &BasicAuthProperties{}, - AuthTypeBearer: &BearerAuthProperties{}, - AuthTypeOAuth2: &OAuth2AuthProperties{}, +func getAuthProperties(authType AuthType) (AuthProperties, bool) { + switch authType { + case AuthTypeBasic: + return &BasicAuthProperties{}, true + case AuthTypeBearer: + return &BearerAuthProperties{}, true + case AuthTypeOAuth2: + return &OAuth2AuthProperties{}, true + } + return nil, false } // Auth ... 
@@ -150,11 +155,12 @@ func (a *Auth) UnmarshalJSON(data []byte) error { if len(a.Scheme) == 0 { a.Scheme = AuthTypeBasic } - if _, ok := authTypesMapping[a.Scheme]; !ok { - return fmt.Errorf("authentication scheme %s not supported", authTypesMapping["type"]) + authProperties, ok := getAuthProperties(a.Scheme) + if !ok { + return fmt.Errorf("authentication scheme %s not supported", a.Scheme) } + // we take the type we want to unmarshal based on the scheme - authProperties := authTypesMapping[a.Scheme] if err := unmarshalKey("properties", auth, authProperties); err != nil { return err } diff --git a/model/auth_test.go b/model/auth_test.go index 3a04504..870d8f8 100644 --- a/model/auth_test.go +++ b/model/auth_test.go @@ -15,6 +15,7 @@ package model import ( + "encoding/json" "testing" "github.com/stretchr/testify/assert" @@ -92,3 +93,70 @@ func TestAuthDefinitionsStructLevelValidation(t *testing.T) { }) } } + +func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { + t.Run("BearerAuthProperties", func(t *testing.T) { + a1JSON := `{ + "name": "a1", + "scheme": "bearer", + "properties": { + "token": "token1" + } + }` + a2JSON := `{ + "name": "a2", + "scheme": "bearer", + "properties": { + "token": "token2" + } + }` + + var a1 Auth + err := json.Unmarshal([]byte(a1JSON), &a1) + assert.NoError(t, err) + + var a2 Auth + err = json.Unmarshal([]byte(a2JSON), &a2) + assert.NoError(t, err) + + a1Properties := a1.Properties.(*BearerAuthProperties) + a2Properties := a2.Properties.(*BearerAuthProperties) + + assert.Equal(t, "token1", a1Properties.Token) + assert.Equal(t, "token2", a2Properties.Token) + assert.NotEqual(t, a1Properties, a2Properties) + }) + + t.Run("OAuth2AuthProperties", func(t *testing.T) { + a1JSON := `{ + "name": "a1", + "scheme": "oauth2", + "properties": { + "clientSecret": "secret1" + } +}` + + a2JSON := `{ + "name": "a2", + "scheme": "oauth2", + "properties": { + "clientSecret": "secret2" + } +}` + + var a1 Auth + err := json.Unmarshal([]byte(a1JSON), &a1) + assert.NoError(t, err) + + var a2 Auth + err = json.Unmarshal([]byte(a2JSON), &a2) + assert.NoError(t, err) + + a1Properties := a1.Properties.(*OAuth2AuthProperties) + a2Properties := a2.Properties.(*OAuth2AuthProperties) + + assert.Equal(t, "secret1", a1Properties.ClientSecret) + assert.Equal(t, "secret2", a2Properties.ClientSecret) + assert.NotEqual(t, a1Properties, a2Properties) + }) +} From 75890210cb1315c72ff324c25192d9ff820e7d5a Mon Sep 17 00:00:00 2001 From: Li Heng Date: Tue, 4 Oct 2022 02:29:13 +0800 Subject: [PATCH 020/110] fix useResults no default value bug (#91) Signed-off-by: LiHeng Signed-off-by: LiHeng --- model/action.go | 15 ++++++++ parser/parser_test.go | 11 +++++- .../workflows/actiondata-defaultvalue.yaml | 34 +++++++++++++++++++ 3 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 parser/testdata/workflows/actiondata-defaultvalue.yaml diff --git a/model/action.go b/model/action.go index 04f9545..43e2333 100644 --- a/model/action.go +++ b/model/action.go @@ -41,6 +41,21 @@ type Action struct { Condition string `json:"condition,omitempty"` } +type actionForUnmarshal Action + +// UnmarshalJSON ... +func (a *Action) UnmarshalJSON(data []byte) error { + v := actionForUnmarshal{ + ActionDataFilter: ActionDataFilter{UseResults: true}, + } + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + *a = Action(v) + return nil +} + // FunctionRef ... 
type FunctionRef struct { // Name of the referenced function diff --git a/parser/parser_test.go b/parser/parser_test.go index 6a07113..063be61 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -19,8 +19,9 @@ import ( "path/filepath" "testing" - "github.com/serverlessworkflow/sdk-go/v2/model" "github.com/stretchr/testify/assert" + + "github.com/serverlessworkflow/sdk-go/v2/model" ) func TestBasicValidation(t *testing.T) { @@ -63,6 +64,14 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, + }, { + "./testdata/workflows/actiondata-defaultvalue.yaml", + func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "greeting", w.ID) + assert.IsType(t, &model.OperationState{}, w.States[0].(*model.OperationState)) + assert.Equal(t, true, w.States[0].(*model.OperationState).Actions[0].ActionDataFilter.UseResults) + assert.Equal(t, "greeting", w.States[0].(*model.OperationState).Actions[0].Name) + }, }, { "./testdata/workflows/greetings.sw.yaml", func(t *testing.T, w *model.Workflow) { diff --git a/parser/testdata/workflows/actiondata-defaultvalue.yaml b/parser/testdata/workflows/actiondata-defaultvalue.yaml new file mode 100644 index 0000000..6b1628d --- /dev/null +++ b/parser/testdata/workflows/actiondata-defaultvalue.yaml @@ -0,0 +1,34 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +id: greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.8" +start: + stateName: Greet +functions: + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting +states: + - id: greetingId + name: Greet + type: operation + actions: + - name: greeting + functionRef: + refName: greetingFunction + end: + terminate: true \ No newline at end of file From 7db29d70473477b3730f384b6069cd3f54364e66 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 3 Oct 2022 17:51:33 -0300 Subject: [PATCH 021/110] Remove tsurdilo to reduce spam noise --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a2c5933..63ee3c3 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @tsurdilo @ricardozanini +* @ricardozanini From 2d450404e30339ffbfaeeea37b662203c1521642 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Tue, 4 Oct 2022 22:34:41 +0800 Subject: [PATCH 022/110] feat(action): add invoke/onParentComplete/Id field (#93) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/action.go | 110 ++++++++++++----- model/action_data_filter.go | 1 + model/action_test.go | 236 ++++++++++++++++++++++++++++++++++++ model/event.go | 28 ++++- model/event_test.go | 8 +- 5 files changed, 348 insertions(+), 35 deletions(-) diff --git a/model/action.go b/model/action.go index 43e2333..af63044 100644 --- a/model/action.go +++ b/model/action.go @@ -15,15 +15,20 @@ package model import ( + "bytes" "encoding/json" + "fmt" ) -// Action ... +// Action specify invocations of services or other workflows during workflow execution. type Action struct { - // Unique action definition name - Name string `json:"name,omitempty"` + // ID defines Unique action identifier + ID string `json:"id,omitempty"` + // Name defines Unique action definition name + Name string `json:"name,omitempty"` + // FunctionRef references a reusable function definition FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions + // EventRef references a 'trigger' and 'result' reusable event definitions EventRef *EventRef `json:"eventRef,omitempty"` // References a sub-workflow to be executed SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` @@ -43,7 +48,7 @@ type Action struct { type actionForUnmarshal Action -// UnmarshalJSON ... +// UnmarshalJSON implements json.Unmarshaler func (a *Action) UnmarshalJSON(data []byte) error { v := actionForUnmarshal{ ActionDataFilter: ActionDataFilter{UseResults: true}, @@ -56,34 +61,54 @@ func (a *Action) UnmarshalJSON(data []byte) error { return nil } -// FunctionRef ... +// FunctionRef defines the reference to a reusable function definition type FunctionRef struct { // Name of the referenced function RefName string `json:"refName" validate:"required"` // Function arguments + // TODO: validate it as required if function type is graphql Arguments map[string]interface{} `json:"arguments,omitempty"` // String containing a valid GraphQL selection set + // TODO: validate it as required if function type is graphql SelectionSet string `json:"selectionSet,omitempty"` + + // Invoke specifies if the subflow should be invoked sync or async. + // Defaults to sync. + Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` } -// UnmarshalJSON ... 
+type functionRefForUnmarshal FunctionRef + +// UnmarshalJSON implements json.Unmarshaler func (f *FunctionRef) UnmarshalJSON(data []byte) error { - funcRef := make(map[string]interface{}) - if err := json.Unmarshal(data, &funcRef); err != nil { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + var err error + switch data[0] { + case '"': f.RefName, err = unmarshalString(data) if err != nil { return err } + f.Invoke = "sync" + return nil + case '{': + v := functionRefForUnmarshal{ + Invoke: "sync", + } + err = json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + *f = FunctionRef(v) return nil } - f.RefName = requiresNotNilOrEmpty(funcRef["refName"]) - if _, found := funcRef["arguments"]; found { - f.Arguments = funcRef["arguments"].(map[string]interface{}) - } - f.SelectionSet = requiresNotNilOrEmpty(funcRef["selectionSet"]) - - return nil + return fmt.Errorf("functionRef value '%s' not support, it must be an object or string", string(data)) } // WorkflowRef holds a reference for a workflow definition @@ -92,32 +117,55 @@ type WorkflowRef struct { WorkflowID string `json:"workflowId" validate:"required"` // Sub-workflow version Version string `json:"version,omitempty"` + + // Invoke specifies if the subflow should be invoked sync or async. + // Defaults to sync. + Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` + + // OnParantComplete specifies how subflow execution should behave when parent workflow completes if invoke is 'async'。 + // Defaults to terminate. + OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` } -// UnmarshalJSON ... +type workflowRefForUnmarshal WorkflowRef + +// UnmarshalJSON implements json.Unmarshaler func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - subflowRef := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &subflowRef); err != nil { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + var err error + switch data[0] { + case '"': s.WorkflowID, err = unmarshalString(data) if err != nil { return err } + s.Invoke, s.OnParentComplete = "sync", "terminate" + return nil + case '{': + v := workflowRefForUnmarshal{ + Invoke: "sync", + OnParentComplete: "terminate", + } + err = json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + *s = WorkflowRef(v) return nil - } - if err := unmarshalKey("version", subflowRef, &s.Version); err != nil { - return err - } - if err := unmarshalKey("workflowId", subflowRef, &s.WorkflowID); err != nil { - return err } - return nil + return fmt.Errorf("subFlowRef value '%s' not support, it must be an object or string", string(data)) } -// Sleep ... +// Sleep defines time periods workflow execution should sleep before & after function execution type Sleep struct { - // Before Amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. - Before string `json:"before,omitempty"` - // After Amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. - After string `json:"after,omitempty"` + // Before defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. 
+ Before string `json:"before,omitempty" validate:"omitempty,iso8601duration"` + // After defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. + After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/action_data_filter.go b/model/action_data_filter.go index 7fcda3e..4dcf6ac 100644 --- a/model/action_data_filter.go +++ b/model/action_data_filter.go @@ -37,6 +37,7 @@ type ActionDataFilter struct { type actionDataFilterForUnmarshal ActionDataFilter +// UnmarshalJSON implements json.Unmarshaler func (f *ActionDataFilter) UnmarshalJSON(data []byte) error { data = bytes.TrimSpace(data) if len(data) == 0 { diff --git a/model/action_test.go b/model/action_test.go index c960f3c..7e022e9 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -13,3 +13,239 @@ // limitations under the License. package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestWorkflowRefUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect WorkflowRef + err string + } + testCases := []testCase{ + { + desp: "normal object test", + data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "async", + OnParentComplete: "continue", + }, + err: ``, + }, + { + desp: "normal object test & defaults", + data: `{"workflowId": "1"}`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "", + Invoke: "sync", + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "normal string test", + data: `"1"`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "", + Invoke: "sync", + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "empty data", + data: ` `, + expect: WorkflowRef{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid string format", + data: `"1`, + expect: WorkflowRef{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json format", + data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, + expect: WorkflowRef{}, + err: `json: cannot unmarshal number into Go struct field workflowRefForUnmarshal.workflowId of type string`, + }, + { + desp: "invalid string or object", + data: `1`, + expect: WorkflowRef{}, + err: `subFlowRef value '1' not support, it must be an object or string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v WorkflowRef + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestWorkflowRefValidate(t *testing.T) { + type testCase struct { + desp string + workflowRef WorkflowRef + err string + } + testCases := []testCase{ + { + desp: "all field & defaults", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "sync", + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "all field", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "async", + OnParentComplete: "continue", + }, + err: ``, + }, + { + desp: "missing workflowId", + workflowRef: WorkflowRef{ + WorkflowID: "", + Version: "2", + Invoke: "sync", + OnParentComplete: "terminate", + }, + err: `Key: 'WorkflowRef.WorkflowID' Error:Field 
validation for 'WorkflowID' failed on the 'required' tag`, + }, + { + desp: "invalid invoke", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "sync1", + OnParentComplete: "terminate", + }, + err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneof' tag`, + }, + { + desp: "invalid onParentComplete", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "sync", + OnParentComplete: "terminate1", + }, + err: `Key: 'WorkflowRef.OnParentComplete' Error:Field validation for 'OnParentComplete' failed on the 'oneof' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.workflowRef) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestSleepValidate(t *testing.T) { + type testCase struct { + desp string + sleep Sleep + err string + } + testCases := []testCase{ + { + desp: "all field empty", + sleep: Sleep{ + Before: "", + After: "", + }, + err: ``, + }, + { + desp: "only before field", + sleep: Sleep{ + Before: "PT5M", + After: "", + }, + err: ``, + }, + { + desp: "only after field", + sleep: Sleep{ + Before: "", + After: "PT5M", + }, + err: ``, + }, + { + desp: "all field", + sleep: Sleep{ + Before: "PT5M", + After: "PT5M", + }, + err: ``, + }, + { + desp: "invalid before value", + sleep: Sleep{ + Before: "T5M", + After: "PT5M", + }, + err: `Key: 'Sleep.Before' Error:Field validation for 'Before' failed on the 'iso8601duration' tag`, + }, + { + desp: "invalid after value", + sleep: Sleep{ + Before: "PT5M", + After: "T5M", + }, + err: `Key: 'Sleep.After' Error:Field validation for 'After' failed on the 'iso8601duration' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.sleep) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/event.go b/model/event.go index 1d25ad0..2845b8d 100644 --- a/model/event.go +++ b/model/event.go @@ -15,9 +15,11 @@ package model import ( - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "encoding/json" "reflect" + val "github.com/serverlessworkflow/sdk-go/v2/validator" + validator "github.com/go-playground/validator/v10" ) @@ -87,14 +89,36 @@ type EventRef struct { TriggerEventRef string `json:"triggerEventRef" validate:"required"` // Reference to the unique name of a 'consumed' event definition ResultEventRef string `json:"resultEventRef" validate:"required"` - // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it will be set to the 'actionExecTimeout' + + // ResultEventTimeout defines maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the actionExecutionTimeout ResultEventTimeout string `json:"resultEventTimeout,omitempty"` + // TODO: create StringOrMap structure // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. Data interface{} `json:"data,omitempty"` // Add additional extension context attributes to the produced event ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` + + // Invoke specifies if the subflow should be invoked sync or async. 
+ // Defaults to sync. + Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` +} + +type eventRefForUnmarshal EventRef + +// UnmarshalJSON implements json.Unmarshaler +func (e *EventRef) UnmarshalJSON(data []byte) error { + v := eventRefForUnmarshal{ + Invoke: "sync", + } + err := json.Unmarshal(data, &v) + if err != nil { + return nil + } + + *e = EventRef(v) + return nil } // InvokeKind ... diff --git a/model/event_test.go b/model/event_test.go index cdc63db..6653344 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -15,9 +15,11 @@ package model import ( - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestEventRefStructLevelValidation(t *testing.T) { @@ -34,6 +36,7 @@ func TestEventRefStructLevelValidation(t *testing.T) { TriggerEventRef: "example valid", ResultEventRef: "example valid", ResultEventTimeout: "PT1H", + Invoke: "sync", }, err: ``, }, @@ -43,6 +46,7 @@ func TestEventRefStructLevelValidation(t *testing.T) { TriggerEventRef: "example invalid", ResultEventRef: "example invalid red", ResultEventTimeout: "10hs", + Invoke: "sync", }, err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'reqiso8601duration' tag`, }, From cc7f77a1e6ebdc617f99f7568f56f9346066d545 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Tue, 4 Oct 2022 22:41:13 +0800 Subject: [PATCH 023/110] fix(ut): use filename for %s (#94) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- parser/parser_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parser/parser_test.go b/parser/parser_test.go index 063be61..65e05be 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -335,8 +335,8 @@ func TestFromFile(t *testing.T) { t.Run( file.name, func(t *testing.T) { workflow, err := FromFile(file.name) - if assert.NoError(t, err, "Test File %s", file) { - assert.NotNil(t, workflow, "Test File %s", file) + if assert.NoError(t, err, "Test File %s", file.name) { + assert.NotNil(t, workflow, "Test File %s", file.name) file.f(t, workflow) } }, From 10bcfba15bcf18ac66f8e6462aee7870dd0b7f07 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 5 Oct 2022 20:30:10 +0800 Subject: [PATCH 024/110] feat(*): add InvokeKind enum (#97) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/action.go | 12 ++++++------ model/action_test.go | 14 +++++++------- model/event.go | 7 ++----- model/event_test.go | 4 ++-- model/workflow.go | 11 +++++++++++ 5 files changed, 28 insertions(+), 20 deletions(-) diff --git a/model/action.go b/model/action.go index af63044..1ec9a50 100644 --- a/model/action.go +++ b/model/action.go @@ -74,7 +74,7 @@ type FunctionRef struct { // Invoke specifies if the subflow should be invoked sync or async. // Defaults to sync. 
- Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` } type functionRefForUnmarshal FunctionRef @@ -93,11 +93,11 @@ func (f *FunctionRef) UnmarshalJSON(data []byte) error { if err != nil { return err } - f.Invoke = "sync" + f.Invoke = InvokeKindSync return nil case '{': v := functionRefForUnmarshal{ - Invoke: "sync", + Invoke: InvokeKindSync, } err = json.Unmarshal(data, &v) if err != nil { @@ -120,7 +120,7 @@ type WorkflowRef struct { // Invoke specifies if the subflow should be invoked sync or async. // Defaults to sync. - Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` // OnParantComplete specifies how subflow execution should behave when parent workflow completes if invoke is 'async'。 // Defaults to terminate. @@ -143,11 +143,11 @@ func (s *WorkflowRef) UnmarshalJSON(data []byte) error { if err != nil { return err } - s.Invoke, s.OnParentComplete = "sync", "terminate" + s.Invoke, s.OnParentComplete = InvokeKindSync, "terminate" return nil case '{': v := workflowRefForUnmarshal{ - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate", } err = json.Unmarshal(data, &v) diff --git a/model/action_test.go b/model/action_test.go index 7e022e9..73f7d32 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -37,7 +37,7 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { expect: WorkflowRef{ WorkflowID: "1", Version: "2", - Invoke: "async", + Invoke: InvokeKindAsync, OnParentComplete: "continue", }, err: ``, @@ -48,7 +48,7 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { expect: WorkflowRef{ WorkflowID: "1", Version: "", - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate", }, err: ``, @@ -59,7 +59,7 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { expect: WorkflowRef{ WorkflowID: "1", Version: "", - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate", }, err: ``, @@ -118,7 +118,7 @@ func TestWorkflowRefValidate(t *testing.T) { workflowRef: WorkflowRef{ WorkflowID: "1", Version: "2", - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate", }, err: ``, @@ -128,7 +128,7 @@ func TestWorkflowRefValidate(t *testing.T) { workflowRef: WorkflowRef{ WorkflowID: "1", Version: "2", - Invoke: "async", + Invoke: InvokeKindAsync, OnParentComplete: "continue", }, err: ``, @@ -138,7 +138,7 @@ func TestWorkflowRefValidate(t *testing.T) { workflowRef: WorkflowRef{ WorkflowID: "", Version: "2", - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate", }, err: `Key: 'WorkflowRef.WorkflowID' Error:Field validation for 'WorkflowID' failed on the 'required' tag`, @@ -158,7 +158,7 @@ func TestWorkflowRefValidate(t *testing.T) { workflowRef: WorkflowRef{ WorkflowID: "1", Version: "2", - Invoke: "sync", + Invoke: InvokeKindSync, OnParentComplete: "terminate1", }, err: `Key: 'WorkflowRef.OnParentComplete' Error:Field validation for 'OnParentComplete' failed on the 'oneof' tag`, diff --git a/model/event.go b/model/event.go index 2845b8d..a97189f 100644 --- a/model/event.go +++ b/model/event.go @@ -102,7 +102,7 @@ type EventRef struct { // Invoke specifies if the subflow should be invoked sync or async. // Defaults to sync. 
- Invoke string `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` } type eventRefForUnmarshal EventRef @@ -110,7 +110,7 @@ type eventRefForUnmarshal EventRef // UnmarshalJSON implements json.Unmarshaler func (e *EventRef) UnmarshalJSON(data []byte) error { v := eventRefForUnmarshal{ - Invoke: "sync", + Invoke: InvokeKindSync, } err := json.Unmarshal(data, &v) if err != nil { @@ -120,6 +120,3 @@ func (e *EventRef) UnmarshalJSON(data []byte) error { *e = EventRef(v) return nil } - -// InvokeKind ... -type InvokeKind string diff --git a/model/event_test.go b/model/event_test.go index 6653344..d469898 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -36,7 +36,7 @@ func TestEventRefStructLevelValidation(t *testing.T) { TriggerEventRef: "example valid", ResultEventRef: "example valid", ResultEventTimeout: "PT1H", - Invoke: "sync", + Invoke: InvokeKindSync, }, err: ``, }, @@ -46,7 +46,7 @@ func TestEventRefStructLevelValidation(t *testing.T) { TriggerEventRef: "example invalid", ResultEventRef: "example invalid red", ResultEventTimeout: "10hs", - Invoke: "sync", + Invoke: InvokeKindSync, }, err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'reqiso8601duration' tag`, }, diff --git a/model/workflow.go b/model/workflow.go index ba86143..e785477 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -19,6 +19,17 @@ import ( "fmt" ) +// InvokeKind defines how the target is invoked. +type InvokeKind string + +const ( + // InvokeKindSync meaning that worfklow execution should wait until the target completes. + InvokeKindSync InvokeKind = "sync" + + // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its completion. + InvokeKindAsync InvokeKind = "async" +) + const ( // DefaultExpressionLang ... DefaultExpressionLang = "jq" From 28e2e7d10da6cee74c67cafe9289de0bb12fb90a Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Wed, 5 Oct 2022 10:26:02 -0300 Subject: [PATCH 025/110] v08 - fix ContinueAs data field assignment (#92) Signed-off-by: spolti Signed-off-by: spolti --- model/action.go | 53 +------------ model/action_test.go | 2 +- model/auth.go | 2 +- model/event.go | 2 +- model/event_test.go | 2 +- model/state_exec_timeout.go | 4 +- model/state_exec_timeout_test.go | 2 +- model/states.go | 1 - model/workflow.go | 60 ++++++++++++++- model/workflow_ref.go | 72 +++++++++++++++++ model/workflow_test.go | 77 +++++++++++++++++++ parser/parser_test.go | 19 +++++ .../workflows/continue-as-example.yaml | 58 ++++++++++++++ 13 files changed, 292 insertions(+), 62 deletions(-) create mode 100644 model/workflow_ref.go create mode 100644 model/workflow_test.go create mode 100644 parser/testdata/workflows/continue-as-example.yaml diff --git a/model/action.go b/model/action.go index 1ec9a50..1897854 100644 --- a/model/action.go +++ b/model/action.go @@ -108,58 +108,7 @@ func (f *FunctionRef) UnmarshalJSON(data []byte) error { return nil } - return fmt.Errorf("functionRef value '%s' not support, it must be an object or string", string(data)) -} - -// WorkflowRef holds a reference for a workflow definition -type WorkflowRef struct { - // Sub-workflow unique id - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - Version string `json:"version,omitempty"` - - // Invoke specifies if the subflow should be invoked sync or async. - // Defaults to sync. 
- Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` - - // OnParantComplete specifies how subflow execution should behave when parent workflow completes if invoke is 'async'。 - // Defaults to terminate. - OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` -} - -type workflowRefForUnmarshal WorkflowRef - -// UnmarshalJSON implements json.Unmarshaler -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - var err error - switch data[0] { - case '"': - s.WorkflowID, err = unmarshalString(data) - if err != nil { - return err - } - s.Invoke, s.OnParentComplete = InvokeKindSync, "terminate" - return nil - case '{': - v := workflowRefForUnmarshal{ - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - } - err = json.Unmarshal(data, &v) - if err != nil { - // TODO: replace the error message with correct type's name - return err - } - *s = WorkflowRef(v) - return nil - } - - return fmt.Errorf("subFlowRef value '%s' not support, it must be an object or string", string(data)) + return fmt.Errorf("functionRef value '%s' is not supported, it must be an object or string", string(data)) } // Sleep defines time periods workflow execution should sleep before & after function execution diff --git a/model/action_test.go b/model/action_test.go index 73f7d32..e658348 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -86,7 +86,7 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { desp: "invalid string or object", data: `1`, expect: WorkflowRef{}, - err: `subFlowRef value '1' not support, it must be an object or string`, + err: `subFlowRef value '1' is not supported, it must be an object or string`, }, } for _, tc := range testCases { diff --git a/model/auth.go b/model/auth.go index db528e5..8f752c7 100644 --- a/model/auth.go +++ b/model/auth.go @@ -107,7 +107,7 @@ func (a *AuthDefinitions) UnmarshalJSON(b []byte) error { return a.unmarshalMany(b) } - return fmt.Errorf("auth value '%s' not support, it must be an array or string", string(b)) + return fmt.Errorf("auth value '%s' is not supported, it must be an array or string", string(b)) } func (a *AuthDefinitions) unmarshalFile(data []byte) error { diff --git a/model/event.go b/model/event.go index a97189f..1cc9f6c 100644 --- a/model/event.go +++ b/model/event.go @@ -50,7 +50,7 @@ func EventRefStructLevelValidation(structLevel validator.StructLevel) { if len(eventRef.ResultEventTimeout) > 0 { err := val.ValidateISO8601TimeDuration(eventRef.ResultEventTimeout) if err != nil { - structLevel.ReportError(reflect.ValueOf(eventRef.ResultEventTimeout), "ResultEventTimeout", "resultEventTimeout", "reqiso8601duration", "") + structLevel.ReportError(reflect.ValueOf(eventRef.ResultEventTimeout), "ResultEventTimeout", "resultEventTimeout", "iso8601duration", "") } } } diff --git a/model/event_test.go b/model/event_test.go index d469898..dc54109 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -48,7 +48,7 @@ func TestEventRefStructLevelValidation(t *testing.T) { ResultEventTimeout: "10hs", Invoke: InvokeKindSync, }, - err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'reqiso8601duration' tag`, + err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'iso8601duration' tag`, }, } diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index 
1799d2e..4391f5e 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -28,7 +28,7 @@ type StateExecTimeout struct { Total string `json:"total" validate:"required,iso8601duration"` } -// just define another type to unmarshal object, so the UnmarshalJSON will not called recursively +// just define another type to unmarshal object, so the UnmarshalJSON will not be called recursively type stateExecTimeoutForUnmarshal StateExecTimeout // UnmarshalJSON unmarshal StateExecTimeout object from json bytes @@ -57,5 +57,5 @@ func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { return nil } - return fmt.Errorf("stateExecTimeout value '%s' not support, it must be an object or string", string(data)) + return fmt.Errorf("stateExecTimeout value '%s' is not supported, it must be an object or string", string(data)) } diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go index 0dd3fe1..0c972ce 100644 --- a/model/state_exec_timeout_test.go +++ b/model/state_exec_timeout_test.go @@ -71,7 +71,7 @@ func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { data: `PT10S`, expect: &StateExecTimeout{}, - err: `stateExecTimeout value 'PT10S' not support, it must be an object or string`, + err: `stateExecTimeout value 'PT10S' is not supported, it must be an object or string`, }, { desp: "invalid total type", diff --git a/model/states.go b/model/states.go index f2c379d..c82e141 100644 --- a/model/states.go +++ b/model/states.go @@ -16,7 +16,6 @@ package model import ( "encoding/json" - "k8s.io/apimachinery/pkg/util/intstr" ) diff --git a/model/workflow.go b/model/workflow.go index e785477..4d49590 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -17,6 +17,9 @@ package model import ( "encoding/json" "fmt" + "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "reflect" ) // InvokeKind defines how the target is invoked. @@ -58,6 +61,20 @@ var actionsModelMapping = map[string]func(state map[string]interface{}) State{ StateTypeSleep: func(map[string]interface{}) State { return &SleepState{} }, } +func init() { + val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) +} + +func continueAsStructLevelValidation(structLevel validator.StructLevel) { + continueAs := structLevel.Current().Interface().(ContinueAs) + if len(continueAs.WorkflowExecTimeout.Duration) > 0 { + if err := val.ValidateISO8601TimeDuration(continueAs.WorkflowExecTimeout.Duration); err != nil { + structLevel.ReportError(reflect.ValueOf(continueAs.WorkflowExecTimeout.Duration), + "workflowExecTimeout", "duration", "iso8601duration", "") + } + } +} + // ActionMode ... type ActionMode string @@ -493,16 +510,55 @@ func (e *End) UnmarshalJSON(data []byte) error { return nil } -// ContinueAs ... +// ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) type ContinueAs struct { WorkflowRef // TODO: add object or string data type - // If string type, an expression which selects parts of the states data output to become the workflow data input of continued execution. If object type, a custom object to become the workflow data input of the continued execution + // If string type, an expression which selects parts of the states data output to become the workflow data input of + // continued execution. 
If object type, a custom object to become the workflow data input of the continued execution Data interface{} `json:"data,omitempty"` // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. Overwrites any specific settings set by that workflow WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` } +type continueAsForUnmarshal ContinueAs + +func (c *ContinueAs) UnmarshalJSON(data []byte) error { + continueAs := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &continueAs); err != nil { + c.WorkflowID, err = unmarshalString(data) + if err != nil { + return err + } + return nil + } + + if err := unmarshalKey("data", continueAs, &c.Data); err != nil { + return err + } + if err := unmarshalKey("workflowExecTimeout", continueAs, &c.WorkflowExecTimeout); err != nil { + return err + } + + v := continueAsForUnmarshal{ + WorkflowRef: WorkflowRef{ + Invoke: "sync", + OnParentComplete: "terminate", + }, + Data: c.Data, + WorkflowExecTimeout: c.WorkflowExecTimeout, + } + + err := json.Unmarshal(data, &v) + if err != nil { + return fmt.Errorf("continueAs value '%s' is not supported, it must be an object or string", string(data)) + } + + *c = ContinueAs(v) + return nil + +} + // ProduceEvent ... type ProduceEvent struct { // References a name of a defined event diff --git a/model/workflow_ref.go b/model/workflow_ref.go new file mode 100644 index 0000000..b77da15 --- /dev/null +++ b/model/workflow_ref.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "bytes" + "encoding/json" + "fmt" +) + +// WorkflowRef holds a reference for a workflow definition +type WorkflowRef struct { + // Sub-workflow unique id + WorkflowID string `json:"workflowId" validate:"required"` + // Sub-workflow version + Version string `json:"version,omitempty"` + + // Invoke specifies if the subflow should be invoked sync or async. + // Defaults to sync. + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` + + // OnParantComplete specifies how subflow execution should behave when parent workflow completes if invoke is 'async'。 + // Defaults to terminate. 
+ OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` +} + +type workflowRefForUnmarshal WorkflowRef + +// UnmarshalJSON implements json.Unmarshaler +func (s *WorkflowRef) UnmarshalJSON(data []byte) error { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + var err error + switch data[0] { + case '"': + s.WorkflowID, err = unmarshalString(data) + if err != nil { + return err + } + s.Invoke, s.OnParentComplete = InvokeKindSync, "terminate" + return nil + case '{': + v := workflowRefForUnmarshal{ + Invoke: InvokeKindSync, + OnParentComplete: "terminate", + } + err = json.Unmarshal(data, &v) + if err != nil { + // TODO: replace the error message with correct type's name + return err + } + *s = WorkflowRef(v) + return nil + } + + return fmt.Errorf("subFlowRef value '%s' is not supported, it must be an object or string", string(data)) +} diff --git a/model/workflow_test.go b/model/workflow_test.go new file mode 100644 index 0000000..f2bc0cd --- /dev/null +++ b/model/workflow_test.go @@ -0,0 +1,77 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestContinueAsStructLevelValidation(t *testing.T) { + type testCase struct { + name string + continueAs ContinueAs + err string + } + + testCases := []testCase{ + { + name: "valid ContinueAs", + continueAs: ContinueAs{ + WorkflowRef: WorkflowRef{ + WorkflowID: "another-test", + Version: "2", + Invoke: "sync", + OnParentComplete: "terminate", + }, + Data: "${ del(.customerCount) }", + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "PT1H", + Interrupt: false, + RunBefore: "test", + }, + }, + err: ``, + }, + { + name: "invalid WorkflowExecTimeout", + continueAs: ContinueAs{ + WorkflowRef: WorkflowRef{ + WorkflowID: "test", + Version: "1", + }, + Data: "${ del(.customerCount) }", + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "invalid", + }, + }, + err: `Key: 'ContinueAs.workflowExecTimeout' Error:Field validation for 'workflowExecTimeout' failed on the 'iso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := val.GetValidator().Struct(tc.continueAs) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/parser/parser_test.go b/parser/parser_test.go index 65e05be..b74a049 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -329,6 +329,25 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) }, + }, { + "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Notify Customer", w.Name) + 
eventState := w.States[1].(*model.DataBasedSwitchState) + + assert.NotNil(t, eventState) + assert.NotEmpty(t, eventState.DataConditions) + assert.IsType(t, &model.EndDataCondition{}, eventState.DataConditions[0]) + + endDataCondition := eventState.DataConditions[0].(*model.EndDataCondition) + assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowRef.WorkflowID) + assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.WorkflowRef.Version) + assert.Equal(t, "${ del(.customerCount) }", endDataCondition.End.ContinueAs.Data) + assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) + assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) + assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) + assert.Equal(t, model.InvokeKindSync, endDataCondition.End.ContinueAs.Invoke) + assert.Equal(t, "terminate", endDataCondition.End.ContinueAs.OnParentComplete) + }, }, } for _, file := range files { diff --git a/parser/testdata/workflows/continue-as-example.yaml b/parser/testdata/workflows/continue-as-example.yaml new file mode 100644 index 0000000..b5957f5 --- /dev/null +++ b/parser/testdata/workflows/continue-as-example.yaml @@ -0,0 +1,58 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +id: notifycustomerworkflow +name: Notify Customer +version: '1.0' +specVersion: '0.8' +start: WaitForCustomerEvent +states: + - name: WaitForCustomerEvent + type: event + onEvents: + - eventRefs: + - CustomerEvent + eventDataFilter: + data: "${ .customerId }" + toStateData: "${ .eventCustomerId }" + actions: + - functionRef: + refName: NotifyCustomerFunction + arguments: + customerId: "${ .eventCustomerId }" + stateDataFilter: + output: "${ .count = .count + 1 }" + transition: CheckEventQuota + - name: CheckEventQuota + type: switch + dataConditions: + - condition: "${ try(.customerCount) != null and .customerCount > .quota.maxConsumedEvents}" + end: + continueAs: + workflowId: notifycustomerworkflow + version: '1.0' + data: "${ del(.customerCount) }" + workflowExecTimeout: + duration: "PT1H" + runBefore: "GenerateReport" + interrupt: true + defaultCondition: + transition: WaitForCustomerEvent +events: + - name: CustomerEvent + type: org.events.customerEvent + source: customerSource +functions: + - name: NotifyCustomerFunction + operation: http://myapis.org/customerapis.json#notifyCustomer \ No newline at end of file From 1ae306c327c5f63543b1eeb60d8726a24d45b4be Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 5 Oct 2022 21:37:43 +0800 Subject: [PATCH 026/110] feat(eventState): add default value for OnEvents.ActionMode (#96) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/event_state.go | 86 ++++++++++++++++++++++ model/event_state_test.go | 148 ++++++++++++++++++++++++++++++++++++++ model/states.go | 53 +------------- model/workflow.go | 34 ++++----- 4 files changed, 249 insertions(+), 72 deletions(-) create mode 100644 model/event_state.go create mode 100644 model/event_state_test.go diff --git a/model/event_state.go b/model/event_state.go new file mode 100644 index 0000000..ede11f9 --- /dev/null +++ b/model/event_state.go @@ -0,0 +1,86 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" +) + +// EventState used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel +type EventState struct { + // TODO: EventState doesn't have usedForCompensation field. + BaseState + + // If true consuming one of the defined events causes its associated actions to be performed. 
If false all of the defined events must be consumed in order for actions to be performed + // Defaults to true + Exclusive bool `json:"exclusive,omitempty"` + // Define the events to be consumed and optional actions to be performed + OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` + // State specific timeouts + Timeout *EventStateTimeout `json:"timeouts,omitempty"` +} + +type eventStateForUnmarshal EventState + +// UnmarshalJSON unmarshal EventState object from json bytes +func (e *EventState) UnmarshalJSON(data []byte) error { + v := eventStateForUnmarshal{ + Exclusive: true, + } + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + + *e = EventState(v) + return nil +} + +// OnEvents define which actions are be be performed for the one or more events. +type OnEvents struct { + // References one or more unique event names in the defined workflow events + EventRefs []string `json:"eventRefs" validate:"required,min=1"` + // Specifies how actions are to be performed (in sequence or parallel) + // Defaults to sequential + ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` + // Actions to be performed if expression matches + Actions []Action `json:"actions,omitempty" validate:"omitempty,dive"` + // Event data filter + EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` +} + +type onEventsForUnmarshal OnEvents + +// UnmarshalJSON unmarshal OnEvents object from json bytes +func (o *OnEvents) UnmarshalJSON(data []byte) error { + v := onEventsForUnmarshal{ + ActionMode: ActionModeSequential, + } + + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + + *o = OnEvents(v) + return nil +} + +// EventStateTimeout defines timeout settings for event state +type EventStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` +} diff --git a/model/event_state_test.go b/model/event_state_test.go new file mode 100644 index 0000000..b203373 --- /dev/null +++ b/model/event_state_test.go @@ -0,0 +1,148 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEventStateUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect EventState + err string + } + testCases := []testCase{ + { + desp: "all fields set", + data: `{"name": "1", "Type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, + expect: EventState{ + BaseState: BaseState{ + Name: "1", + Type: StateTypeEvent, + }, + Exclusive: false, + OnEvents: []OnEvents{ + { + EventRefs: []string{"E1", "E2"}, + ActionMode: "parallel", + }, + }, + Timeout: &EventStateTimeout{ + EventTimeout: "PT5M", + ActionExecTimeout: "PT5M", + StateExecTimeout: StateExecTimeout{ + Total: "PT5M", + }, + }, + }, + err: ``, + }, + { + desp: "default exclusive", + data: `{"name": "1", "Type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, + expect: EventState{ + BaseState: BaseState{ + Name: "1", + Type: StateTypeEvent, + }, + Exclusive: true, + OnEvents: []OnEvents{ + { + EventRefs: []string{"E1", "E2"}, + ActionMode: "parallel", + }, + }, + Timeout: &EventStateTimeout{ + EventTimeout: "PT5M", + ActionExecTimeout: "PT5M", + StateExecTimeout: StateExecTimeout{ + Total: "PT5M", + }, + }, + }, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + v := EventState{} + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestOnEventsUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect OnEvents + err string + } + testCases := []testCase{ + { + desp: "all fields set", + data: `{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, + expect: OnEvents{ + EventRefs: []string{"E1", "E2"}, + ActionMode: ActionModeParallel, + }, + err: ``, + }, + { + desp: "default action mode", + data: `{"eventRefs": ["E1", "E2"]}`, + expect: OnEvents{ + EventRefs: []string{"E1", "E2"}, + ActionMode: ActionModeSequential, + }, + err: ``, + }, + { + desp: "invalid object format", + data: `"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, + expect: OnEvents{}, + err: `invalid character ':' after top-level value`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + v := OnEvents{} + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/states.go b/model/states.go index c82e141..b4e876f 100644 --- a/model/states.go +++ b/model/states.go @@ -126,58 +126,6 @@ func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFi // GetMetadata ... func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } -// EventState This state is used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel -type EventState struct { - BaseState - // If true consuming one of the defined events causes its associated actions to be performed. 
If false all of the defined events must be consumed in order for actions to be performed - Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed - OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts - Timeout *EventStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON ... -func (e *EventState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &e.BaseState); err != nil { - return err - } - - eventStateMap := make(map[string]interface{}) - if err := json.Unmarshal(data, &eventStateMap); err != nil { - return err - } - - e.Exclusive = true - - if eventStateMap["exclusive"] != nil { - exclusiveVal, ok := eventStateMap["exclusive"].(bool) - if ok { - e.Exclusive = exclusiveVal - } - } - - eventStateRaw := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &eventStateRaw); err != nil { - return err - } - if err := json.Unmarshal(eventStateRaw["onEvents"], &e.OnEvents); err != nil { - return err - } - if err := unmarshalKey("timeouts", eventStateRaw, &e.Timeout); err != nil { - return err - } - - return nil -} - -// EventStateTimeout ... -type EventStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - // OperationState Defines actions be performed. Does not wait for incoming events type OperationState struct { BaseState @@ -244,6 +192,7 @@ type ForEachState struct { // State specific timeout Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` // Mode Specifies how iterations are to be performed (sequentially or in parallel) + // Defaults to parallel Mode ForEachModeType `json:"mode,omitempty"` } diff --git a/model/workflow.go b/model/workflow.go index 4d49590..7d237cd 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -17,9 +17,10 @@ package model import ( "encoding/json" "fmt" + "reflect" + "github.com/go-playground/validator/v10" val "github.com/serverlessworkflow/sdk-go/v2/validator" - "reflect" ) // InvokeKind defines how the target is invoked. @@ -33,13 +34,21 @@ const ( InvokeKindAsync InvokeKind = "async" ) +// ActionMode specifies how actions are to be performed. +type ActionMode string + const ( - // DefaultExpressionLang ... - DefaultExpressionLang = "jq" - // ActionModeSequential ... + // ActionModeSequential specifies actions should be performed in sequence ActionModeSequential ActionMode = "sequential" - // ActionModeParallel ... + + // ActionModeParallel specifies actions should be performed in parallel ActionModeParallel ActionMode = "parallel" +) + +const ( + // DefaultExpressionLang ... + DefaultExpressionLang = "jq" + // UnlimitedTimeout description for unlimited timeouts UnlimitedTimeout = "unlimited" ) @@ -75,9 +84,6 @@ func continueAsStructLevelValidation(structLevel validator.StructLevel) { } } -// ActionMode ... -type ActionMode string - // BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces // to make it easy for custom unmarshalers implementations to unmarshal the common data structure. type BaseWorkflow struct { @@ -462,18 +468,6 @@ type OnError struct { End *End `json:"end,omitempty"` } -// OnEvents ... 
-type OnEvents struct { - // References one or more unique event names in the defined workflow events - EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Specifies how actions are to be performed (in sequence of parallel) - ActionMode ActionMode `json:"actionMode,omitempty"` - // Actions to be performed if expression matches - Actions []Action `json:"actions,omitempty" validate:"omitempty,dive"` - // Event data filter - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - // End definition type End struct { // If true, completes all execution flows in the given workflow instance From a89a5ad97d3a6ea824524015b4567e8c425accea Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 10 Oct 2022 21:40:24 +0800 Subject: [PATCH 027/110] feat(*): add event default dataOnly & kind field value (#101) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/event.go | 55 ++++++++++++++----------- model/event_test.go | 99 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+), 24 deletions(-) diff --git a/model/event.go b/model/event.go index 1cc9f6c..8e8bbef 100644 --- a/model/event.go +++ b/model/event.go @@ -23,16 +23,19 @@ import ( validator "github.com/go-playground/validator/v10" ) +// EventKind defines this event as either `consumed` or `produced` +type EventKind string + const ( - // EventKindConsumed ... + // EventKindConsumed means the event continuation of workflow instance execution EventKindConsumed EventKind = "consumed" - // EventKindProduced ... + + // EventKindProduced means the event was created during worflow instance execution EventKindProduced EventKind = "produced" ) func init() { val.GetValidator().RegisterStructValidation(EventStructLevelValidation, Event{}) - val.GetValidator().RegisterStructValidation(EventRefStructLevelValidation, EventRef{}) } // EventStructLevelValidation custom validator for event kind consumed @@ -43,22 +46,7 @@ func EventStructLevelValidation(structLevel validator.StructLevel) { } } -// EventRefStructLevelValidation custom validator for event kind consumed -func EventRefStructLevelValidation(structLevel validator.StructLevel) { - eventRef := structLevel.Current().Interface().(EventRef) - - if len(eventRef.ResultEventTimeout) > 0 { - err := val.ValidateISO8601TimeDuration(eventRef.ResultEventTimeout) - if err != nil { - structLevel.ReportError(reflect.ValueOf(eventRef.ResultEventTimeout), "ResultEventTimeout", "resultEventTimeout", "iso8601duration", "") - } - } -} - -// EventKind ... -type EventKind string - -// Event ... +// Event used to define events and their correlations type Event struct { Common // Unique event name @@ -67,15 +55,34 @@ type Event struct { Source string `json:"source,omitempty"` // CloudEvent type Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Default is 'consumed' + // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. + // Defaults to `consumed` Kind EventKind `json:"kind,omitempty"` // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload and context attributes should be accessible" + // Defaults to true DataOnly bool `json:"dataOnly,omitempty"` // CloudEvent correlation definitions Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` } -// Correlation ... 
+type eventForUnmarshal Event + +// UnmarshalJSON unmarshal Event object from json bytes +func (e *Event) UnmarshalJSON(data []byte) error { + v := eventForUnmarshal{ + DataOnly: true, + Kind: EventKindConsumed, + } + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + + *e = Event(v) + return nil +} + +// Correlation define event correlation rules for an event. Only used for `consumed` events type Correlation struct { // CloudEvent Extension Context Attribute name ContextAttributeName string `json:"contextAttributeName" validate:"required"` @@ -83,7 +90,7 @@ type Correlation struct { ContextAttributeValue string `json:"contextAttributeValue,omitempty"` } -// EventRef ... +// EventRef defining invocation of a function via event type EventRef struct { // Reference to the unique name of a 'produced' event definition TriggerEventRef string `json:"triggerEventRef" validate:"required"` @@ -91,7 +98,7 @@ type EventRef struct { ResultEventRef string `json:"resultEventRef" validate:"required"` // ResultEventTimeout defines maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the actionExecutionTimeout - ResultEventTimeout string `json:"resultEventTimeout,omitempty"` + ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` // TODO: create StringOrMap structure // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. @@ -114,7 +121,7 @@ func (e *EventRef) UnmarshalJSON(data []byte) error { } err := json.Unmarshal(data, &v) if err != nil { - return nil + return err } *e = EventRef(v) diff --git a/model/event_test.go b/model/event_test.go index dc54109..bb34e08 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -15,6 +15,7 @@ package model import ( + "encoding/json" "testing" "github.com/stretchr/testify/assert" @@ -65,3 +66,101 @@ func TestEventRefStructLevelValidation(t *testing.T) { }) } } + +func TestEventRefUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect EventRef + err string + } + testCases := []testCase{ + { + desp: "all field", + data: `{"invoke": "async"}`, + expect: EventRef{ + Invoke: InvokeKindAsync, + }, + err: ``, + }, + { + desp: "invoke unset", + data: `{}`, + expect: EventRef{ + Invoke: InvokeKindSync, + }, + err: ``, + }, + { + desp: "invalid json format", + data: `{"invoke": 1}`, + expect: EventRef{}, + err: `json: cannot unmarshal number into Go struct field eventRefForUnmarshal.invoke of type model.InvokeKind`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v EventRef + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestEventUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect Event + err string + } + testCases := []testCase{ + { + desp: "all field", + data: `{"dataOnly": false, "kind": "produced"}`, + expect: Event{ + DataOnly: false, + Kind: EventKindProduced, + }, + err: ``, + }, + { + desp: "optional field dataOnly & kind unset", + data: `{}`, + expect: Event{ + DataOnly: true, + Kind: EventKindConsumed, + }, + err: ``, + }, + { + desp: "invalid json format", + data: `{"dataOnly": "false", "kind": "produced"}`, + expect: Event{}, + err: `json: cannot unmarshal string into Go struct 
field eventForUnmarshal.dataOnly of type bool`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v Event + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} From b9db1ddbffb21a6e2e392bc7df0dbec3bb824859 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 10 Oct 2022 21:42:00 +0800 Subject: [PATCH 028/110] fix(*): continueAs doesn't have invoke & onParentComplete field (#102) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/action_test.go | 157 -------------------------------- model/workflow.go | 48 +++++----- model/workflow_ref_test.go | 180 +++++++++++++++++++++++++++++++++++++ model/workflow_test.go | 101 ++++++++++++++++++--- parser/parser_test.go | 6 +- 5 files changed, 290 insertions(+), 202 deletions(-) create mode 100644 model/workflow_ref_test.go diff --git a/model/action_test.go b/model/action_test.go index e658348..2301e12 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -15,7 +15,6 @@ package model import ( - "encoding/json" "testing" "github.com/stretchr/testify/assert" @@ -23,162 +22,6 @@ import ( val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func TestWorkflowRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "normal object test", - data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "normal object test & defaults", - data: `{"workflowId": "1"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "normal string test", - data: `"1"`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid string format", - data: `"1`, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{}, - err: `json: cannot unmarshal number into Go struct field workflowRefForUnmarshal.workflowId of type string`, - }, - { - desp: "invalid string or object", - data: `1`, - expect: WorkflowRef{}, - err: `subFlowRef value '1' is not supported, it must be an object or string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestWorkflowRefValidate(t *testing.T) { - type testCase struct { - desp string - workflowRef WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "all field & defaults", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "all field", - workflowRef: WorkflowRef{ - 
WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "missing workflowId", - workflowRef: WorkflowRef{ - WorkflowID: "", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: `Key: 'WorkflowRef.WorkflowID' Error:Field validation for 'WorkflowID' failed on the 'required' tag`, - }, - { - desp: "invalid invoke", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: "sync1", - OnParentComplete: "terminate", - }, - err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneof' tag`, - }, - { - desp: "invalid onParentComplete", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate1", - }, - err: `Key: 'WorkflowRef.OnParentComplete' Error:Field validation for 'OnParentComplete' failed on the 'oneof' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.workflowRef) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - func TestSleepValidate(t *testing.T) { type testCase struct { desp string diff --git a/model/workflow.go b/model/workflow.go index 7d237cd..9f09e6f 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -15,6 +15,7 @@ package model import ( + "bytes" "encoding/json" "fmt" "reflect" @@ -506,7 +507,11 @@ func (e *End) UnmarshalJSON(data []byte) error { // ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) type ContinueAs struct { - WorkflowRef + // Unique id of the workflow to continue execution as. + WorkflowID string `json:"workflowId" validate:"required"` + // Version of the workflow to continue execution as. + Version string `json:"version,omitempty"` + // TODO: add object or string data type // If string type, an expression which selects parts of the states data output to become the workflow data input of // continued execution. If object type, a custom object to become the workflow data input of the continued execution @@ -518,39 +523,28 @@ type ContinueAs struct { type continueAsForUnmarshal ContinueAs func (c *ContinueAs) UnmarshalJSON(data []byte) error { - continueAs := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &continueAs); err != nil { + data = bytes.TrimSpace(data) + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + var err error + switch data[0] { + case '"': c.WorkflowID, err = unmarshalString(data) + return err + case '{': + v := continueAsForUnmarshal{} + err = json.Unmarshal(data, &v) if err != nil { return err } - return nil - } - - if err := unmarshalKey("data", continueAs, &c.Data); err != nil { - return err - } - if err := unmarshalKey("workflowExecTimeout", continueAs, &c.WorkflowExecTimeout); err != nil { - return err - } - - v := continueAsForUnmarshal{ - WorkflowRef: WorkflowRef{ - Invoke: "sync", - OnParentComplete: "terminate", - }, - Data: c.Data, - WorkflowExecTimeout: c.WorkflowExecTimeout, - } - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("continueAs value '%s' is not supported, it must be an object or string", string(data)) + *c = ContinueAs(v) + return nil } - *c = ContinueAs(v) - return nil - + return fmt.Errorf("continueAs value '%s' is not supported, it must be an object or string", string(data)) } // ProduceEvent ... 
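ContinueAs above now decides between its string shorthand and its object form by looking at the first JSON byte, the same shape FunctionRef and WorkflowRef use elsewhere in the model package. A self-contained sketch of that string-or-object decoding (the Ref type is a stand-in, not an SDK type):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Ref accepts either "some-id" or {"workflowId": "...", "version": "..."}.
type Ref struct {
	WorkflowID string `json:"workflowId"`
	Version    string `json:"version,omitempty"`
}

type refForUnmarshal Ref

func (r *Ref) UnmarshalJSON(data []byte) error {
	data = bytes.TrimSpace(data)
	if len(data) == 0 {
		return fmt.Errorf("no bytes to unmarshal")
	}

	switch data[0] {
	case '"': // shorthand: a bare workflow id
		return json.Unmarshal(data, &r.WorkflowID)
	case '{': // full object form
		v := refForUnmarshal{}
		if err := json.Unmarshal(data, &v); err != nil {
			return err
		}
		*r = Ref(v)
		return nil
	}
	return fmt.Errorf("ref value '%s' is not supported, it must be an object or string", data)
}

func main() {
	var short, full Ref
	_ = json.Unmarshal([]byte(`"another-workflow"`), &short)
	_ = json.Unmarshal([]byte(`{"workflowId": "another-workflow", "version": "2"}`), &full)
	fmt.Println(short.WorkflowID, full.Version)
}
```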
diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go new file mode 100644 index 0000000..6a27e62 --- /dev/null +++ b/model/workflow_ref_test.go @@ -0,0 +1,180 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestWorkflowRefUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect WorkflowRef + err string + } + testCases := []testCase{ + { + desp: "normal object test", + data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: InvokeKindAsync, + OnParentComplete: "continue", + }, + err: ``, + }, + { + desp: "normal object test & defaults", + data: `{"workflowId": "1"}`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "", + Invoke: InvokeKindSync, + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "normal string test", + data: `"1"`, + expect: WorkflowRef{ + WorkflowID: "1", + Version: "", + Invoke: InvokeKindSync, + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "empty data", + data: ` `, + expect: WorkflowRef{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid string format", + data: `"1`, + expect: WorkflowRef{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json format", + data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, + expect: WorkflowRef{}, + err: `json: cannot unmarshal number into Go struct field workflowRefForUnmarshal.workflowId of type string`, + }, + { + desp: "invalid string or object", + data: `1`, + expect: WorkflowRef{}, + err: `subFlowRef value '1' is not supported, it must be an object or string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v WorkflowRef + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestWorkflowRefValidate(t *testing.T) { + type testCase struct { + desp string + workflowRef WorkflowRef + err string + } + testCases := []testCase{ + { + desp: "all field & defaults", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: InvokeKindSync, + OnParentComplete: "terminate", + }, + err: ``, + }, + { + desp: "all field", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: InvokeKindAsync, + OnParentComplete: "continue", + }, + err: ``, + }, + { + desp: "missing workflowId", + workflowRef: WorkflowRef{ + WorkflowID: "", + Version: "2", + Invoke: InvokeKindSync, + OnParentComplete: "terminate", + }, + err: `Key: 'WorkflowRef.WorkflowID' Error:Field validation for 'WorkflowID' failed on the 'required' tag`, + }, 
+ { + desp: "invalid invoke", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: "sync1", + OnParentComplete: "terminate", + }, + err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneof' tag`, + }, + { + desp: "invalid onParentComplete", + workflowRef: WorkflowRef{ + WorkflowID: "1", + Version: "2", + Invoke: InvokeKindSync, + OnParentComplete: "terminate1", + }, + err: `Key: 'WorkflowRef.OnParentComplete' Error:Field validation for 'OnParentComplete' failed on the 'oneof' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.workflowRef) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/workflow_test.go b/model/workflow_test.go index f2bc0cd..4a862e0 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -15,9 +15,12 @@ package model import ( - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" + "encoding/json" "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestContinueAsStructLevelValidation(t *testing.T) { @@ -31,13 +34,9 @@ func TestContinueAsStructLevelValidation(t *testing.T) { { name: "valid ContinueAs", continueAs: ContinueAs{ - WorkflowRef: WorkflowRef{ - WorkflowID: "another-test", - Version: "2", - Invoke: "sync", - OnParentComplete: "terminate", - }, - Data: "${ del(.customerCount) }", + WorkflowID: "another-test", + Version: "2", + Data: "${ del(.customerCount) }", WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "PT1H", Interrupt: false, @@ -49,11 +48,9 @@ func TestContinueAsStructLevelValidation(t *testing.T) { { name: "invalid WorkflowExecTimeout", continueAs: ContinueAs{ - WorkflowRef: WorkflowRef{ - WorkflowID: "test", - Version: "1", - }, - Data: "${ del(.customerCount) }", + WorkflowID: "test", + Version: "1", + Data: "${ del(.customerCount) }", WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "invalid", }, @@ -75,3 +72,79 @@ func TestContinueAsStructLevelValidation(t *testing.T) { }) } } + +func TestContinueAsUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect ContinueAs + err string + } + testCases := []testCase{ + { + desp: "string", + data: `"1"`, + expect: ContinueAs{ + WorkflowID: "1", + }, + err: ``, + }, + { + desp: "object all field set", + data: `{"workflowId": "1", "version": "2", "data": "3", "workflowExecTimeout": {"duration": "PT1H", "interrupt": true, "runBefore": "4"}}`, + expect: ContinueAs{ + WorkflowID: "1", + Version: "2", + Data: "3", + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "PT1H", + Interrupt: true, + RunBefore: "4", + }, + }, + err: ``, + }, + { + desp: "object optional field unset", + data: `{"workflowId": "1"}`, + expect: ContinueAs{ + WorkflowID: "1", + Version: "", + Data: nil, + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "", + Interrupt: false, + RunBefore: "", + }, + }, + err: ``, + }, + { + desp: "invalid string format", + data: `"{`, + expect: ContinueAs{}, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid object format", + data: `{"workflowId": 1}`, + expect: ContinueAs{}, + err: `json: cannot unmarshal number into Go struct field continueAsForUnmarshal.workflowId of type string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v ContinueAs + err := 
json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/parser/parser_test.go b/parser/parser_test.go index b74a049..0d78ca1 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -339,14 +339,12 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.EndDataCondition{}, eventState.DataConditions[0]) endDataCondition := eventState.DataConditions[0].(*model.EndDataCondition) - assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowRef.WorkflowID) - assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.WorkflowRef.Version) + assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) + assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) assert.Equal(t, "${ del(.customerCount) }", endDataCondition.End.ContinueAs.Data) assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) - assert.Equal(t, model.InvokeKindSync, endDataCondition.End.ContinueAs.Invoke) - assert.Equal(t, "terminate", endDataCondition.End.ContinueAs.OnParentComplete) }, }, } From e83573beceaca5a776c431bffcd688e0001fbfe5 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 20 Oct 2022 09:20:57 -0300 Subject: [PATCH 029/110] fix nill state timeouts and enhance v08 test (#104) * fix nill state timeouts and enhance v08 test Signed-off-by: spolti * review update Signed-off-by: spolti * review changes Signed-off-by: spolti * correct timeout declaration Signed-off-by: spolti * review updates Signed-off-by: spolti * review additions Signed-off-by: spolti Signed-off-by: spolti --- model/action.go | 7 +- model/auth.go | 4 +- model/delay_state.go | 18 -- model/event.go | 2 - model/event_state.go | 22 ++- model/event_state_test.go | 8 +- model/retry.go | 1 - model/sleep_state.go | 2 +- model/state_exec_timeout.go | 2 +- model/states.go | 83 +++++++-- model/util.go | 3 +- model/workflow.go | 30 +--- parser/parser_test.go | 166 ++++++++++++++++-- .../workflows/eventbaseddataandswitch.sw.json | 4 +- .../workflows/eventbasedswitch.sw.json | 4 +- .../workflows/greetings-v08-spec.sw.yaml | 135 +++++++++++++- 16 files changed, 390 insertions(+), 101 deletions(-) diff --git a/model/action.go b/model/action.go index 1897854..c313415 100644 --- a/model/action.go +++ b/model/action.go @@ -50,14 +50,16 @@ type actionForUnmarshal Action // UnmarshalJSON implements json.Unmarshaler func (a *Action) UnmarshalJSON(data []byte) error { + v := actionForUnmarshal{ ActionDataFilter: ActionDataFilter{UseResults: true}, } err := json.Unmarshal(data, &v) if err != nil { - return err + return fmt.Errorf("action value '%s' is not supported, it must be an object or string", string(data)) } *a = Action(v) + return nil } @@ -101,8 +103,7 @@ func (f *FunctionRef) UnmarshalJSON(data []byte) error { } err = json.Unmarshal(data, &v) if err != nil { - // TODO: replace the error message with correct type's name - return err + return fmt.Errorf("functionRef value '%s' is not supported, it must be an object or string", string(data)) } *f = FunctionRef(v) return nil diff --git a/model/auth.go b/model/auth.go index 8f752c7..c6cd7f9 100644 --- a/model/auth.go +++ b/model/auth.go @@ -113,7 +113,7 @@ func (a *AuthDefinitions) 
UnmarshalJSON(b []byte) error { func (a *AuthDefinitions) unmarshalFile(data []byte) error { b, err := unmarshalFile(data) if err != nil { - return err + return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) } return a.unmarshalMany(b) @@ -123,7 +123,7 @@ func (a *AuthDefinitions) unmarshalMany(data []byte) error { var auths []Auth err := json.Unmarshal(data, &auths) if err != nil { - return err + return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) } a.Defs = auths diff --git a/model/delay_state.go b/model/delay_state.go index 6f506ff..8d96ebc 100644 --- a/model/delay_state.go +++ b/model/delay_state.go @@ -14,27 +14,9 @@ package model -import ( - "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidation( - DelayStateStructLevelValidation, - DelayState{}, - ) -} - // DelayState Causes the workflow execution to delay for a specified duration type DelayState struct { BaseState // Amount of time (ISO 8601 format) to delay TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` } - -// DelayStateStructLevelValidation custom validator for DelayState Struct -func DelayStateStructLevelValidation(structLevel validator.StructLevel) { - // TODO -} diff --git a/model/event.go b/model/event.go index 8e8bbef..c51fad0 100644 --- a/model/event.go +++ b/model/event.go @@ -96,10 +96,8 @@ type EventRef struct { TriggerEventRef string `json:"triggerEventRef" validate:"required"` // Reference to the unique name of a 'consumed' event definition ResultEventRef string `json:"resultEventRef" validate:"required"` - // ResultEventTimeout defines maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the actionExecutionTimeout ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // TODO: create StringOrMap structure // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. diff --git a/model/event_state.go b/model/event_state.go index ede11f9..5be0910 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -16,6 +16,7 @@ package model import ( "encoding/json" + "fmt" ) // EventState used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel @@ -23,25 +24,32 @@ type EventState struct { // TODO: EventState doesn't have usedForCompensation field. BaseState - // If true consuming one of the defined events causes its associated actions to be performed. If false all of the defined events must be consumed in order for actions to be performed + // If true consuming one of the defined events causes its associated actions to be performed. 
+ // If false all the defined events must be consumed in order for actions to be performed // Defaults to true Exclusive bool `json:"exclusive,omitempty"` // Define the events to be consumed and optional actions to be performed OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` // State specific timeouts - Timeout *EventStateTimeout `json:"timeouts,omitempty"` + Timeouts *EventStateTimeout `json:"timeouts,omitempty"` } type eventStateForUnmarshal EventState // UnmarshalJSON unmarshal EventState object from json bytes func (e *EventState) UnmarshalJSON(data []byte) error { + // var timeout EventStateTimeout + // if err := json.Unmarshal(data, &timeout); err != nil { + // return err + // } + v := eventStateForUnmarshal{ Exclusive: true, + // Timeouts: &timeout, } err := json.Unmarshal(data, &v) if err != nil { - return err + return fmt.Errorf("eventState value '%s' is not supported, it must be an object or string", string(data)) } *e = EventState(v) @@ -71,7 +79,7 @@ func (o *OnEvents) UnmarshalJSON(data []byte) error { err := json.Unmarshal(data, &v) if err != nil { - return err + return fmt.Errorf("onEvents value '%s' is not supported, it must be an object or string", string(data)) } *o = OnEvents(v) @@ -80,7 +88,7 @@ func (o *OnEvents) UnmarshalJSON(data []byte) error { // EventStateTimeout defines timeout settings for event state type EventStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/event_state_test.go b/model/event_state_test.go index b203373..33b3b80 100644 --- a/model/event_state_test.go +++ b/model/event_state_test.go @@ -44,10 +44,10 @@ func TestEventStateUnmarshalJSON(t *testing.T) { ActionMode: "parallel", }, }, - Timeout: &EventStateTimeout{ + Timeouts: &EventStateTimeout{ EventTimeout: "PT5M", ActionExecTimeout: "PT5M", - StateExecTimeout: StateExecTimeout{ + StateExecTimeout: &StateExecTimeout{ Total: "PT5M", }, }, @@ -69,10 +69,10 @@ func TestEventStateUnmarshalJSON(t *testing.T) { ActionMode: "parallel", }, }, - Timeout: &EventStateTimeout{ + Timeouts: &EventStateTimeout{ EventTimeout: "PT5M", ActionExecTimeout: "PT5M", - StateExecTimeout: StateExecTimeout{ + StateExecTimeout: &StateExecTimeout{ Total: "PT5M", }, }, diff --git a/model/retry.go b/model/retry.go index c49bce4..cd4c351 100644 --- a/model/retry.go +++ b/model/retry.go @@ -45,7 +45,6 @@ type Retry struct { Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` // Maximum number of retry attempts. MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` - // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) // TODO: make iso8601duration compatible this type Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` diff --git a/model/sleep_state.go b/model/sleep_state.go index 9ce7a0f..93798db 100644 --- a/model/sleep_state.go +++ b/model/sleep_state.go @@ -26,5 +26,5 @@ type SleepState struct { // SleepStateTimeout defines timeout settings for sleep state type SleepStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index 4391f5e..75a5c87 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -49,11 +49,11 @@ func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { var v stateExecTimeoutForUnmarshal err = json.Unmarshal(data, &v) if err != nil { - // TODO: replace the error message with correct type's name return err } *s = StateExecTimeout(v) + return nil } diff --git a/model/states.go b/model/states.go index b4e876f..8d7b244 100644 --- a/model/states.go +++ b/model/states.go @@ -16,11 +16,13 @@ package model import ( "encoding/json" + "fmt" + "k8s.io/apimachinery/pkg/util/intstr" ) const ( - //StateTypeDelay ... + // StateTypeDelay ... StateTypeDelay = "delay" // StateTypeEvent ... StateTypeEvent = "event" @@ -50,6 +52,33 @@ const ( ForEachModeTypeParallel ForEachModeType = "parallel" ) +func getActionsModelMapping(stateType string, s map[string]interface{}) (State, bool) { + switch stateType { + case StateTypeDelay: + return &DelayState{}, true + case StateTypeEvent: + return &EventState{}, true + case StateTypeOperation: + return &OperationState{}, true + case StateTypeParallel: + return &ParallelState{}, true + case StateTypeSwitch: + if _, ok := s["dataConditions"]; ok { + return &DataBasedSwitchState{}, true + } + return &EventBasedSwitchState{}, true + case StateTypeInject: + return &InjectState{}, true + case StateTypeForEach: + return &ForEachState{}, true + case StateTypeCallback: + return &CallbackState{}, true + case StateTypeSleep: + return &SleepState{}, true + } + return nil, false +} + // StateType ... type StateType string @@ -139,8 +168,8 @@ type OperationState struct { // OperationStateTimeout ... type OperationStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` } // ParallelState Consists of a number of states that are executed in parallel @@ -173,7 +202,7 @@ type InjectState struct { // InjectStateTimeout ... type InjectStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } // ForEachState ... 
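getActionsModelMapping above replaces the old package-level map of constructors: the workflow unmarshaler first reads each raw state's type (peeking at dataConditions to tell the two switch flavours apart) and only then decodes the full state into the matching struct. A trimmed-down sketch of that two-pass, type-discriminated decoding (State, DelayState and SleepState here are simplified stand-ins):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// State stands in for the SDK's State interface.
type State interface{ GetName() string }

type DelayState struct {
	Name      string `json:"name"`
	TimeDelay string `json:"timeDelay"`
}

func (d *DelayState) GetName() string { return d.Name }

type SleepState struct {
	Name     string `json:"name"`
	Duration string `json:"duration"`
}

func (s *SleepState) GetName() string { return s.Name }

// newState plays the role of getActionsModelMapping: map the "type"
// discriminator to an empty concrete state.
func newState(stateType string) (State, bool) {
	switch stateType {
	case "delay":
		return &DelayState{}, true
	case "sleep":
		return &SleepState{}, true
	}
	return nil, false
}

// parseStates decodes each raw state twice: once to read the discriminator,
// then fully into the struct chosen for it.
func parseStates(data []byte) ([]State, error) {
	var raw []json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return nil, err
	}

	states := make([]State, 0, len(raw))
	for _, r := range raw {
		var head struct {
			Type string `json:"type"`
		}
		if err := json.Unmarshal(r, &head); err != nil {
			return nil, err
		}
		s, ok := newState(head.Type)
		if !ok {
			return nil, fmt.Errorf("state %s not supported", head.Type)
		}
		if err := json.Unmarshal(r, s); err != nil {
			return nil, err
		}
		states = append(states, s)
	}
	return states, nil
}

func main() {
	states, err := parseStates([]byte(`[
		{"name": "GreetDelay", "type": "delay", "timeDelay": "PT5S"},
		{"name": "WaitForCompletionSleep", "type": "sleep", "duration": "PT5S"}
	]`))
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, s := range states {
		fmt.Printf("%T %s\n", s, s.GetName())
	}
}
```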
@@ -196,10 +225,25 @@ type ForEachState struct { Mode ForEachModeType `json:"mode,omitempty"` } +type forEachStateForUnmarshal ForEachState + +func (f *ForEachState) UnmarshalJSON(data []byte) error { + v := forEachStateForUnmarshal{ + Mode: StateTypeParallel, + } + err := json.Unmarshal(data, &v) + if err != nil { + return fmt.Errorf("forEachState value '%s' is not supported, it must be an object or string", string(data)) + } + + *f = ForEachState(v) + return nil +} + // ForEachStateTimeout ... type ForEachStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty"` } // CallbackState ... @@ -217,9 +261,9 @@ type CallbackState struct { // CallbackStateTimeout ... type CallbackStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty"` + EventTimeout string `json:"eventTimeout,omitempty"` } // BaseSwitchState ... @@ -238,7 +282,7 @@ type EventBasedSwitchState struct { Timeouts *EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` } -// UnmarshalJSON implementation for json Unmarshal function for the Eventbasedswitch type +// UnmarshalJSON implementation for json Unmarshal function for the EventBasedSwitch type func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { return err @@ -247,10 +291,12 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &eventBasedSwitch); err != nil { return err } - var rawConditions []json.RawMessage - if err := unmarshalKey("timeouts", eventBasedSwitch, &j.Timeouts); err != nil { + + if err := json.Unmarshal(eventBasedSwitch["timeouts"], &j.Timeouts); err != nil { return err } + + var rawConditions []json.RawMessage if err := json.Unmarshal(eventBasedSwitch["eventConditions"], &rawConditions); err != nil { return err } @@ -272,13 +318,14 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { } j.EventConditions[i] = condition } + return nil } // EventBasedSwitchStateTimeout ... type EventBasedSwitchStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + EventTimeout string `json:"eventTimeout,omitempty"` } // EventCondition ... 
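The switch-state conditions carry no type discriminator, so EventBasedSwitchState above (and DataBasedSwitchState in the next hunk) probe each raw condition for the keys it holds before picking a concrete struct. A standalone sketch of that key-presence dispatch (the condition types are simplified stand-ins for the SDK's end and transition conditions):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Condition stands in for the SDK's event condition interface.
type Condition interface{ isCondition() }

type TransitionCondition struct {
	EventRef   string `json:"eventRef"`
	Transition string `json:"transition"`
}

func (TransitionCondition) isCondition() {}

type EndCondition struct {
	EventRef string `json:"eventRef"`
	End      bool   `json:"end"`
}

func (EndCondition) isCondition() {}

// parseConditions probes each raw object for an "end" key to choose the
// concrete type, mirroring how eventConditions are handled above.
func parseConditions(data []byte) ([]Condition, error) {
	var raw []json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return nil, err
	}

	out := make([]Condition, 0, len(raw))
	for _, r := range raw {
		var probe map[string]json.RawMessage
		if err := json.Unmarshal(r, &probe); err != nil {
			return nil, err
		}

		if _, hasEnd := probe["end"]; hasEnd {
			var c EndCondition
			if err := json.Unmarshal(r, &c); err != nil {
				return nil, err
			}
			out = append(out, c)
			continue
		}

		var c TransitionCondition
		if err := json.Unmarshal(r, &c); err != nil {
			return nil, err
		}
		out = append(out, c)
	}
	return out, nil
}

func main() {
	conds, err := parseConditions([]byte(`[
		{"eventRef": "visaApprovedEvent", "transition": "HandleApprovedVisa"},
		{"eventRef": "visaRejectedEvent", "end": true}
	]`))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%T %T\n", conds[0], conds[1])
}
```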
@@ -333,7 +380,7 @@ type DataBasedSwitchState struct { Timeouts *DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` } -// UnmarshalJSON implementation for json Unmarshal function for the Databasedswitch type +// UnmarshalJSON implementation for json Unmarshal function for the DataBasedSwitch type func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { return err @@ -342,10 +389,10 @@ func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &dataBasedSwitch); err != nil { return err } - var rawConditions []json.RawMessage - if err := unmarshalKey("timeouts", dataBasedSwitch, &j.Timeouts); err != nil { + if err := json.Unmarshal(data, &j.Timeouts); err != nil { return err } + var rawConditions []json.RawMessage if err := json.Unmarshal(dataBasedSwitch["dataConditions"], &rawConditions); err != nil { return err } @@ -371,7 +418,7 @@ func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { // DataBasedSwitchStateTimeout ... type DataBasedSwitchStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } // DataCondition ... diff --git a/model/util.go b/model/util.go index 1cfd08b..5bc1cbf 100644 --- a/model/util.go +++ b/model/util.go @@ -17,6 +17,7 @@ package model import ( "bytes" "encoding/json" + "fmt" "net/http" "os" "path/filepath" @@ -70,7 +71,7 @@ func unmarshalString(data []byte) (string, error) { func unmarshalKey(key string, data map[string]json.RawMessage, output interface{}) error { if _, found := data[key]; found { if err := json.Unmarshal(data[key], output); err != nil { - return err + return fmt.Errorf("failed to unmarshall key '%s' with data'%s'", key, data[key]) } } return nil diff --git a/model/workflow.go b/model/workflow.go index 9f09e6f..af25ef8 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -21,6 +21,7 @@ import ( "reflect" "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) @@ -54,23 +55,6 @@ const ( UnlimitedTimeout = "unlimited" ) -var actionsModelMapping = map[string]func(state map[string]interface{}) State{ - StateTypeDelay: func(map[string]interface{}) State { return &DelayState{} }, - StateTypeEvent: func(map[string]interface{}) State { return &EventState{} }, - StateTypeOperation: func(map[string]interface{}) State { return &OperationState{} }, - StateTypeParallel: func(map[string]interface{}) State { return &ParallelState{} }, - StateTypeSwitch: func(s map[string]interface{}) State { - if _, ok := s["dataConditions"]; ok { - return &DataBasedSwitchState{} - } - return &EventBasedSwitchState{} - }, - StateTypeInject: func(map[string]interface{}) State { return &InjectState{} }, - StateTypeForEach: func(map[string]interface{}) State { return &ForEachState{} }, - StateTypeCallback: func(map[string]interface{}) State { return &CallbackState{} }, - StateTypeSleep: func(map[string]interface{}) State { return &SleepState{} }, -} - func init() { val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) } @@ -157,10 +141,13 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(rawState, &mapState); err != nil { return err } - if _, ok := actionsModelMapping[mapState["type"].(string)]; !ok { + + actionsMode, ok := getActionsModelMapping(mapState["type"].(string), mapState) + if !ok { return fmt.Errorf("state %s not 
supported", mapState["type"]) } - state := actionsModelMapping[mapState["type"].(string)](mapState) + state := actionsMode + if err := json.Unmarshal(rawState, &state); err != nil { return err } @@ -292,7 +279,8 @@ func (t *Timeouts) UnmarshalJSON(data []byte) error { type WorkflowExecTimeout struct { // Duration Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' Duration string `json:"duration,omitempty" validate:"omitempty,min=1"` - // If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is abrupted. + // If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is + // abrupted terminated. Interrupt bool `json:"interrupt,omitempty"` // Name of a workflow state to be executed before workflow instance is terminated RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` @@ -573,7 +561,7 @@ type Branch struct { // Actions to be executed in this branch Actions []Action `json:"actions" validate:"required,min=1"` // Timeouts State specific timeouts - Timeouts BranchTimeouts `json:"timeouts,omitempty"` + Timeouts *BranchTimeouts `json:"timeouts,omitempty"` } // BranchTimeouts ... diff --git a/parser/parser_test.go b/parser/parser_test.go index 0d78ca1..aad9ab4 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -82,21 +82,6 @@ func TestFromFile(t *testing.T) { assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, - }, { - "./testdata/workflows/greetings-v08-spec.sw.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Empty(t, w.Name) - assert.Empty(t, w.Start) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "custom.greeting", w.ID) - assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) - assert.NotEmpty(t, w.Functions[0]) - assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) - assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) - assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "greetingCustomFunction", w.States[0].(*model.OperationState).Actions[0].Name) - }, }, { "./testdata/workflows/eventbaseddataandswitch.sw.json", func(t *testing.T, w *model.Workflow) { @@ -105,6 +90,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) + assert.Equal(t, "PT1H", w.States[1].(*model.EventBasedSwitchState).Timeouts.EventTimeout) }, }, { "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { @@ -346,6 +332,156 @@ func TestFromFile(t *testing.T) { assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) }, + }, { + "./testdata/workflows/greetings-v08-spec.sw.yaml", + func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "custom.greeting", w.ID) + assert.Equal(t, "1.0", w.Version) + assert.Equal(t, "0.8", w.SpecVersion) + + // Workflow "name" no longer a required property + assert.Empty(t, w.Name) + // Workflow "start" no longer a required property + assert.Empty(t, 
w.Start) + + // Functions: + assert.NotEmpty(t, w.Functions[0]) + assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) + assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) + assert.Equal(t, "/path/to/my/script/greeting.ts#CustomGreeting", w.Functions[0].Operation) + + assert.NotEmpty(t, w.Functions[1]) + assert.Equal(t, "sendTextFunction", w.Functions[1].Name) + assert.Equal(t, model.FunctionTypeGraphQL, w.Functions[1].Type) + assert.Equal(t, "http://myapis.org/inboxapi.json#sendText", w.Functions[1].Operation) + + assert.NotEmpty(t, w.Functions[2]) + assert.Equal(t, "greetingFunction", w.Functions[2].Name) + assert.Empty(t, w.Functions[2].Type) + assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) + + // Delay state + assert.NotEmpty(t, w.States[0].(*model.DelayState).TimeDelay) + assert.Equal(t, "GreetDelay", w.States[0].GetName()) + assert.Equal(t, model.StateType("delay"), w.States[0].GetType()) + assert.Equal(t, "StoreCarAuctionBid", w.States[0].(*model.DelayState).Transition.NextState) + + // Event state + assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents) + assert.Equal(t, "StoreCarAuctionBid", w.States[1].GetName()) + assert.Equal(t, model.StateType("event"), w.States[1].GetType()) + assert.Equal(t, true, w.States[1].(*model.EventState).Exclusive) + assert.NotEmpty(t, true, w.States[1].(*model.EventState).OnEvents[0]) + assert.Equal(t, true, w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.UseData) + assert.Equal(t, "test", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.Data) + assert.Equal(t, "testing", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.ToStateData) + assert.Equal(t, model.ActionModeParallel, w.States[1].(*model.EventState).OnEvents[0].ActionMode) + assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents[0].Actions[0].FunctionRef) + assert.Equal(t, "PT1S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Single) + assert.Equal(t, "PT1H", w.States[1].(*model.EventState).Timeouts.EventTimeout) + assert.Equal(t, "PT3S", w.States[1].(*model.EventState).Timeouts.ActionExecTimeout) + + // Parallel state + assert.NotEmpty(t, w.States[2].(*model.ParallelState).Branches) + assert.Equal(t, "PT5H", w.States[2].(*model.ParallelState).Branches[0].Timeouts.ActionExecTimeout) + assert.Equal(t, "PT6M", w.States[2].(*model.ParallelState).Branches[0].Timeouts.BranchExecTimeout) + assert.Equal(t, "ParallelExec", w.States[2].GetName()) + assert.Equal(t, model.StateType("parallel"), w.States[2].GetType()) + assert.Equal(t, model.CompletionType("allOf"), w.States[2].(*model.ParallelState).CompletionType) + assert.Equal(t, "PT6M", w.States[2].(*model.ParallelState).Timeouts.BranchExecTimeout) + assert.Equal(t, "PT1S", w.States[2].(*model.ParallelState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[2].(*model.ParallelState).Timeouts.StateExecTimeout.Single) + + // Switch state + assert.NotEmpty(t, w.States[3].(*model.EventBasedSwitchState).EventConditions) + assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].GetName()) + assert.Equal(t, model.StateType("switch"), w.States[3].GetType()) + assert.Equal(t, "PT1H", w.States[3].(*model.EventBasedSwitchState).Timeouts.EventTimeout) + assert.Equal(t, "PT1S", w.States[3].(*model.EventBasedSwitchState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", 
w.States[3].(*model.EventBasedSwitchState).Timeouts.StateExecTimeout.Single) + assert.Equal(t, &model.Transition{ + NextState: "HandleNoVisaDecision", + }, w.States[3].(*model.EventBasedSwitchState).DefaultCondition.Transition) + + // DataBasedSwitchState + dataBased := w.States[4].(*model.DataBasedSwitchState) + assert.NotEmpty(t, dataBased.DataConditions) + assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].GetName()) + dataCondition := dataBased.DataConditions[0].(*model.TransitionDataCondition) + assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) + assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) + assert.Equal(t, &model.Transition{ + NextState: "RejectApplication", + }, w.States[4].(*model.DataBasedSwitchState).DefaultCondition.Transition) + assert.Equal(t, "PT1S", w.States[4].(*model.DataBasedSwitchState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[4].(*model.DataBasedSwitchState).Timeouts.StateExecTimeout.Single) + + // operation state + assert.NotEmpty(t, w.States[5].(*model.OperationState).Actions) + assert.Equal(t, "GreetSequential", w.States[5].GetName()) + assert.Equal(t, model.StateType("operation"), w.States[5].GetType()) + assert.Equal(t, model.ActionModeSequential, w.States[5].(*model.OperationState).ActionMode) + assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].Name) + assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].Name) + assert.NotNil(t, w.States[5].(*model.OperationState).Actions[0].FunctionRef) + assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.Equal(t, "example", w.States[5].(*model.OperationState).Actions[0].EventRef.TriggerEventRef) + assert.Equal(t, "example", w.States[5].(*model.OperationState).Actions[0].EventRef.ResultEventRef) + assert.Equal(t, "PT1H", w.States[5].(*model.OperationState).Actions[0].EventRef.ResultEventTimeout) + assert.Equal(t, "PT1H", w.States[5].(*model.OperationState).Timeouts.ActionExecTimeout) + assert.Equal(t, "PT1S", w.States[5].(*model.OperationState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[5].(*model.OperationState).Timeouts.StateExecTimeout.Single) + + // forEach state + assert.NotEmpty(t, w.States[6].(*model.ForEachState).Actions) + assert.Equal(t, "SendTextForHighPriority", w.States[6].GetName()) + assert.Equal(t, model.ForEachModeTypeParallel, w.States[6].(*model.ForEachState).Mode) + assert.Equal(t, model.StateType("foreach"), w.States[6].GetType()) + assert.Equal(t, "${ .messages }", w.States[6].(*model.ForEachState).InputCollection) + assert.NotNil(t, w.States[6].(*model.ForEachState).Actions) + assert.Equal(t, "test", w.States[6].(*model.ForEachState).Actions[0].Name) + assert.NotNil(t, w.States[6].(*model.ForEachState).Actions[0].FunctionRef) + assert.Equal(t, "sendTextFunction", w.States[6].(*model.ForEachState).Actions[0].FunctionRef.RefName) + assert.Equal(t, "example1", w.States[6].(*model.ForEachState).Actions[0].EventRef.TriggerEventRef) + assert.Equal(t, "example1", w.States[6].(*model.ForEachState).Actions[0].EventRef.ResultEventRef) + assert.Equal(t, "PT12H", w.States[6].(*model.ForEachState).Actions[0].EventRef.ResultEventTimeout) + assert.Equal(t, "PT11H", w.States[6].(*model.ForEachState).Timeouts.ActionExecTimeout) + assert.Equal(t, "PT11S", w.States[6].(*model.ForEachState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22S", 
w.States[6].(*model.ForEachState).Timeouts.StateExecTimeout.Single) + + // Inject state + assert.Equal(t, map[string]interface{}{"result": "Hello World, last state!"}, w.States[7].(*model.InjectState).Data) + assert.Equal(t, "HelloInject", w.States[7].GetName()) + assert.Equal(t, model.StateType("inject"), w.States[7].GetType()) + assert.Equal(t, "PT11M", w.States[7].(*model.InjectState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22M", w.States[7].(*model.InjectState).Timeouts.StateExecTimeout.Single) + + // callback state + assert.NotEmpty(t, w.States[8].(*model.CallbackState).Action) + assert.Equal(t, "CheckCreditCallback", w.States[8].GetName()) + assert.Equal(t, model.StateType("callback"), w.States[8].GetType()) + assert.Equal(t, "callCreditCheckMicroservice", w.States[8].(*model.CallbackState).Action.FunctionRef.RefName) + assert.Equal(t, map[string]interface{}{"customer": "${ .customer }"}, w.States[8].(*model.CallbackState).Action.FunctionRef.Arguments) + assert.Equal(t, "PT10S", w.States[8].(*model.CallbackState).Action.Sleep.Before) + assert.Equal(t, "PT20S", w.States[8].(*model.CallbackState).Action.Sleep.After) + assert.Equal(t, "PT150M", w.States[8].(*model.CallbackState).Timeouts.ActionExecTimeout) + assert.Equal(t, "PT34S", w.States[8].(*model.CallbackState).Timeouts.EventTimeout) + assert.Equal(t, "PT115M", w.States[8].(*model.CallbackState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22M", w.States[8].(*model.CallbackState).Timeouts.StateExecTimeout.Single) + + // sleepState + assert.NotEmpty(t, w.States[9].(*model.SleepState).Duration) + assert.Equal(t, "WaitForCompletionSleep", w.States[9].GetName()) + assert.Equal(t, model.StateType("sleep"), w.States[9].GetType()) + assert.Equal(t, "PT5S", w.States[9].(*model.SleepState).Duration) + assert.NotNil(t, w.States[9].(*model.SleepState).Timeouts) + assert.Equal(t, "PT100S", w.States[9].(*model.SleepState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT200S", w.States[9].(*model.SleepState).Timeouts.StateExecTimeout.Single) + assert.Equal(t, &model.Transition{ + NextState: "GetJobStatus", + }, w.States[9].(*model.SleepState).Transition) + }, }, } for _, file := range files { diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json index 8da1692..e36258f 100644 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ b/parser/testdata/workflows/eventbaseddataandswitch.sw.json @@ -47,7 +47,9 @@ } } ], - "eventTimeout": "PT1H", + "timeouts": { + "eventTimeout": "PT1H" + }, "defaultCondition": { "transition": { "nextState": "HandleNoVisaDecision" diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json index 03563d9..9a11e32 100644 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ b/parser/testdata/workflows/eventbasedswitch.sw.json @@ -37,7 +37,9 @@ } } ], - "eventTimeout": "PT1H", + "timeouts": { + "eventTimeout": "PT1H" + }, "defaultCondition": { "transition": { "nextState": "HandleNoVisaDecision" diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 919f153..7d69a81 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -14,17 +14,92 @@ id: custom.greeting version: '1.0' -description: Greet Someone specVersion: "0.8" +description: Greet Someone +# name: Greeting example #can be empty +# start: Greet 
#can be empty functions: - name: greetingCustomFunction operation: /path/to/my/script/greeting.ts#CustomGreeting + # Support custom function type definition type: custom - name: sendTextFunction operation: http://myapis.org/inboxapi.json#sendText - + type: graphql + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting states: - - name: Greet + - name: GreetDelay + type: delay + timeDelay: PT5S + transition: + nextState: StoreCarAuctionBid + - name: StoreCarAuctionBid + type: event + exclusive: true + onEvents: + - eventRefs: + - CarBidEvent + eventDataFilter: + useData: true + data: "test" + toStateData: "testing" + actionMode: parallel + actions: + - functionRef: + refName: StoreBidFunction + arguments: + bid: "${ .bid }" + timeouts: + eventTimeout: PT1H + actionExecTimeout: PT3S + stateExecTimeout: + total: PT1S + single: PT2S + - name: ParallelExec + type: parallel + completionType: allOf + branches: + - name: ShortDelayBranch + actions: + - subFlowRef: shortdelayworkflowid + timeouts: + actionExecTimeout: "PT5H" + branchExecTimeout: "PT6M" + - name: LongDelayBranch + actions: + - subFlowRef: longdelayworkflowid + timeouts: + branchExecTimeout: "PT6M" + stateExecTimeout: + total: PT1S + single: PT2S + - name: CheckVisaStatusSwitchEventBased + type: switch + eventConditions: + - eventRef: visaApprovedEvent + transition: HandleApprovedVisa + - eventRef: visaRejectedEvent + transition: HandleRejectedVisa + timeouts: + eventTimeout: PT1H + stateExecTimeout: + total: PT1S + single: PT2S + defaultCondition: + transition: HandleNoVisaDecision + - name: CheckApplicationSwitchDataBased + type: switch + dataConditions: + - condition: "${ .applicants | .age >= 18 }" + transition: + nextState: StartApplication + defaultCondition: + transition: RejectApplication + stateExecTimeout: + total: PT1S + single: PT2S + - name: GreetSequential type: operation actionMode: sequential actions: @@ -35,10 +110,16 @@ states: name: "${ .greet | .name }" actionDataFilter: dataResultsPath: "${ .payload | .greeting }" - - eventRef: + eventRef: triggerEventRef: example resultEventRef: example + # Added "resultEventTimeout" for action eventref resultEventTimeout: PT1H + timeouts: + actionExecTimeout: PT1H + stateExecTimeout: + total: PT1S + single: PT2S stateDataFilter: dataOutputPath: "${ .greeting }" transition: SendTextForHighPriority @@ -46,8 +127,52 @@ states: type: foreach inputCollection: "${ .messages }" actions: - - functionRef: + - name: test + functionRef: refName: sendTextFunction arguments: message: "${ .singlemessage }" + eventRef: + triggerEventRef: example1 + resultEventRef: example1 + # Added "resultEventTimeout" for action eventref + resultEventTimeout: PT12H + timeouts: + actionExecTimeout: PT11H + stateExecTimeout: + total: PT11S + single: PT22S + - name: HelloInject + type: inject + data: + result: Hello World, last state! 
+ timeouts: + stateExecTimeout: + total: PT11M + single: PT22M + - name: CheckCreditCallback + type: callback + action: + functionRef: + refName: callCreditCheckMicroservice + arguments: + customer: "${ .customer }" + sleep: + before: PT10S + after: PT20S + eventRef: CreditCheckCompletedEvent + timeouts: + actionExecTimeout: PT150M + eventTimeout: PT34S + stateExecTimeout: + total: PT115M + single: PT22M + - name: WaitForCompletionSleep + type: sleep + duration: PT5S + transition: GetJobStatus + timeouts: + stateExecTimeout: + total: PT100S + single: PT200S end: true From 9055243d6018db9447a23dd1b934d0186d3bf08a Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Fri, 21 Oct 2022 22:28:40 +0800 Subject: [PATCH 030/110] fix(103): support refs to yaml format file (#105) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- go.mod | 1 + go.sum | 2 + hack/conv/main.go | 128 ++++++++++++++++++ model/util.go | 13 ++ .../applicationrequest-issue103.json | 79 +++++++++++ parser/testdata/workflows/urifiles/auth.yaml | 23 ++++ test/path.go | 53 ++++++++ test/path_test.go | 31 +++++ 8 files changed, 330 insertions(+) create mode 100644 hack/conv/main.go create mode 100644 parser/testdata/workflows/applicationrequest-issue103.json create mode 100644 parser/testdata/workflows/urifiles/auth.yaml create mode 100644 test/path.go create mode 100644 test/path_test.go diff --git a/go.mod b/go.mod index af6d842..426549c 100644 --- a/go.mod +++ b/go.mod @@ -4,6 +4,7 @@ go 1.19 require ( github.com/go-playground/validator/v10 v10.11.1 + github.com/pkg/errors v0.9.1 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.7.0 k8s.io/apimachinery v0.25.1 diff --git a/go.sum b/go.sum index a82bf18..9657239 100644 --- a/go.sum +++ b/go.sum @@ -30,6 +30,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= diff --git a/hack/conv/main.go b/hack/conv/main.go new file mode 100644 index 0000000..e70e738 --- /dev/null +++ b/hack/conv/main.go @@ -0,0 +1,128 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + "encoding/json" + "errors" + "log" + "os" + "path" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/serverlessworkflow/sdk-go/v2/test" +) + +func convert(i interface{}) interface{} { + switch x := i.(type) { + case map[interface{}]interface{}: + m2 := map[string]interface{}{} + for k, v := range x { + m2[k.(string)] = convert(v) + } + return m2 + case []interface{}: + for i, v := range x { + x[i] = convert(v) + } + } + return i +} + +func transform( + files []string, + srcFormat string, + destFormat string, + unmarshal func(data []byte, out interface{}) error, + marshal func(in interface{}) ([]byte, error), +) { + for _, srcFile := range files { + if !strings.HasSuffix(srcFile, srcFormat) { + log.Printf("%s is not %s format, skip it", srcFile, srcFormat) + continue + } + + destFile := srcFile[0:len(srcFile)-len(srcFormat)] + destFormat + if _, err := os.Stat(destFile); err == nil { + log.Printf("ERR: the target file %v exists, skip it", destFile) + continue + } else if !errors.Is(err, os.ErrNotExist) { + log.Printf("ERR: stat target file %v, %v, skip it", destFile, err) + continue + } + + srcData, err := os.ReadFile(filepath.Clean(srcFile)) + if err != nil { + log.Printf("ERR: cannot read file %v, %v, skip it", srcFile, err) + continue + } + + var srcObj interface{} + err = unmarshal(srcData, &srcObj) + if err != nil { + log.Printf("ERR: cannot unmarshal file %v to %s, %v, skip it", srcFile, srcFormat, err) + continue + } + + destObj := convert(srcObj) + destData, err := marshal(destObj) + if err != nil { + log.Printf("ERR: cannot marshal fild %v data to %v, %v, skip it", srcFile, destFormat, err) + continue + } + + err = os.WriteFile(destFile, destData, 0600) + if err != nil { + log.Printf("ERR: cannot write to file %v, %v, skip it", destFile, err) + continue + } + + log.Printf("convert %v to %v done", srcFile, destFile) + } +} + +func main() { + // TODO: make this as argument + dir := path.Join(test.CurrentProjectPath(), "parser", "testdata", "workflows", "urifiles") + dirEntries, err := os.ReadDir(dir) + if err != nil { + panic(err) + } + + files := make([]string, 0, len(dirEntries)) + for _, entry := range dirEntries { + if entry.IsDir() { + log.Printf("%s is directory, skip it", entry.Name()) + continue + } + + files = append(files, path.Join(dir, entry.Name())) + } + + log.Printf("found %v files", len(files)) + + // First, convert all json format files to yaml + log.Printf("start to convert all json format files to yaml format") + transform(files, ".json", ".yaml", json.Unmarshal, yaml.Marshal) + + // Second, convert all yaml format files to json + log.Printf("start to convert all yaml format files to json format") + transform(files, ".yaml", ".json", yaml.Unmarshal, func(in interface{}) ([]byte, error) { + return json.MarshalIndent(in, "", " ") + }) +} diff --git a/model/util.go b/model/util.go index 5bc1cbf..5e4e102 100644 --- a/model/util.go +++ b/model/util.go @@ -22,6 +22,8 @@ import ( "os" "path/filepath" "strings" + + "sigs.k8s.io/yaml" ) const prefix = "file:/" @@ -50,6 +52,17 @@ func getBytesFromFile(s string) (b []byte, err error) { if b, err = os.ReadFile(filepath.Clean(s)); err != nil { return nil, err } + + // TODO: optimize this + // NOTE: In specification, we can declared independently definitions with another file format, so + // we must convert independently yaml source to json format data before unmarshal. 
+ if strings.HasSuffix(s, ".yaml") || strings.HasSuffix(s, ".yml") { + b, err = yaml.YAMLToJSON(b) + if err != nil { + return nil, err + } + } + return b, nil } diff --git a/parser/testdata/workflows/applicationrequest-issue103.json b/parser/testdata/workflows/applicationrequest-issue103.json new file mode 100644 index 0000000..9b8c0a2 --- /dev/null +++ b/parser/testdata/workflows/applicationrequest-issue103.json @@ -0,0 +1,79 @@ +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": "./testdata/workflows/urifiles/auth.yaml", + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" + } + ], + "retries": [ + { + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" + } + ], + "states": [ + { + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .applicants | .age >= 18 }", + "transition": { + "nextState": "StartApplication" + } + }, + { + "condition": "${ .applicants | .age < 18 }", + "transition": { + "nextState": "RejectApplication" + } + } + ], + "default": { + "transition": { + "nextState": "RejectApplication" + } + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "startApplicationWorkflowId" + } + } + ], + "end": { + "terminate": true + } + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "parameters": { + "applicant": "${ .applicant }" + } + } + } + ], + "end": { + "terminate": true + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/urifiles/auth.yaml b/parser/testdata/workflows/urifiles/auth.yaml new file mode 100644 index 0000000..14ba4e2 --- /dev/null +++ b/parser/testdata/workflows/urifiles/auth.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +- name: testAuth + properties: + token: test_token + scheme: bearer +- name: testAuth2 + properties: + password: test_pwd + username: test_user + scheme: basic diff --git a/test/path.go b/test/path.go new file mode 100644 index 0000000..e9ff5e4 --- /dev/null +++ b/test/path.go @@ -0,0 +1,53 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package test + +import ( + "os" + "path/filepath" + "runtime" + + "github.com/pkg/errors" +) + +// CurrentProjectPath get the project root path +func CurrentProjectPath() string { + path := currentFilePath() + + ppath, err := filepath.Abs(filepath.Join(filepath.Dir(path), "../")) + if err != nil { + panic(errors.Wrapf(err, "Get current project path with %s failed", path)) + } + + f, err := os.Stat(ppath) + if err != nil { + panic(errors.Wrapf(err, "Stat project path %v failed", ppath)) + } + + if f.Mode()&os.ModeSymlink != 0 { + fpath, err := os.Readlink(ppath) + if err != nil { + panic(errors.Wrapf(err, "Readlink from path %v failed", fpath)) + } + ppath = fpath + } + + return ppath +} + +func currentFilePath() string { + _, file, _, _ := runtime.Caller(1) + return file +} diff --git a/test/path_test.go b/test/path_test.go new file mode 100644 index 0000000..4ccb672 --- /dev/null +++ b/test/path_test.go @@ -0,0 +1,31 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCurrentProjectPath(t *testing.T) { + t.Run("normal test", func(t *testing.T) { + path := CurrentProjectPath() + + // NOTE: the '/code' path is used with code pipeline. + // When code running in the pipeline, the codebase will copy to /home/code directory. 
+ assert.Regexp(t, "(/sdk-go$)|(/code$)", path) + }) +} From 7b22226ae24544e78c1a20ae3c4e05b12b3aed80 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 24 Oct 2022 21:46:14 +0800 Subject: [PATCH 031/110] refactor(*): cleanup unused commented code (#116) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/event_state.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/model/event_state.go b/model/event_state.go index 5be0910..ba6eb9e 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -38,14 +38,8 @@ type eventStateForUnmarshal EventState // UnmarshalJSON unmarshal EventState object from json bytes func (e *EventState) UnmarshalJSON(data []byte) error { - // var timeout EventStateTimeout - // if err := json.Unmarshal(data, &timeout); err != nil { - // return err - // } - v := eventStateForUnmarshal{ Exclusive: true, - // Timeouts: &timeout, } err := json.Unmarshal(data, &v) if err != nil { From 5a0a34fe95ae980901cbff27f37b746faf11088e Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 24 Oct 2022 21:53:17 +0800 Subject: [PATCH 032/110] feat(*): sync examples to v0.8 spec (#100) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/states.go | 7 +- model/workflow.go | 1 + parser/eventdefs.yml | 23 ++ parser/functiondefs.json | 16 + parser/parser_test.go | 1 + .../workflows/VetAppointmentWorkflow.json | 45 +++ .../applicationrequest-issue16.sw.yaml | 2 +- .../workflows/applicationrequest-issue69.json | 2 +- .../workflows/applicationrequest.json | 140 ++++----- .../applicationrequest.multiauth.json | 2 +- .../workflows/applicationrequest.rp.json | 2 +- .../workflows/applicationrequest.url.json | 2 +- parser/testdata/workflows/checkInbox.json | 53 ++++ parser/testdata/workflows/checkcarvitals.json | 60 ++++ .../workflows/checkinbox.cron-test.sw.yaml | 2 +- parser/testdata/workflows/checkinbox.sw.yaml | 2 +- .../customerbankingtransactions.json | 43 +++ .../workflows/customercreditcheck.json | 92 ++++++ .../workflows/eventbaseddataandswitch.sw.json | 2 +- .../workflows/eventbasedgreeting.sw.json | 100 +++--- .../workflows/eventbasedgreeting.sw.p.json | 2 +- .../eventbasedgreetingexclusive.sw.json | 2 +- .../eventbasedgreetingnonexclusive.sw.json | 2 +- .../workflows/eventbasedswitch.sw.json | 2 +- .../workflows/eventbasedswitchstate.json | 70 +++++ .../testdata/workflows/fillglassofwater.json | 48 +++ .../workflows/finalizeCollegeApplication.json | 74 +++++ .../greetings-constants-file.sw.yaml | 2 +- .../workflows/greetings-secret-file.sw.yaml | 2 +- .../workflows/greetings-secret.sw.yaml | 2 +- parser/testdata/workflows/greetings.sw.json | 64 ++-- parser/testdata/workflows/greetings.sw.yaml | 2 +- .../workflows/greetings_sleep.sw.json | 2 +- .../workflows/handleCarAuctionBid.json | 49 +++ parser/testdata/workflows/helloworld.json | 18 ++ parser/testdata/workflows/jobmonitoring.json | 127 ++++++++ .../testdata/workflows/onboardcustomer.json | 25 ++ parser/testdata/workflows/parallelexec.json | 34 ++ .../workflows/patientVitalsWorkflow.json | 110 +++++++ .../workflows/patientonboarding.sw.yaml | 22 +- .../workflows/paymentconfirmation.json | 96 ++++++ .../workflows/provisionorders.sw.json | 172 +++++----- .../workflows/purchaseorderworkflow.sw.json | 294 +++++++++--------- .../roomreadings.timeouts.file.sw.json | 2 +- .../workflows/roomreadings.timeouts.sw.json | 153 ++++----- .../workflows/sendcloudeventonprovision.json | 47 +++ 
.../testdata/workflows/sendcustomeremail.json | 32 ++ .../testdata/workflows/solvemathproblems.json | 37 +++ parser/testdata/workflows/vitalscheck.json | 53 ++++ .../applicationrequest-issue44.json | 2 +- .../applicationrequest-issue74.json | 2 +- ...pplicationrequest.auth.invalid.format.json | 2 +- .../applicationrequest.authdupl.json | 2 +- 53 files changed, 1651 insertions(+), 499 deletions(-) create mode 100644 parser/eventdefs.yml create mode 100644 parser/functiondefs.json create mode 100644 parser/testdata/workflows/VetAppointmentWorkflow.json create mode 100644 parser/testdata/workflows/checkInbox.json create mode 100644 parser/testdata/workflows/checkcarvitals.json create mode 100644 parser/testdata/workflows/customerbankingtransactions.json create mode 100644 parser/testdata/workflows/customercreditcheck.json create mode 100644 parser/testdata/workflows/eventbasedswitchstate.json create mode 100644 parser/testdata/workflows/fillglassofwater.json create mode 100644 parser/testdata/workflows/finalizeCollegeApplication.json create mode 100644 parser/testdata/workflows/handleCarAuctionBid.json create mode 100644 parser/testdata/workflows/helloworld.json create mode 100644 parser/testdata/workflows/jobmonitoring.json create mode 100644 parser/testdata/workflows/onboardcustomer.json create mode 100644 parser/testdata/workflows/parallelexec.json create mode 100644 parser/testdata/workflows/patientVitalsWorkflow.json create mode 100644 parser/testdata/workflows/paymentconfirmation.json create mode 100644 parser/testdata/workflows/sendcloudeventonprovision.json create mode 100644 parser/testdata/workflows/sendcustomeremail.json create mode 100644 parser/testdata/workflows/solvemathproblems.json create mode 100644 parser/testdata/workflows/vitalscheck.json diff --git a/model/states.go b/model/states.go index 8d7b244..69722aa 100644 --- a/model/states.go +++ b/model/states.go @@ -292,8 +292,11 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { return err } - if err := json.Unmarshal(eventBasedSwitch["timeouts"], &j.Timeouts); err != nil { - return err + eventBaseTimeoutsRawMessage, ok := eventBasedSwitch["timeouts"] + if ok { + if err := json.Unmarshal(eventBaseTimeoutsRawMessage, &j.Timeouts); err != nil { + return err + } } var rawConditions []json.RawMessage diff --git a/model/workflow.go b/model/workflow.go index af25ef8..e8b0b2a 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -164,6 +164,7 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { if nestedData, err = getBytesFromFile(s); err != nil { return err } + m := make(map[string][]Event) if err := json.Unmarshal(nestedData, &m); err != nil { return err diff --git a/parser/eventdefs.yml b/parser/eventdefs.yml new file mode 100644 index 0000000..6541662 --- /dev/null +++ b/parser/eventdefs.yml @@ -0,0 +1,23 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +events: +- name: PaymentReceivedEvent + type: payment.receive + source: paymentEventSource + correlation: + - contextAttributeName: accountId +- name: ConfirmationCompletedEvent + type: payment.confirmation + kind: produced \ No newline at end of file diff --git a/parser/functiondefs.json b/parser/functiondefs.json new file mode 100644 index 0000000..f9a3b06 --- /dev/null +++ b/parser/functiondefs.json @@ -0,0 +1,16 @@ +{ + "functions": [ + { + "name": "checkFundsAvailability", + "operation": "file://myapis/billingapis.json#checkFunds" + }, + { + "name": "sendSuccessEmail", + "operation": "file://myapis/emailapis.json#paymentSuccess" + }, + { + "name": "sendInsufficientFundsEmail", + "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" + } + ] +} \ No newline at end of file diff --git a/parser/parser_test.go b/parser/parser_test.go index aad9ab4..59598b5 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -31,6 +31,7 @@ func TestBasicValidation(t *testing.T) { for _, file := range files { if !file.IsDir() { workflow, err := FromFile(filepath.Join(rootPath, file.Name())) + if assert.NoError(t, err, "Test File %s", file.Name()) { assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) diff --git a/parser/testdata/workflows/VetAppointmentWorkflow.json b/parser/testdata/workflows/VetAppointmentWorkflow.json new file mode 100644 index 0000000..f6c0d43 --- /dev/null +++ b/parser/testdata/workflows/VetAppointmentWorkflow.json @@ -0,0 +1,45 @@ +{ + "id": "VetAppointmentWorkflow", + "name": "Vet Appointment Workflow", + "description": "Vet service call via events", + "version": "1.0", + "specVersion": "0.8", + "start": "MakeVetAppointmentState", + "events": [ + { + "name": "MakeVetAppointment", + "source": "VetServiceSource", + "type": "events.vet.appointments", + "kind": "produced" + }, + { + "name": "VetAppointmentInfo", + "source": "VetServiceSource", + "type": "events.vet.appointments", + "kind": "consumed" + } + ], + "states": [ + { + "name": "MakeVetAppointmentState", + "type": "operation", + "actions": [ + { + "name": "MakeAppointmentAction", + "eventRef": { + "triggerEventRef": "MakeVetAppointment", + "data": "${ .patientInfo }", + "resultEventRef": "VetAppointmentInfo" + }, + "actionDataFilter": { + "results": "${ .appointmentInfo }" + } + } + ], + "timeouts": { + "actionExecTimeout": "PT15M" + }, + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml index a2e19a5..395ac8b 100644 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml @@ -17,7 +17,7 @@ version: '1.0' name: Applicant Request Decision Workflow description: Determine if applicant request is valid start: CheckApplication -specVersion: "0.7" +specVersion: "0.8" functions: - name: sendRejectionEmailFunction operation: http://myapis.org/applicationapi.json#emailRejection diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json index 2b2cec2..876faec 100644 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ b/parser/testdata/workflows/applicationrequest-issue69.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", - "specVersion": 
"0.7", + "specVersion": "0.8", "auth": "./testdata/workflows/urifiles/auth.json", "functions": [ { diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json index ee43120..674532a 100644 --- a/parser/testdata/workflows/applicationrequest.json +++ b/parser/testdata/workflows/applicationrequest.json @@ -1,85 +1,75 @@ { - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ + "id": "applicantrequest", + "version": "1.0", + "specVersion": "0.8", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "functions": [ { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/applicationapi.json#emailRejection" } - ], - "default": { - "transition": { - "nextState": "RejectApplication" + ], + "auth": [ + { + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ + ], + "retries": [ { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ + ], + "states": [ { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" + "name": "CheckApplication", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .applicants | .age >= 18 }", + "transition": "StartApplication" + }, + { + "condition": "${ .applicants | .age < 18 }", + "transition": "RejectApplication" + } + ], + "defaultCondition": { + "transition": "RejectApplication" } - } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": "startApplicationWorkflowId" + } + ], + "end": true + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "arguments": { + "applicant": "${ .applicant }" + } + } + } + ], + "end": true } - ], - "end": { - "terminate": true - } - } - ] + ] } \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json index 958c473..0b7d5e9 100644 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ b/parser/testdata/workflows/applicationrequest.multiauth.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": 
"CheckApplication", - "specVersion": "0.7", + "specVersion": "0.8", "auth": [ { "name": "testAuth", diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json index bc71b19..52b18a8 100644 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ b/parser/testdata/workflows/applicationrequest.rp.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "CheckApplication" }, diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json index 3dde238..0a9be7a 100644 --- a/parser/testdata/workflows/applicationrequest.url.json +++ b/parser/testdata/workflows/applicationrequest.url.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", - "specVersion": "0.7", + "specVersion": "0.8", "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestfunctions.json", "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json", "start": { diff --git a/parser/testdata/workflows/checkInbox.json b/parser/testdata/workflows/checkInbox.json new file mode 100644 index 0000000..0256a8e --- /dev/null +++ b/parser/testdata/workflows/checkInbox.json @@ -0,0 +1,53 @@ +{ + "id": "checkInbox", + "name": "Check Inbox Workflow", + "version": "1.0", + "specVersion": "0.8", + "description": "Periodically Check Inbox", + "start": { + "stateName": "CheckInbox", + "schedule": { + "cron": "0 0/15 * * * ?" 
+ } + }, + "functions": [ + { + "name": "checkInboxFunction", + "operation": "http://myapis.org/inboxapi.json#checkNewMessages" + }, + { + "name": "sendTextFunction", + "operation": "http://myapis.org/inboxapi.json#sendText" + } + ], + "states": [ + { + "name": "CheckInbox", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": "checkInboxFunction" + } + ], + "transition": "SendTextForHighPriority" + }, + { + "name": "SendTextForHighPriority", + "type": "foreach", + "inputCollection": "${ .messages }", + "iterationParam": "singlemessage", + "actions": [ + { + "functionRef": { + "refName": "sendTextFunction", + "arguments": { + "message": "${ .singlemessage }" + } + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/checkcarvitals.json b/parser/testdata/workflows/checkcarvitals.json new file mode 100644 index 0000000..a0f14ef --- /dev/null +++ b/parser/testdata/workflows/checkcarvitals.json @@ -0,0 +1,60 @@ +{ + "id": "checkcarvitals", + "name": "Check Car Vitals Workflow", + "version": "1.0", + "specVersion": "0.8", + "start": "WhenCarIsOn", + "states": [ + { + "name": "WhenCarIsOn", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "CarTurnedOnEvent" + ] + } + ], + "transition": "DoCarVitalChecks" + }, + { + "name": "DoCarVitalChecks", + "type": "operation", + "actions": [ + { + "subFlowRef": "vitalscheck", + "sleep": { + "after": "PT1S" + } + } + ], + "transition": "CheckContinueVitalChecks" + }, + { + "name": "CheckContinueVitalChecks", + "type": "switch", + "eventConditions": [ + { + "name": "Car Turned Off Condition", + "eventRef": "CarTurnedOffEvent", + "end": true + } + ], + "defaultCondition": { + "transition": "DoCarVitalChecks" + } + } + ], + "events": [ + { + "name": "CarTurnedOnEvent", + "type": "car.events", + "source": "my/car" + }, + { + "name": "CarTurnedOffEvent", + "type": "car.events", + "source": "my/car" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml index 8992626..0729e80 100644 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml @@ -16,7 +16,7 @@ id: checkInbox name: Check Inbox Workflow description: Periodically Check Inbox version: '1.0' -specVersion: "0.7" +specVersion: "0.8" start: stateName: CheckInbox schedule: diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml index b6be17f..e42d9a2 100644 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ b/parser/testdata/workflows/checkinbox.sw.yaml @@ -16,7 +16,7 @@ id: checkInbox name: Check Inbox Workflow description: Periodically Check Inbox version: '1.0' -specVersion: "0.7" +specVersion: "0.8" start: stateName: CheckInbox schedule: diff --git a/parser/testdata/workflows/customerbankingtransactions.json b/parser/testdata/workflows/customerbankingtransactions.json new file mode 100644 index 0000000..933c7e4 --- /dev/null +++ b/parser/testdata/workflows/customerbankingtransactions.json @@ -0,0 +1,43 @@ +{ + "id": "customerbankingtransactions", + "name": "Customer Banking Transactions Workflow", + "version": "1.0", + "specVersion": "0.8", + "autoRetries": true, + "constants": { + "largetxamount": 5000 + }, + "states": [ + { + "name": "ProcessTransactions", + "type": "foreach", + "inputCollection": "${ .customer.transactions }", + "iterationParam": "${ .tx }", + "actions": [ + { + "name": 
"Process Larger Transaction", + "functionRef": "Banking Service - Larger Tx", + "condition": "${ .tx >= $CONST.largetxamount }" + }, + { + "name": "Process Smaller Transaction", + "functionRef": "Banking Service - Smaller Tx", + "condition": "${ .tx < $CONST.largetxamount }" + } + ], + "end": true + } + ], + "functions": [ + { + "name": "Banking Service - Larger Tx", + "type": "asyncapi", + "operation": "banking.yaml#largerTransation" + }, + { + "name": "Banking Service - Smaller T", + "type": "asyncapi", + "operation": "banking.yaml#smallerTransation" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/customercreditcheck.json b/parser/testdata/workflows/customercreditcheck.json new file mode 100644 index 0000000..d19c009 --- /dev/null +++ b/parser/testdata/workflows/customercreditcheck.json @@ -0,0 +1,92 @@ +{ + "id": "customercreditcheck", + "version": "1.0", + "specVersion": "0.8", + "name": "Customer Credit Check Workflow", + "description": "Perform Customer Credit Check", + "start": "CheckCredit", + "functions": [ + { + "name": "creditCheckFunction", + "operation": "http://myapis.org/creditcheckapi.json#doCreditCheck" + }, + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/creditcheckapi.json#rejectionEmail" + } + ], + "events": [ + { + "name": "CreditCheckCompletedEvent", + "type": "creditCheckCompleteType", + "source": "creditCheckSource", + "correlation": [ + { + "contextAttributeName": "customerId" + } + ] + } + ], + "states": [ + { + "name": "CheckCredit", + "type": "callback", + "action": { + "functionRef": { + "refName": "callCreditCheckMicroservice", + "arguments": { + "customer": "${ .customer }" + } + } + }, + "eventRef": "CreditCheckCompletedEvent", + "timeouts": { + "stateExecTimeout": "PT15M" + }, + "transition": "EvaluateDecision" + }, + { + "name": "EvaluateDecision", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .creditCheck | .decision == \"Approved\" }", + "transition": "StartApplication" + }, + { + "condition": "${ .creditCheck | .decision == \"Denied\" }", + "transition": "RejectApplication" + } + ], + "defaultCondition": { + "transition": "RejectApplication" + } + }, + { + "name": "StartApplication", + "type": "operation", + "actions": [ + { + "subFlowRef": "startApplicationWorkflowId" + } + ], + "end": true + }, + { + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "sendRejectionEmailFunction", + "arguments": { + "applicant": "${ .customer }" + } + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json index e36258f..843669d 100644 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ b/parser/testdata/workflows/eventbaseddataandswitch.sw.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Event Based Switch Transitions", "description": "Event Based Switch Transitions with Event Database Condition", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "Start" }, diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json index a5cff3e..3510d11 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.json @@ -1,56 +1,52 @@ { - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - 
"description": "Event Based Greeting", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ + "id": "eventbasedgreeting", + "version": "1.0", + "specVersion": "0.8", + "name": "Event Based Greeting Workflow", + "description": "Event Based Greeting", + "start": "Greet", + "events": [ { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }", - "useData": false - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" + "name": "GreetingEvent", + "type": "greetingEventType", + "source": "greetingEventSource" + } + ], + "functions": [ + { + "name": "greetingFunction", + "operation": "file://myapis/greetingapis.json#greeting" + } + ], + "states": [ + { + "name": "Greet", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "GreetingEvent" + ], + "eventDataFilter": { + "data": "${ .greet }", + "toStateData": "${ .greet }" + }, + "actions": [ + { + "functionRef": { + "refName": "greetingFunction", + "arguments": { + "name": "${ .greet.name }" + } + } + } + ] } - } - } - ] + ], + "stateDataFilter": { + "output": "${ .payload.greeting }" + }, + "end": true } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json index d504bc9..a160546 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.p.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Event Based Greeting Workflow", "description": "Event Based Greeting", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json index e685e0d..df9d7dd 100644 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Event Based Greeting Workflow", "description": "Event Based Greeting", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json index 8a3c78a..946aa39 100644 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Event Based Greeting Workflow", "description": "Event Based Greeting", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json index 9a11e32..3d0075f 100644 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ b/parser/testdata/workflows/eventbasedswitch.sw.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Event Based Switch Transitions", "description": "Event Based Switch Transitions", - "specVersion": "0.7", + 
"specVersion": "0.8", "start": { "stateName": "CheckVisaStatus" }, diff --git a/parser/testdata/workflows/eventbasedswitchstate.json b/parser/testdata/workflows/eventbasedswitchstate.json new file mode 100644 index 0000000..c1b48b0 --- /dev/null +++ b/parser/testdata/workflows/eventbasedswitchstate.json @@ -0,0 +1,70 @@ +{ + "id": "eventbasedswitchstate", + "version": "1.0", + "specVersion": "0.8", + "name": "Event Based Switch Transitions", + "description": "Event Based Switch Transitions", + "start": "CheckVisaStatus", + "events": [ + { + "name": "visaApprovedEvent", + "type": "VisaApproved", + "source": "visaCheckSource" + }, + { + "name": "visaRejectedEvent", + "type": "VisaRejected", + "source": "visaCheckSource" + } + ], + "states": [ + { + "name": "CheckVisaStatus", + "type": "switch", + "eventConditions": [ + { + "eventRef": "visaApprovedEvent", + "transition": "HandleApprovedVisa" + }, + { + "eventRef": "visaRejectedEvent", + "transition": "HandleRejectedVisa" + } + ], + "eventTimeout": "PT1H", + "defaultCondition": { + "transition": "HandleNoVisaDecision" + } + }, + { + "name": "HandleApprovedVisa", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleApprovedVisaWorkflowID" + } + ], + "end": true + }, + { + "name": "HandleRejectedVisa", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleRejectedVisaWorkflowID" + } + ], + "end": true + }, + { + "name": "HandleNoVisaDecision", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleNoVisaDecisionWorkflowId" + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/fillglassofwater.json b/parser/testdata/workflows/fillglassofwater.json new file mode 100644 index 0000000..b45d84e --- /dev/null +++ b/parser/testdata/workflows/fillglassofwater.json @@ -0,0 +1,48 @@ +{ + "id": "fillglassofwater", + "name": "Fill glass of water workflow", + "version": "1.0", + "specVersion": "0.8", + "start": "Check if full", + "functions": [ + { + "name": "Increment Current Count Function", + "type": "expression", + "operation": ".counts.current += 1 | .counts.current" + } + ], + "states": [ + { + "name": "Check if full", + "type": "switch", + "dataConditions": [ + { + "name": "Need to fill more", + "condition": "${ .counts.current < .counts.max }", + "transition": "Add Water" + }, + { + "name": "Glass full", + "condition": ".counts.current >= .counts.max", + "end": true + } + ], + "defaultCondition": { + "end": true + } + }, + { + "name": "Add Water", + "type": "operation", + "actions": [ + { + "functionRef": "Increment Current Count Function", + "actionDataFilter": { + "toStateData": ".counts.current" + } + } + ], + "transition": "Check if full" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/finalizeCollegeApplication.json b/parser/testdata/workflows/finalizeCollegeApplication.json new file mode 100644 index 0000000..9c93616 --- /dev/null +++ b/parser/testdata/workflows/finalizeCollegeApplication.json @@ -0,0 +1,74 @@ +{ + "id": "finalizeCollegeApplication", + "name": "Finalize College Application", + "version": "1.0", + "specVersion": "0.8", + "start": "FinalizeApplication", + "events": [ + { + "name": "ApplicationSubmitted", + "type": "org.application.submitted", + "source": "applicationsource", + "correlation": [ + { + "contextAttributeName": "applicantId" + } + ] + }, + { + "name": "SATScoresReceived", + "type": "org.application.satscores", + "source": "applicationsource", + "correlation": [ + { + "contextAttributeName": "applicantId" + } 
+ ] + }, + { + "name": "RecommendationLetterReceived", + "type": "org.application.recommendationLetter", + "source": "applicationsource", + "correlation": [ + { + "contextAttributeName": "applicantId" + } + ] + } + ], + "functions": [ + { + "name": "finalizeApplicationFunction", + "operation": "http://myapis.org/collegeapplicationapi.json#finalize" + } + ], + "states": [ + { + "name": "FinalizeApplication", + "type": "event", + "exclusive": false, + "onEvents": [ + { + "eventRefs": [ + "ApplicationSubmitted", + "SATScoresReceived", + "RecommendationLetterReceived" + ], + "actions": [ + { + "functionRef": { + "refName": "finalizeApplicationFunction", + "arguments": { + "student": "${ .applicantId }" + } + } + } + ] + } + ], + "end": { + "terminate": true + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml index 383147d..ca02f40 100644 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ b/parser/testdata/workflows/greetings-constants-file.sw.yaml @@ -16,7 +16,7 @@ id: greeting version: '1.0' name: Greeting Workflow description: Greet Someone -specVersion: "0.7" +specVersion: "0.8" start: stateName: Greet constants: "testdata/constantsDogs.json" diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml index 193c82b..3259810 100644 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ b/parser/testdata/workflows/greetings-secret-file.sw.yaml @@ -16,7 +16,7 @@ id: greeting version: '1.0' name: Greeting Workflow description: Greet Someone -specVersion: "0.7" +specVersion: "0.8" start: stateName: Greet secrets: "testdata/secrets.json" diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml index 6fbe5e0..2f64a98 100644 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ b/parser/testdata/workflows/greetings-secret.sw.yaml @@ -16,7 +16,7 @@ id: greeting version: '1.0' name: Greeting Workflow description: Greet Someone -specVersion: "0.7" +specVersion: "0.8" start: stateName: Greet secrets: diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json index a9ba296..8adeeb6 100644 --- a/parser/testdata/workflows/greetings.sw.json +++ b/parser/testdata/workflows/greetings.sw.json @@ -1,38 +1,34 @@ { - "id": "greeting", - "version": "1.0", - "name": "Greeting Workflow", - "description": "Greet Someone", - "specVersion": "0.7", - "start": { - "stateName": "Greet" - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ + "id": "greeting", + "version": "1.0", + "specVersion": "0.8", + "name": "Greeting Workflow", + "description": "Greet Someone", + "start": "Greet", + "functions": [ { - "functionRef": { - "refName": "greetingFunction", - "parameters": { - "name": "${ .person | .name }" - } - }, - "actionDataFilter": { - "toStateData": "${ .greeting }" - } + "name": "greetingFunction", + "operation": "file://myapis/greetingapis.json#greeting" } - ], - "end": { - "terminate": true - } - } - ] + ], + "states": [ + { + "name": "Greet", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "greetingFunction", + "arguments": { + "name": "${ .person.name }" + } + }, + "actionDataFilter": { + "results": "${ .greeting }" + } 
+ } + ], + "end": true + } + ] } \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml index 398d56c..47a9ff8 100644 --- a/parser/testdata/workflows/greetings.sw.yaml +++ b/parser/testdata/workflows/greetings.sw.yaml @@ -16,7 +16,7 @@ id: greeting version: '1.0' name: Greeting Workflow description: Greet Someone -specVersion: "0.7" +specVersion: "0.8" start: stateName: Greet functions: diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json index c2a1a1a..5330bc5 100644 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ b/parser/testdata/workflows/greetings_sleep.sw.json @@ -3,7 +3,7 @@ "version": "1.0", "name": "Greeting Workflow", "description": "Greet Someone", - "specVersion": "0.7", + "specVersion": "0.8", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/workflows/handleCarAuctionBid.json b/parser/testdata/workflows/handleCarAuctionBid.json new file mode 100644 index 0000000..6df46b0 --- /dev/null +++ b/parser/testdata/workflows/handleCarAuctionBid.json @@ -0,0 +1,49 @@ +{ + "id": "handleCarAuctionBid", + "version": "1.0", + "specVersion": "0.8", + "name": "Car Auction Bidding Workflow", + "description": "Store a single bid whole the car auction is active", + "start": { + "stateName": "StoreCarAuctionBid", + "schedule": "R/PT2H" + }, + "functions": [ + { + "name": "StoreBidFunction", + "operation": "http://myapis.org/carauctionapi.json#storeBid" + } + ], + "events": [ + { + "name": "CarBidEvent", + "type": "carBidMadeType", + "source": "carBidEventSource" + } + ], + "states": [ + { + "name": "StoreCarAuctionBid", + "type": "event", + "exclusive": true, + "onEvents": [ + { + "eventRefs": [ + "CarBidEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "StoreBidFunction", + "arguments": { + "bid": "${ .bid }" + } + } + } + ] + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/helloworld.json b/parser/testdata/workflows/helloworld.json new file mode 100644 index 0000000..707b6ef --- /dev/null +++ b/parser/testdata/workflows/helloworld.json @@ -0,0 +1,18 @@ +{ + "id": "helloworld", + "version": "1.0", + "specVersion": "0.8", + "name": "Hello World Workflow", + "description": "Inject Hello World", + "start": "Hello State", + "states": [ + { + "name": "Hello State", + "type": "inject", + "data": { + "result": "Hello World!" 
+ }, + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/jobmonitoring.json b/parser/testdata/workflows/jobmonitoring.json new file mode 100644 index 0000000..a11282b --- /dev/null +++ b/parser/testdata/workflows/jobmonitoring.json @@ -0,0 +1,127 @@ +{ + "id": "jobmonitoring", + "version": "1.0", + "specVersion": "0.8", + "name": "Job Monitoring", + "description": "Monitor finished execution of a submitted job", + "start": "SubmitJob", + "functions": [ + { + "name": "submitJob", + "operation": "http://myapis.org/monitorapi.json#doSubmit" + }, + { + "name": "checkJobStatus", + "operation": "http://myapis.org/monitorapi.json#checkStatus" + }, + { + "name": "reportJobSuceeded", + "operation": "http://myapis.org/monitorapi.json#reportSucceeded" + }, + { + "name": "reportJobFailed", + "operation": "http://myapis.org/monitorapi.json#reportFailure" + } + ], + "states": [ + { + "name": "SubmitJob", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "submitJob", + "arguments": { + "name": "${ .job.name }" + } + }, + "actionDataFilter": { + "results": "${ .jobuid }" + } + } + ], + "stateDataFilter": { + "output": "${ .jobuid }" + }, + "transition": "WaitForCompletion" + }, + { + "name": "WaitForCompletion", + "type": "sleep", + "duration": "PT5S", + "transition": "GetJobStatus" + }, + { + "name": "GetJobStatus", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "checkJobStatus", + "arguments": { + "name": "${ .jobuid }" + } + }, + "actionDataFilter": { + "results": "${ .jobstatus }" + } + } + ], + "stateDataFilter": { + "output": "${ .jobstatus }" + }, + "transition": "DetermineCompletion" + }, + { + "name": "DetermineCompletion", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .jobStatus == \"SUCCEEDED\" }", + "transition": "JobSucceeded" + }, + { + "condition": "${ .jobStatus == \"FAILED\" }", + "transition": "JobFailed" + } + ], + "defaultCondition": { + "transition": "WaitForCompletion" + } + }, + { + "name": "JobSucceeded", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "reportJobSuceeded", + "arguments": { + "name": "${ .jobuid }" + } + } + } + ], + "end": true + }, + { + "name": "JobFailed", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "reportJobFailed", + "arguments": { + "name": "${ .jobuid }" + } + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/onboardcustomer.json b/parser/testdata/workflows/onboardcustomer.json new file mode 100644 index 0000000..85cb0d6 --- /dev/null +++ b/parser/testdata/workflows/onboardcustomer.json @@ -0,0 +1,25 @@ +{ + "id": "onboardcustomer", + "version": "1.0", + "specVersion": "0.8", + "name": "Onboard Customer", + "description": "Onboard a Customer", + "start": "Onboard", + "states": [ + { + "name": "Onboard", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "invoke": "async", + "onParentComplete": "continue", + "workflowId": "customeronboardingworkflow", + "version": "1.0" + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/parallelexec.json b/parser/testdata/workflows/parallelexec.json new file mode 100644 index 0000000..7e33893 --- /dev/null +++ b/parser/testdata/workflows/parallelexec.json @@ -0,0 +1,34 @@ +{ + "id": "parallelexec", + "version": "1.0", + 
"specVersion": "0.8", + "name": "Parallel Execution Workflow", + "description": "Executes two branches in parallel", + "start": "ParallelExec", + "states": [ + { + "name": "ParallelExec", + "type": "parallel", + "completionType": "allOf", + "branches": [ + { + "name": "ShortDelayBranch", + "actions": [ + { + "subFlowRef": "shortdelayworkflowid" + } + ] + }, + { + "name": "LongDelayBranch", + "actions": [ + { + "subFlowRef": "longdelayworkflowid" + } + ] + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/patientVitalsWorkflow.json b/parser/testdata/workflows/patientVitalsWorkflow.json new file mode 100644 index 0000000..a4fd8b5 --- /dev/null +++ b/parser/testdata/workflows/patientVitalsWorkflow.json @@ -0,0 +1,110 @@ +{ + "id": "patientVitalsWorkflow", + "name": "Monitor Patient Vitals", + "version": "1.0", + "specVersion": "0.8", + "start": "MonitorVitals", + "events": [ + { + "name": "HighBodyTemperature", + "type": "org.monitor.highBodyTemp", + "source": "monitoringSource", + "correlation": [ + { + "contextAttributeName": "patientId" + } + ] + }, + { + "name": "HighBloodPressure", + "type": "org.monitor.highBloodPressure", + "source": "monitoringSource", + "correlation": [ + { + "contextAttributeName": "patientId" + } + ] + }, + { + "name": "HighRespirationRate", + "type": "org.monitor.highRespirationRate", + "source": "monitoringSource", + "correlation": [ + { + "contextAttributeName": "patientId" + } + ] + } + ], + "functions": [ + { + "name": "callPulmonologist", + "operation": "http://myapis.org/patientapis.json#callPulmonologist" + }, + { + "name": "sendTylenolOrder", + "operation": "http://myapis.org/patientapis.json#tylenolOrder" + }, + { + "name": "callNurse", + "operation": "http://myapis.org/patientapis.json#callNurse" + } + ], + "states": [ + { + "name": "MonitorVitals", + "type": "event", + "exclusive": true, + "onEvents": [ + { + "eventRefs": [ + "HighBodyTemperature" + ], + "actions": [ + { + "functionRef": { + "refName": "sendTylenolOrder", + "arguments": { + "patientid": "${ .patientId }" + } + } + } + ] + }, + { + "eventRefs": [ + "HighBloodPressure" + ], + "actions": [ + { + "functionRef": { + "refName": "callNurse", + "arguments": { + "patientid": "${ .patientId }" + } + } + } + ] + }, + { + "eventRefs": [ + "HighRespirationRate" + ], + "actions": [ + { + "functionRef": { + "refName": "callPulmonologist", + "arguments": { + "patientid": "${ .patientId }" + } + } + } + ] + } + ], + "end": { + "terminate": true + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml index 0841351..c2a5808 100644 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ b/parser/testdata/workflows/patientonboarding.sw.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 The Serverless Workflow Specification Authors +# Copyright 2022 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -14,9 +14,9 @@ id: patientonboarding name: Patient Onboarding Workflow -version: '1.0' +version: "1.0" +specVersion: "0.8" start: Onboard -specVersion: "0.7" states: - name: Onboard type: event @@ -25,12 +25,19 @@ states: - NewPatientEvent actions: - functionRef: StorePatient + retryRef: ServicesNotAvailableRetryStrategy + retryableErrors: + - ServiceNotAvailable - functionRef: AssignDoctor + retryRef: ServicesNotAvailableRetryStrategy + retryableErrors: + - ServiceNotAvailable - functionRef: ScheduleAppt + retryRef: ServicesNotAvailableRetryStrategy + retryableErrors: + - ServiceNotAvailable onErrors: - - error: ServiceNotAvailable - code: '503' - retryRef: ServicesNotAvailableRetryStrategy + - errorRef: ServiceNotAvailable end: true end: true events: @@ -44,6 +51,9 @@ functions: operation: api/services.json#assignDoctor - name: ScheduleAppt operation: api/services.json#scheduleAppointment +errors: + - name: ServiceNotAvailable + code: "503" retries: - name: ServicesNotAvailableRetryStrategy delay: PT3S diff --git a/parser/testdata/workflows/paymentconfirmation.json b/parser/testdata/workflows/paymentconfirmation.json new file mode 100644 index 0000000..2051126 --- /dev/null +++ b/parser/testdata/workflows/paymentconfirmation.json @@ -0,0 +1,96 @@ +{ + "id": "paymentconfirmation", + "version": "1.0", + "specVersion": "0.8", + "name": "Payment Confirmation Workflow", + "description": "Performs Payment Confirmation", + "functions": "functiondefs.json", + "events": "eventdefs.yml", + "states": [ + { + "name": "PaymentReceived", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "PaymentReceivedEvent" + ], + "actions": [ + { + "name": "checkfunds", + "functionRef": { + "refName": "checkFundsAvailability", + "arguments": { + "account": "${ .accountId }", + "paymentamount": "${ .payment.amount }" + } + } + } + ] + } + ], + "transition": "ConfirmBasedOnFunds" + }, + { + "name": "ConfirmBasedOnFunds", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .funds | .available == \"true\" }", + "transition": "SendPaymentSuccess" + }, + { + "condition": "${ .funds | .available == \"false\" }", + "transition": "SendInsufficientResults" + } + ], + "defaultCondition": { + "transition": "SendPaymentSuccess" + } + }, + { + "name": "SendPaymentSuccess", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "sendSuccessEmail", + "arguments": { + "applicant": "${ .customer }" + } + } + } + ], + "end": { + "produceEvents": [ + { + "eventRef": "ConfirmationCompletedEvent", + "data": "${ .payment }" + } + ] + } + }, + { + "name": "SendInsufficientResults", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "sendInsufficientFundsEmail", + "arguments": { + "applicant": "${ .customer }" + } + } + } + ], + "end": { + "produceEvents": [ + { + "eventRef": "ConfirmationCompletedEvent", + "data": "${ .payment }" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json index 21119a2..7496b32 100644 --- a/parser/testdata/workflows/provisionorders.sw.json +++ b/parser/testdata/workflows/provisionorders.sw.json @@ -1,100 +1,100 @@ { - "id": "provisionorders", - "version": "1.0", - "specVersion": "0.7", - "name": "Provision Orders", - "description": "Provision Orders and handle errors thrown", - "start": "ProvisionOrder", - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], 
- "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ], - "states":[ - { - "name":"ProvisionOrder", - "type":"operation", - "actionMode":"sequential", - "actions":[ + "id": "provisionorders", + "version": "1.0", + "specVersion": "0.8", + "name": "Provision Orders", + "description": "Provision Orders and handle errors thrown", + "start": "ProvisionOrder", + "functions": [ { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .order }" - } - } + "name": "provisionOrderFunction", + "operation": "http://myapis.org/provisioningapi.json#doProvision" } - ], - "stateDataFilter": { - "output": "${ .exceptions }" - }, - "transition": "ApplyOrder", - "onErrors": [ + ], + "errors": [ { - "errorRef": "Missing order id", - "transition": "MissingId" + "name": "Missing order id" }, { - "errorRef": "Missing order item", - "transition": "MissingItem" + "name": "Missing order item" }, { - "errorRef": "Missing order quantity", - "transition": "MissingQuantity" + "name": "Missing order quantity" } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ + ], + "states": [ { - "subFlowRef": "handleMissingIdExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ + "name": "ProvisionOrder", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "functionRef": { + "refName": "provisionOrderFunction", + "arguments": { + "order": "${ .order }" + } + } + } + ], + "stateDataFilter": { + "output": "${ .exceptions }" + }, + "transition": "ApplyOrder", + "onErrors": [ + { + "errorRef": "Missing order id", + "transition": "MissingId" + }, + { + "errorRef": "Missing order item", + "transition": "MissingItem" + }, + { + "errorRef": "Missing order quantity", + "transition": "MissingQuantity" + } + ] + }, { - "subFlowRef": "handleMissingItemExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ + "name": "MissingId", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingIdExceptionWorkflow" + } + ], + "end": true + }, { - "subFlowRef": "handleMissingQuantityExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ + "name": "MissingItem", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingItemExceptionWorkflow" + } + ], + "end": true + }, + { + "name": "MissingQuantity", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingQuantityExceptionWorkflow" + } + ], + "end": true + }, { - "subFlowRef": "applyOrderWorkflowId" + "name": "ApplyOrder", + "type": "operation", + "actions": [ + { + "subFlowRef": "applyOrderWorkflowId" + } + ], + "end": true } - ], - "end": true - } - ] -} + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json index 998b974..2bde03c 100644 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ b/parser/testdata/workflows/purchaseorderworkflow.sw.json @@ -1,162 +1,162 @@ { - "id": "order", - "name": "Purchase Order Workflow", - "version": "1.0", - "specVersion": "0.7", - "start": "StartNewOrder", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT30D", - "runBefore": "CancelOrder" - } - }, - "states": [ - { - "name": "StartNewOrder", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - 
"OrderCreatedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogNewOrderCreated" - } - } - ] + "id": "order", + "name": "Purchase Order Workflow", + "version": "1.0", + "specVersion": "0.8", + "start": "StartNewOrder", + "timeouts": { + "workflowExecTimeout": { + "duration": "PT30D", + "runBefore": "CancelOrder" } - ], - "transition": { - "nextState": "WaitForOrderConfirmation" - } }, - { - "name": "WaitForOrderConfirmation", - "type": "event", - "onEvents": [ + "states": [ { - "eventRefs": [ - "OrderConfirmedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderConfirmed" - } + "name": "StartNewOrder", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "OrderCreatedEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogNewOrderCreated" + } + } + ] + } + ], + "transition": { + "nextState": "WaitForOrderConfirmation" } - ] - } - ], - "transition": { - "nextState": "WaitOrderShipped" - } - }, - { - "name": "WaitOrderShipped", - "type": "event", - "onEvents": [ + }, { - "eventRefs": [ - "ShipmentSentEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderShipped" - } + "name": "WaitForOrderConfirmation", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "OrderConfirmedEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogOrderConfirmed" + } + } + ] + } + ], + "transition": { + "nextState": "WaitOrderShipped" } - ] - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderFinishedEvent" - } - ] - } - }, - { - "name": "CancelOrder", - "type": "operation", - "actions": [ + }, { - "functionRef": { - "refName": "CancelOrder" - } - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderCancelledEvent" - } - ] - } - } - ], - "events": [ - { - "name": "OrderCreatedEvent", - "type": "my.company.orders", - "source": "/orders/new", - "correlation": [ + "name": "WaitOrderShipped", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "ShipmentSentEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogOrderShipped" + } + } + ] + } + ], + "end": { + "terminate": true, + "produceEvents": [ + { + "eventRef": "OrderFinishedEvent" + } + ] + } + }, { - "contextAttributeName": "orderid" + "name": "CancelOrder", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "CancelOrder" + } + } + ], + "end": { + "terminate": true, + "produceEvents": [ + { + "eventRef": "OrderCancelledEvent" + } + ] + } } - ] - }, - { - "name": "OrderConfirmedEvent", - "type": "my.company.orders", - "source": "/orders/confirmed", - "correlation": [ + ], + "events": [ + { + "name": "OrderCreatedEvent", + "type": "my.company.orders", + "source": "/orders/new", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, + { + "name": "OrderConfirmedEvent", + "type": "my.company.orders", + "source": "/orders/confirmed", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, { - "contextAttributeName": "orderid" + "name": "ShipmentSentEvent", + "type": "my.company.orders", + "source": "/orders/shipped", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, + { + "name": "OrderFinishedEvent", + "type": "my.company.orders", + "kind": "produced" + }, + { + "name": "OrderCancelledEvent", + "type": "my.company.orders", + "kind": "produced" } - ] - }, - { - "name": "ShipmentSentEvent", - "type": "my.company.orders", - "source": "/orders/shipped", - "correlation": [ + ], + "functions": [ + { + "name": "LogNewOrderCreated", + 
"operation": "http.myorg.io/ordersservices.json#logcreated" + }, + { + "name": "LogOrderConfirmed", + "operation": "http.myorg.io/ordersservices.json#logconfirmed" + }, + { + "name": "LogOrderShipped", + "operation": "http.myorg.io/ordersservices.json#logshipped" + }, { - "contextAttributeName": "orderid" + "name": "CancelOrder", + "operation": "http.myorg.io/ordersservices.json#calcelorder" } - ] - }, - { - "name": "OrderFinishedEvent", - "type": "my.company.orders", - "kind": "produced" - }, - { - "name": "OrderCancelledEvent", - "type": "my.company.orders", - "kind": "produced" - } - ], - "functions": [ - { - "name": "LogNewOrderCreated", - "operation": "http.myorg.io/ordersservices.json#logcreated" - }, - { - "name": "LogOrderConfirmed", - "operation": "http.myorg.io/ordersservices.json#logconfirmed" - }, - { - "name": "LogOrderShipped", - "operation": "http.myorg.io/ordersservices.json#logshipped" - }, - { - "name": "CancelOrder", - "operation": "http.myorg.io/ordersservices.json#calcelorder" - } - ] + ] } \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json index c875863..b00b964 100644 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json @@ -2,7 +2,7 @@ "id": "roomreadings", "name": "Room Temp and Humidity Workflow", "version": "1.0", - "specVersion": "0.7", + "specVersion": "0.8", "start": "ConsumeReading", "timeouts": "testdata/timeouts.json", "keepActive": true, diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json index b322f8c..90c7c62 100644 --- a/parser/testdata/workflows/roomreadings.timeouts.sw.json +++ b/parser/testdata/workflows/roomreadings.timeouts.sw.json @@ -1,85 +1,88 @@ { - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.7", - "start": "ConsumeReading", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } - }, - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } + "id": "roomreadings", + "name": "Room Temp and Humidity Workflow", + "version": "1.0", + "specVersion": "0.8", + "start": "ConsumeReading", + "timeouts": { + "workflowExecTimeout": { + "duration": "PT1H", + "runBefore": "GenerateReport" } - ], - "end": true }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ + "keepActive": true, + "states": [ { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" + "name": "ConsumeReading", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "TemperatureEvent", + "HumidityEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogReading" + } + } + ], + "eventDataFilter": { + "toStateData": "${ .readings }" + } + } + ], + "end": true + }, + { + "name": "GenerateReport", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "ProduceReport", + "arguments": { + "data": "${ .readings }" + } + } + } + ], + "end": { + "terminate": true } - } } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - 
"source": "/home/rooms/+", - "correlation": [ + ], + "events": [ + { + "name": "TemperatureEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] + }, { - "contextAttributeName": "roomId" + "name": "HumidityEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ + ], + "functions": [ + { + "name": "LogReading", + "operation": "http.myorg.io/ordersservices.json#logreading" + }, { - "contextAttributeName": "roomId" + "name": "ProduceReport", + "operation": "http.myorg.io/ordersservices.json#produceReport" } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcloudeventonprovision.json b/parser/testdata/workflows/sendcloudeventonprovision.json new file mode 100644 index 0000000..7e5bc37 --- /dev/null +++ b/parser/testdata/workflows/sendcloudeventonprovision.json @@ -0,0 +1,47 @@ +{ + "id": "sendcloudeventonprovision", + "version": "1.0", + "specVersion": "0.8", + "name": "Send CloudEvent on provision completion", + "start": "ProvisionOrdersState", + "events": [ + { + "name": "provisioningCompleteEvent", + "type": "provisionCompleteType", + "kind": "produced" + } + ], + "functions": [ + { + "name": "provisionOrderFunction", + "operation": "http://myapis.org/provisioning.json#doProvision" + } + ], + "states": [ + { + "name": "ProvisionOrdersState", + "type": "foreach", + "inputCollection": "${ .orders }", + "iterationParam": "singleorder", + "outputCollection": "${ .provisionedOrders }", + "actions": [ + { + "functionRef": { + "refName": "provisionOrderFunction", + "arguments": { + "order": "${ .singleorder }" + } + } + } + ], + "end": { + "produceEvents": [ + { + "eventRef": "provisioningCompleteEvent", + "data": "${ .provisionedOrders }" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcustomeremail.json b/parser/testdata/workflows/sendcustomeremail.json new file mode 100644 index 0000000..7e8d010 --- /dev/null +++ b/parser/testdata/workflows/sendcustomeremail.json @@ -0,0 +1,32 @@ +{ + "id": "sendcustomeremail", + "version": "1.0", + "specVersion": "0.8", + "name": "Send customer email workflow", + "description": "Send email to a customer", + "start": "Send Email", + "functions": [ + { + "name": "emailFunction", + "operation": "file://myapis/emailapis.json#sendEmail" + } + ], + "states": [ + { + "name": "Send Email", + "type": "operation", + "actions": [ + { + "functionRef": { + "invoke": "async", + "refName": "emailFunction", + "arguments": { + "customer": "${ .customer }" + } + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/solvemathproblems.json b/parser/testdata/workflows/solvemathproblems.json new file mode 100644 index 0000000..a3083d0 --- /dev/null +++ b/parser/testdata/workflows/solvemathproblems.json @@ -0,0 +1,37 @@ +{ + "id": "solvemathproblems", + "version": "1.0", + "specVersion": "0.8", + "name": "Solve Math Problems Workflow", + "description": "Solve math problems", + "start": "Solve", + "functions": [ + { + "name": "solveMathExpressionFunction", + 
"operation": "http://myapis.org/mapthapis.json#solveExpression" + } + ], + "states": [ + { + "name": "Solve", + "type": "foreach", + "inputCollection": "${ .expressions }", + "iterationParam": "singleexpression", + "outputCollection": "${ .results }", + "actions": [ + { + "functionRef": { + "refName": "solveMathExpressionFunction", + "arguments": { + "expression": "${ .singleexpression }" + } + } + } + ], + "stateDataFilter": { + "output": "${ .results }" + }, + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/vitalscheck.json b/parser/testdata/workflows/vitalscheck.json new file mode 100644 index 0000000..feb1c41 --- /dev/null +++ b/parser/testdata/workflows/vitalscheck.json @@ -0,0 +1,53 @@ +{ + "id": "vitalscheck", + "name": "Car Vitals Check", + "version": "1.0", + "specVersion": "0.8", + "start": "CheckVitals", + "states": [ + { + "name": "CheckVitals", + "type": "operation", + "actions": [ + { + "functionRef": "Check Tire Pressure" + }, + { + "functionRef": "Check Oil Pressure" + }, + { + "functionRef": "Check Coolant Level" + }, + { + "functionRef": "Check Battery" + } + ], + "end": { + "produceEvents": [ + { + "eventRef": "DisplayChecksOnDashboard", + "data": "${ .evaluations }" + } + ] + } + } + ], + "functions": [ + { + "name": "checkTirePressure", + "operation": "mycarservices.json#checktirepressure" + }, + { + "name": "checkOilPressure", + "operation": "mycarservices.json#checkoilpressure" + }, + { + "name": "checkCoolantLevel", + "operation": "mycarservices.json#checkcoolantlevel" + }, + { + "name": "checkBattery", + "operation": "mycarservices.json#checkbattery" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json index 8bc0eb5..c0b72c8 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json +++ b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", - "specVersion": "0.7", + "specVersion": "0.8", "auth": [{ "name": "testAuth", "scheme": "bearer", diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json index 2a712c6..e72712d 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json +++ b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", - "specVersion": "0.7", + "specVersion": "0.8", "auth": [{ "name": "testAuth", "scheme": "bearer", diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json index 4db741b..d01c35e 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json +++ b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", - "specVersion": "0.7", + "specVersion": "0.8", "auth": { "name": "testAuth", "scheme": "bearer", diff --git 
a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json index 8beb050..101b9bf 100644 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json @@ -4,7 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", - "specVersion": "0.7", + "specVersion": "0.8", "auth": [ { "name": "testAuth", From 85c2f4d44f3ae85b921f0ef69680af6ee2bae085 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 24 Oct 2022 21:58:04 +0800 Subject: [PATCH 033/110] fix(*): parallelState with completeType defaults to allOf (#106) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/parallel_state.go | 134 ++++++++++++++++++++++++ model/parallel_state_test.go | 191 +++++++++++++++++++++++++++++++++++ model/states.go | 27 ----- model/workflow.go | 18 ---- 4 files changed, 325 insertions(+), 45 deletions(-) create mode 100644 model/parallel_state.go create mode 100644 model/parallel_state_test.go diff --git a/model/parallel_state.go b/model/parallel_state.go new file mode 100644 index 0000000..4bb77bb --- /dev/null +++ b/model/parallel_state.go @@ -0,0 +1,134 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "strconv" + + validator "github.com/go-playground/validator/v10" + "k8s.io/apimachinery/pkg/util/intstr" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +// CompletionType define on how to complete branch execution. +type CompletionType string + +const ( + // CompletionTypeAllOf defines all branches must complete execution before the state can transition/end. + CompletionTypeAllOf CompletionType = "allOf" + // CompletionTypeAtLeast defines state can transition/end once at least the specified number of branches + // have completed execution. + CompletionTypeAtLeast CompletionType = "atLeast" +) + +// ParallelState Consists of a number of states that are executed in parallel +type ParallelState struct { + BaseState + // Branch Definitions + Branches []Branch `json:"branches" validate:"required,min=1,dive"` + // Option types on how to complete branch execution. + // Defaults to `allOf` + CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneof=allOf atLeast"` + + // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." 
+ // TODO: change this field to unmarshal result as int + NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` + // State specific timeouts + Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` +} + +type parallelStateForUnmarshal ParallelState + +// UnmarshalJSON unmarshal ParallelState object from json bytes +func (s *ParallelState) UnmarshalJSON(b []byte) error { + if len(b) == 0 { + // TODO: Normalize error messages + return fmt.Errorf("no bytes to unmarshal") + } + + v := ¶llelStateForUnmarshal{ + CompletionType: CompletionTypeAllOf, + } + err := json.Unmarshal(b, v) + if err != nil { + return err + } + + *s = ParallelState(*v) + + return nil +} + +// Branch Definition +type Branch struct { + // Branch name + Name string `json:"name" validate:"required"` + // Actions to be executed in this branch + Actions []Action `json:"actions" validate:"required,min=1,dive"` + // Timeouts State specific timeouts + Timeouts *BranchTimeouts `json:"timeouts,omitempty"` +} + +// BranchTimeouts defines the specific timeout settings for branch +type BranchTimeouts struct { + // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` +} + +// ParallelStateTimeout defines the specific timeout settings for parallel state +type ParallelStateTimeout struct { + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` +} + +// ParallelStateStructLevelValidation custom validator for ParallelState +func ParallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + parallelStateObj := structLevel.Current().Interface().(ParallelState) + + if parallelStateObj.CompletionType == CompletionTypeAllOf { + return + } + + switch parallelStateObj.NumCompleted.Type { + case intstr.Int: + if parallelStateObj.NumCompleted.IntVal <= 0 { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") + } + case intstr.String: + v, err := strconv.Atoi(parallelStateObj.NumCompleted.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", err.Error()) + return + } + + if v <= 0 { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") + } + } +} + +func init() { + val.GetValidator().RegisterStructValidationCtx( + ParallelStateStructLevelValidation, + ParallelState{}, + ) +} diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go new file mode 100644 index 0000000..49daf42 --- /dev/null +++ b/model/parallel_state_test.go @@ -0,0 +1,191 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestParallelStateUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect *ParallelState + err string + } + testCases := []testCase{ + { + desp: "all field set", + data: `{"completionType": "allOf", "numCompleted": 1}`, + expect: &ParallelState{ + CompletionType: CompletionTypeAllOf, + NumCompleted: intstr.FromInt(1), + }, + err: ``, + }, + { + desp: "all optional field not set", + data: `{"numCompleted": 1}`, + expect: &ParallelState{ + CompletionType: CompletionTypeAllOf, + NumCompleted: intstr.FromInt(1), + }, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v ParallelState + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, &v) + }) + } +} + +func TestParallelStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + state *ParallelState + err string + } + testCases := []testCase{ + { + desp: "normal", + state: &ParallelState{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + }, + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAllOf, + NumCompleted: intstr.FromInt(1), + }, + err: ``, + }, + { + desp: "invalid completeType", + state: &ParallelState{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + }, + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAllOf + "1", + }, + err: `Key: 'ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, + }, + { + desp: "invalid numCompleted `int`", + state: &ParallelState{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + }, + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromInt(0), + }, + err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + { + desp: "invalid numCompleted string format", + state: &ParallelState{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + }, + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("a"), + }, + err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + { + desp: "normal", + state: &ParallelState{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + }, + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("0"), + }, + err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, 
+ }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.state) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/states.go b/model/states.go index 69722aa..e017e5a 100644 --- a/model/states.go +++ b/model/states.go @@ -41,11 +41,6 @@ const ( // StateTypeSleep ... StateTypeSleep = "sleep" - // CompletionTypeAllOf ... - CompletionTypeAllOf CompletionType = "allOf" - // CompletionTypeAtLeast ... - CompletionTypeAtLeast CompletionType = "atLeast" - // ForEachModeTypeSequential ... ForEachModeTypeSequential ForEachModeType = "sequential" // ForEachModeTypeParallel ... @@ -82,9 +77,6 @@ func getActionsModelMapping(stateType string, s map[string]interface{}) (State, // StateType ... type StateType string -// CompletionType Option types on how to complete branch execution. -type CompletionType string - // ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) type ForEachModeType string @@ -172,25 +164,6 @@ type OperationStateTimeout struct { ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` } -// ParallelState Consists of a number of states that are executed in parallel -type ParallelState struct { - BaseState - // Branch Definitions - Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. - CompletionType CompletionType `json:"completionType,omitempty"` - // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." - NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` - // State specific timeouts - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` -} - -// ParallelStateTimeout ... -type ParallelStateTimeout struct { - StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` -} - // InjectState ... type InjectState struct { BaseState diff --git a/model/workflow.go b/model/workflow.go index e8b0b2a..c2e8181 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -555,24 +555,6 @@ type StateDataFilter struct { Output string `json:"output,omitempty"` } -// Branch Definition -type Branch struct { - // Branch name - Name string `json:"name" validate:"required"` - // Actions to be executed in this branch - Actions []Action `json:"actions" validate:"required,min=1"` - // Timeouts State specific timeouts - Timeouts *BranchTimeouts `json:"timeouts,omitempty"` -} - -// BranchTimeouts ... -type BranchTimeouts struct { - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` -} - // DataInputSchema ... 
type DataInputSchema struct { Schema string `json:"schema" validate:"required"` From 7723e03bde4368dc11fd6a5d451e1d520af246eb Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 2 Nov 2022 20:42:38 +0800 Subject: [PATCH 034/110] fix(109): split foreach state to separate file (#118) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/foreach_state.go | 113 ++++++++++++++++++++ model/foreach_state_test.go | 206 ++++++++++++++++++++++++++++++++++++ model/states.go | 52 --------- 3 files changed, 319 insertions(+), 52 deletions(-) create mode 100644 model/foreach_state.go create mode 100644 model/foreach_state_test.go diff --git a/model/foreach_state.go b/model/foreach_state.go new file mode 100644 index 0000000..ef30dd5 --- /dev/null +++ b/model/foreach_state.go @@ -0,0 +1,113 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "strconv" + + validator "github.com/go-playground/validator/v10" + "k8s.io/apimachinery/pkg/util/intstr" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidationCtx(ForEachStateStructLevelValidation, ForEachState{}) +} + +// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) +type ForEachModeType string + +const ( + // ForEachModeTypeSequential specifies iterations should be done sequentially. + ForEachModeTypeSequential ForEachModeType = "sequential" + // ForEachModeTypeParallel specifies iterations should be done parallel. + ForEachModeTypeParallel ForEachModeType = "parallel" +) + +// ForEachState used to execute actions for each element of a data set. +type ForEachState struct { + BaseState + // Workflow expression selecting an array element of the states data + InputCollection string `json:"inputCollection" validate:"required"` + // Workflow expression specifying an array element of the states data to add the results of each iteration + OutputCollection string `json:"outputCollection,omitempty"` + // Name of the iteration parameter that can be referenced in actions/workflow. 
For each parallel iteration, this param should contain an unique element of the inputCollection array + IterationParam string `json:"iterationParam,omitempty"` + // Specifies how upper bound on how many iterations may run in parallel + BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` + // Actions to be executed for each of the elements of inputCollection + Actions []Action `json:"actions,omitempty" validate:"required,min=1,dive"` + // State specific timeout + Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` + // Mode Specifies how iterations are to be performed (sequentially or in parallel) + // Defaults to parallel + Mode ForEachModeType `json:"mode,omitempty"` +} + +type forEachStateForUnmarshal ForEachState + +func (f *ForEachState) UnmarshalJSON(data []byte) error { + v := forEachStateForUnmarshal{ + Mode: ForEachModeTypeParallel, + } + err := json.Unmarshal(data, &v) + if err != nil { + return fmt.Errorf("forEachState value '%s' is not supported, it must be an object or string", string(data)) + } + + *f = ForEachState(v) + return nil +} + +// ForEachStateStructLevelValidation custom validator for ForEachState +func ForEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + stateObj := structLevel.Current().Interface().(ForEachState) + + if stateObj.Mode != ForEachModeTypeParallel { + return + } + + if stateObj.BatchSize == nil { + return + } + + switch stateObj.BatchSize.Type { + case intstr.Int: + if stateObj.BatchSize.IntVal <= 0 { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") + } + case intstr.String: + v, err := strconv.Atoi(stateObj.BatchSize.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", err.Error()) + return + } + + if v <= 0 { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") + } + } +} + +// ForEachStateTimeout defines timeout settings for foreach state +type ForEachStateTimeout struct { + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` +} diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go new file mode 100644 index 0000000..6eacde0 --- /dev/null +++ b/model/foreach_state_test.go @@ -0,0 +1,206 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestForEachStateUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect *ForEachState + err string + } + testCases := []testCase{ + { + desp: "all field", + data: `{"mode": "sequential"}`, + expect: &ForEachState{ + Mode: ForEachModeTypeSequential, + }, + err: ``, + }, + { + desp: "mode unset", + data: `{}`, + expect: &ForEachState{ + Mode: ForEachModeTypeParallel, + }, + err: ``, + }, + { + desp: "invalid json format", + data: `{"mode": 1}`, + expect: nil, + err: `forEachState value '{"mode": 1}' is not supported, it must be an object or string`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v ForEachState + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, &v) + }) + } +} + +func TestForEachStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + state ForEachState + err string + } + testCases := []testCase{ + { + desp: "normal test & sequential", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeSequential, + }, + err: ``, + }, + { + desp: "normal test & parallel int", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 1, + }, + }, + err: ``, + }, + { + desp: "normal test & parallel string", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "1", + }, + }, + err: ``, + }, + { + desp: "invalid parallel int", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 0, + }, + }, + err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + { + desp: "invalid parallel string", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "0", + }, + }, + err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + { + desp: "invalid parallel string format", + state: ForEachState{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "a", + }, + }, + err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.state) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} 
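The two new files above make `parallel` the default `mode` when a foreach state omits it, and only apply the `gt0` check to `batchSize` when that parallel mode is in effect. A minimal usage sketch of that behaviour, assuming the `model` and `validator` packages of this module; the sample workflow JSON and `main` wrapper are illustrative only, not part of the patch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
	val "github.com/serverlessworkflow/sdk-go/v2/validator"
)

func main() {
	// "mode" is intentionally omitted: ForEachState.UnmarshalJSON fills in the
	// parallel default before the rest of the object is decoded.
	data := []byte(`{
		"name": "Solve",
		"type": "foreach",
		"inputCollection": "${ .expressions }",
		"actions": [ { "functionRef": { "refName": "solveMathExpressionFunction" } } ]
	}`)

	var state model.ForEachState
	if err := json.Unmarshal(data, &state); err != nil {
		panic(err)
	}
	fmt.Println(state.Mode) // parallel

	// batchSize is nil here, so ForEachStateStructLevelValidation accepts the state;
	// a batchSize of 0 (or a non-numeric string) would fail on the 'gt0' tag instead.
	if err := val.GetValidator().Struct(state); err != nil {
		fmt.Println("validation error:", err)
	}
}
```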
diff --git a/model/states.go b/model/states.go index e017e5a..0b1438e 100644 --- a/model/states.go +++ b/model/states.go @@ -16,9 +16,6 @@ package model import ( "encoding/json" - "fmt" - - "k8s.io/apimachinery/pkg/util/intstr" ) const ( @@ -40,11 +37,6 @@ const ( StateTypeCallback = "callback" // StateTypeSleep ... StateTypeSleep = "sleep" - - // ForEachModeTypeSequential ... - ForEachModeTypeSequential ForEachModeType = "sequential" - // ForEachModeTypeParallel ... - ForEachModeTypeParallel ForEachModeType = "parallel" ) func getActionsModelMapping(stateType string, s map[string]interface{}) (State, bool) { @@ -77,9 +69,6 @@ func getActionsModelMapping(stateType string, s map[string]interface{}) (State, // StateType ... type StateType string -// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) -type ForEachModeType string - // State definition for a Workflow state type State interface { GetID() string @@ -178,47 +167,6 @@ type InjectStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } -// ForEachState ... -type ForEachState struct { - BaseState - // Workflow expression selecting an array element of the states data - InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration - OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array - IterationParam string `json:"iterationParam,omitempty"` - // Specifies how upper bound on how many iterations may run in parallel - BatchSize intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection - Actions []Action `json:"actions,omitempty"` - // State specific timeout - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Mode Specifies how iterations are to be performed (sequentially or in parallel) - // Defaults to parallel - Mode ForEachModeType `json:"mode,omitempty"` -} - -type forEachStateForUnmarshal ForEachState - -func (f *ForEachState) UnmarshalJSON(data []byte) error { - v := forEachStateForUnmarshal{ - Mode: StateTypeParallel, - } - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("forEachState value '%s' is not supported, it must be an object or string", string(data)) - } - - *f = ForEachState(v) - return nil -} - -// ForEachStateTimeout ... -type ForEachStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` -} - // CallbackState ... 
type CallbackState struct { BaseState From bafff786674717fc3ab488990e8e17d68725eacb Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 2 Nov 2022 20:44:09 +0800 Subject: [PATCH 035/110] fix(121): split operation state to separate file (#124) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/operation_state.go | 53 ++++++++++++++++++++++++++ model/operation_state_test.go | 72 +++++++++++++++++++++++++++++++++++ model/states.go | 17 --------- 3 files changed, 125 insertions(+), 17 deletions(-) create mode 100644 model/operation_state.go create mode 100644 model/operation_state_test.go diff --git a/model/operation_state.go b/model/operation_state.go new file mode 100644 index 0000000..95f4a68 --- /dev/null +++ b/model/operation_state.go @@ -0,0 +1,53 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" +) + +// OperationState defines a set of actions to be performed in sequence or in parallel. +type OperationState struct { + BaseState + // Specifies whether actions are performed in sequence or in parallel + // Defaults to sequential + ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` + // Actions to be performed + Actions []Action `json:"actions" validate:"required,min=1,dive"` + // State specific timeouts + Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` +} + +type operationStateForUnmarshal OperationState + +// UnmarshalJSON unmarshal OperationState object from json bytes +func (o *OperationState) UnmarshalJSON(data []byte) error { + v := operationStateForUnmarshal{ + ActionMode: ActionModeSequential, + } + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + + *o = OperationState(v) + return nil +} + +// OperationStateTimeout defines the specific timeout settings for operation state +type OperationStateTimeout struct { + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` +} diff --git a/model/operation_state_test.go b/model/operation_state_test.go new file mode 100644 index 0000000..4939797 --- /dev/null +++ b/model/operation_state_test.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOperationStateUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect OperationState + err string + } + testCases := []testCase{ + { + desp: "all fields set", + data: `{"actionMode": "parallel"}`, + expect: OperationState{ + ActionMode: ActionModeParallel, + }, + err: ``, + }, + { + desp: "actionMode unset", + data: `{}`, + expect: OperationState{ + ActionMode: ActionModeSequential, + }, + err: ``, + }, + { + desp: "invalid object format", + data: `{"actionMode": parallel}`, + expect: OperationState{ + ActionMode: ActionModeParallel, + }, + err: `invalid character 'p' looking for beginning of value`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + v := OperationState{} + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/states.go b/model/states.go index 0b1438e..9acdb9a 100644 --- a/model/states.go +++ b/model/states.go @@ -136,23 +136,6 @@ func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFi // GetMetadata ... func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } -// OperationState Defines actions be performed. Does not wait for incoming events -type OperationState struct { - BaseState - // Specifies whether actions are performed in sequence or in parallel - ActionMode ActionMode `json:"actionMode,omitempty"` - // Actions to be performed - Actions []Action `json:"actions" validate:"required,min=1,dive"` - // State specific timeouts - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` -} - -// OperationStateTimeout ... -type OperationStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` -} - // InjectState ... type InjectState struct { BaseState From 96039609dc0e9195cb8d4736cea588a8f204cf98 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 14 Nov 2022 22:34:26 +0800 Subject: [PATCH 036/110] fix(117): split callback state to separate file (#119) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/callback_state.go | 36 ++++++++++++++++++++++++++++++++++++ model/states.go | 20 -------------------- 2 files changed, 36 insertions(+), 20 deletions(-) create mode 100644 model/callback_state.go diff --git a/model/callback_state.go b/model/callback_state.go new file mode 100644 index 0000000..42b1578 --- /dev/null +++ b/model/callback_state.go @@ -0,0 +1,36 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +// CallbackState executes a function and waits for callback event that indicates +// completion of the task. +type CallbackState struct { + BaseState + // Defines the action to be executed + Action Action `json:"action" validate:"required"` + // References a unique callback event name in the defined workflow events + EventRef string `json:"eventRef" validate:"required"` + // Time period to wait for incoming events (ISO 8601 format) + Timeouts *CallbackStateTimeout `json:"timeouts" validate:"omitempty"` + // Event data filter + EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` +} + +// CallbackStateTimeout defines timeout settings for callback state +type CallbackStateTimeout struct { + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` +} diff --git a/model/states.go b/model/states.go index 9acdb9a..124b578 100644 --- a/model/states.go +++ b/model/states.go @@ -150,26 +150,6 @@ type InjectStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } -// CallbackState ... -type CallbackState struct { - BaseState - // Defines the action to be executed - Action Action `json:"action" validate:"required"` - // References a unique callback event name in the defined workflow events - EventRef string `json:"eventRef" validate:"required"` - // Time period to wait for incoming events (ISO 8601 format) - Timeouts CallbackStateTimeout `json:"timeouts" validate:"required"` - // Event data filter - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -// CallbackStateTimeout ... -type CallbackStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - // BaseSwitchState ... type BaseSwitchState struct { BaseState From 588fc1fe84102bbf97845f77cc06e65e5f97a607 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 14 Nov 2022 22:39:42 +0800 Subject: [PATCH 037/110] chore(*): add prow & labeler (#128) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- .github/labeler.yml | 16 +++++++ .github/labels.yml | 27 ++++++++++++ .github/workflows/prow_commands.yml | 43 +++++++++++++++++++ .../prow_cron_pull_request_merge.yml | 28 ++++++++++++ .github/workflows/prow_remove_lgtm.yml | 25 +++++++++++ .github/workflows/pull_request_labeler.yml | 28 ++++++++++++ .github/workflows/stale.yaml | 34 +++++++++++++++ OWNERS | 7 +++ 8 files changed, 208 insertions(+) create mode 100644 .github/labeler.yml create mode 100644 .github/labels.yml create mode 100644 .github/workflows/prow_commands.yml create mode 100644 .github/workflows/prow_cron_pull_request_merge.yml create mode 100644 .github/workflows/prow_remove_lgtm.yml create mode 100644 .github/workflows/pull_request_labeler.yml create mode 100644 .github/workflows/stale.yaml create mode 100644 OWNERS diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..5d4b3d7 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,16 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +source: + - '**/*' \ No newline at end of file diff --git a/.github/labels.yml b/.github/labels.yml new file mode 100644 index 0000000..7f6aa94 --- /dev/null +++ b/.github/labels.yml @@ -0,0 +1,27 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +area: + - 'bug' + - 'important' + - 'feature' + +kind: + - 'failing-test' + - 'cleanup' + +priority: + - 'P0' + - 'P1' + - 'P2' diff --git a/.github/workflows/prow_commands.yml b/.github/workflows/prow_commands.yml new file mode 100644 index 0000000..d854fae --- /dev/null +++ b/.github/workflows/prow_commands.yml @@ -0,0 +1,43 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: "Prow github actions" +on: + issue_comment: + types: [created] + +jobs: + execute: + runs-on: ubuntu-latest + steps: + - uses: jpmcb/prow-github-actions@v1.1.3 + with: + prow-commands: | + /assign + /unassign + /approve + /retitle + /area + /kind + /priority + /remove + /lgtm + /close + /reopen + /lock + /milestone + /hold + /cc + /uncc + github-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file diff --git a/.github/workflows/prow_cron_pull_request_merge.yml b/.github/workflows/prow_cron_pull_request_merge.yml new file mode 100644 index 0000000..e48e6e1 --- /dev/null +++ b/.github/workflows/prow_cron_pull_request_merge.yml @@ -0,0 +1,28 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: "Merge on lgtm label" +on: + schedule: + - cron: "30 * * * *" + +jobs: + execute: + runs-on: ubuntu-latest + steps: + - uses: jpmcb/prow-github-actions@v1.1.3 + with: + jobs: 'lgtm' + github-token: "${{ secrets.GITHUB_TOKEN }}" + merge-method: 'squash' \ No newline at end of file diff --git a/.github/workflows/prow_remove_lgtm.yml b/.github/workflows/prow_remove_lgtm.yml new file mode 100644 index 0000000..455f696 --- /dev/null +++ b/.github/workflows/prow_remove_lgtm.yml @@ -0,0 +1,25 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: "Pull Request update lgtm" +on: pull_request + +jobs: + execute: + runs-on: ubuntu-latest + steps: + - uses: jpmcb/prow-github-actions@v1.1.3 + with: + jobs: 'lgtm' + github-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file diff --git a/.github/workflows/pull_request_labeler.yml b/.github/workflows/pull_request_labeler.yml new file mode 100644 index 0000000..f8eab65 --- /dev/null +++ b/.github/workflows/pull_request_labeler.yml @@ -0,0 +1,28 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: "Pull Request Labeler" +on: +- pull_request_target + +jobs: + triage: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v4.0.2 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml new file mode 100644 index 0000000..ccd7367 --- /dev/null +++ b/.github/workflows/stale.yaml @@ -0,0 +1,34 @@ +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: Mark stale issues and pull requests +on: + schedule: + - cron: "0 0 * * *" + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' + stale-pr-message: 'This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' + stale-issue-label: 'Stale Issue' + exempt-issue-labels: 'Status: Blocked, Status: In progress, Status: On hold, Status: Awaiting response' + stale-pr-label: 'Stale PR' + exempt-pr-labels: 'Status: Blocked, Status: In progress, Status: On hold, Status: Awaiting response' + days-before-stale: 45 + days-before-close: 20 \ No newline at end of file diff --git a/OWNERS b/OWNERS new file mode 100644 index 0000000..61177ce --- /dev/null +++ b/OWNERS @@ -0,0 +1,7 @@ +# List of usernames who may use /lgtm +reviewers: +- ricardozanini + +# List of usernames who may use /approve +approvers: +- ricardozanini \ No newline at end of file From 02fbe11007e63999a7b032a164597d91ebe1e1bf Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Mon, 14 Nov 2022 11:41:42 -0300 Subject: [PATCH 038/110] allow empty auth definitions in parser (#111) * allow empty auth definitions in parser fixes https://github.com/serverlessworkflow/sdk-go/issues/110 Signed-off-by: spolti * allow empty auth definitions in parser fixes https://github.com/serverlessworkflow/sdk-go/issues/110 fixes https://github.com/serverlessworkflow/sdk-go/issues/126 Signed-off-by: spolti * fix(*): use different types to auth validate Signed-off-by: lsytj0413 <511121939@qq.com> * small change Signed-off-by: spolti Signed-off-by: spolti Signed-off-by: lsytj0413 <511121939@qq.com> Co-authored-by: lsytj0413 <511121939@qq.com> --- model/auth.go | 65 ----------------- model/auth_test.go | 73 ------------------- model/workflow.go | 57 ++++++++++++++- parser/parser_test.go | 163 ++++++++++++++++++++++++++++++++++++++---- 4 files changed, 206 insertions(+), 152 deletions(-) diff --git a/model/auth.go b/model/auth.go index c6cd7f9..b23f8ec 100644 --- a/model/auth.go +++ b/model/auth.go @@ -17,35 +17,8 @@ package model import ( "encoding/json" "fmt" - "reflect" - - validator "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func init() { - val.GetValidator().RegisterStructValidation(AuthDefinitionsStructLevelValidation, AuthDefinitions{}) -} - -// AuthDefinitionsStructLevelValidation custom validator for unique name of the auth methods -func AuthDefinitionsStructLevelValidation(structLevel validator.StructLevel) { - authDefs := structLevel.Current().Interface().(AuthDefinitions) - dict := map[string]bool{} - - for _, a := range authDefs.Defs { - if !dict[a.Name] { - dict[a.Name] = true - } else { - structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") - } - } -} - -// AuthDefinitions used to define authentication information applied to resources defined in the operation property of function definitions -type AuthDefinitions struct { - Defs []Auth -} - // AuthType ... 
type AuthType string @@ -92,44 +65,6 @@ type Auth struct { Properties AuthProperties `json:"properties" validate:"required"` } -// UnmarshalJSON implements json.Unmarshaler -func (a *AuthDefinitions) UnmarshalJSON(b []byte) error { - if len(b) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("no bytes to unmarshal") - } - - // See if we can guess based on the first character - switch b[0] { - case '"': - return a.unmarshalFile(b) - case '[': - return a.unmarshalMany(b) - } - - return fmt.Errorf("auth value '%s' is not supported, it must be an array or string", string(b)) -} - -func (a *AuthDefinitions) unmarshalFile(data []byte) error { - b, err := unmarshalFile(data) - if err != nil { - return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) - } - - return a.unmarshalMany(b) -} - -func (a *AuthDefinitions) unmarshalMany(data []byte) error { - var auths []Auth - err := json.Unmarshal(data, &auths) - if err != nil { - return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) - } - - a.Defs = auths - return nil -} - // UnmarshalJSON Auth definition func (a *Auth) UnmarshalJSON(data []byte) error { auth := make(map[string]json.RawMessage) diff --git a/model/auth_test.go b/model/auth_test.go index 870d8f8..44d49e1 100644 --- a/model/auth_test.go +++ b/model/auth_test.go @@ -19,81 +19,8 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func TestAuthDefinitionsStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - authDefs AuthDefinitions - err string - } - testCases := []testCase{ - { - desp: "nil defs", - authDefs: AuthDefinitions{ - Defs: nil, - }, - err: ``, - }, - { - desp: "zero length defs", - authDefs: AuthDefinitions{ - Defs: []Auth{}, - }, - err: ``, - }, - { - desp: "multi unique defs", - authDefs: AuthDefinitions{ - Defs: []Auth{ - { - Name: "1", - }, - { - Name: "2", - }, - { - Name: "3", - }, - }, - }, - err: ``, - }, - { - desp: "multi non-unique defs", - authDefs: AuthDefinitions{ - Defs: []Auth{ - { - Name: "1", - }, - { - Name: "2", - }, - { - Name: "1", - }, - }, - }, - err: `Key: 'AuthDefinitions.Name' Error:Field validation for 'Name' failed on the 'reqnameunique' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.authDefs) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { t.Run("BearerAuthProperties", func(t *testing.T) { a1JSON := `{ diff --git a/model/workflow.go b/model/workflow.go index c2e8181..53b9706 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -57,6 +57,7 @@ const ( func init() { val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) + val.GetValidator().RegisterStructValidation(BaseWorkflowStructLevelValidation, BaseWorkflow{}) } func continueAsStructLevelValidation(structLevel validator.StructLevel) { @@ -108,7 +109,60 @@ type BaseWorkflow struct { // Auth definitions can be used to define authentication information that should be applied to resources defined in the operation // property of function definitions. It is not used as authentication information for the function invocation, // but just to access the resource containing the function invocation information. 
- Auth AuthDefinitions `json:"auth,omitempty"` + Auth AuthArray `json:"auth,omitempty" validate:"omitempty"` +} + +// BaseWorkflowStructLevelValidation custom validator for unique name of the auth methods +func BaseWorkflowStructLevelValidation(structLevel validator.StructLevel) { + // NOTE: we cannot add the custom validation of auth to AuthArray + // because `RegisterStructValidation` only works with struct type + wf := structLevel.Current().Interface().(BaseWorkflow) + dict := map[string]bool{} + + for _, a := range wf.Auth { + if !dict[a.Name] { + dict[a.Name] = true + } else { + structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") + } + } +} + +type AuthArray []Auth + +func (r *AuthArray) UnmarshalJSON(data []byte) error { + if len(data) == 0 { + return fmt.Errorf("no bytes to unmarshal") + } + + switch data[0] { + case '"': + return r.unmarshalFile(data) + case '[': + return r.unmarshalMany(data) + } + + return fmt.Errorf("auth value '%s' is not supported, it must be an array or string", string(data)) +} + +func (r *AuthArray) unmarshalFile(data []byte) error { + b, err := unmarshalFile(data) + if err != nil { + return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) + } + + return r.unmarshalMany(b) +} + +func (r *AuthArray) unmarshalMany(data []byte) error { + var auths []Auth + err := json.Unmarshal(data, &auths) + if err != nil { + return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) + } + + *r = auths + return nil } // Workflow base definition @@ -130,6 +184,7 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &workflowMap); err != nil { return err } + var rawStates []json.RawMessage if err := json.Unmarshal(workflowMap["states"], &rawStates); err != nil { return err diff --git a/parser/parser_test.go b/parser/parser_test.go index 59598b5..7986437 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -15,8 +15,10 @@ package parser import ( + "encoding/json" "os" "path/filepath" + "strings" "testing" "github.com/stretchr/testify/assert" @@ -171,11 +173,11 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, operationState.Actions) assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 1) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token + auth := w.Auth + assert.Equal(t, len(auth), 1) + assert.Equal(t, "testAuth", auth[0].Name) + assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) + bearerProperties := auth[0].Properties.(*model.BearerAuthProperties).Token assert.Equal(t, "test_token", bearerProperties) }, }, { @@ -194,15 +196,15 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, operationState.Actions) assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) assert.NotNil(t, w.Auth) - assert.NotNil(t, w.Auth.Defs) - assert.Equal(t, len(w.Auth.Defs), 2) - assert.Equal(t, "testAuth", w.Auth.Defs[0].Name) - assert.Equal(t, model.AuthTypeBearer, w.Auth.Defs[0].Scheme) - bearerProperties := w.Auth.Defs[0].Properties.(*model.BearerAuthProperties).Token + auth := w.Auth + assert.Equal(t, len(auth), 2) + assert.Equal(t, "testAuth", auth[0].Name) + assert.Equal(t, 
model.AuthTypeBearer, auth[0].Scheme) + bearerProperties := auth[0].Properties.(*model.BearerAuthProperties).Token assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", w.Auth.Defs[1].Name) - assert.Equal(t, model.AuthTypeBasic, w.Auth.Defs[1].Scheme) - basicProperties := w.Auth.Defs[1].Properties.(*model.BasicAuthProperties) + assert.Equal(t, "testAuth2", auth[1].Name) + assert.Equal(t, model.AuthTypeBasic, auth[1].Scheme) + basicProperties := auth[1].Properties.(*model.BasicAuthProperties) assert.Equal(t, "test_user", basicProperties.Username) assert.Equal(t, "test_pwd", basicProperties.Password) }, @@ -497,3 +499,138 @@ func TestFromFile(t *testing.T) { ) } } + +func TestUnmarshalWorkflowBasicTests(t *testing.T) { + t.Run("BasicWorkflowYamlNoAuthDefs", func(t *testing.T) { + workflow, err := FromYAMLSource([]byte(` +id: helloworld +version: '1.0.0' +specVersion: '0.8' +name: Hello World Workflow +description: Inject Hello World +start: Hello State +states: +- name: Hello State + type: inject + data: + result: Hello World! + end: true +`)) + assert.Nil(t, err) + assert.NotNil(t, workflow) + + b, err := json.Marshal(workflow) + assert.Nil(t, err) + assert.True(t, !strings.Contains(string(b), "auth")) + + workflow = nil + err = json.Unmarshal(b, &workflow) + assert.Nil(t, err) + }) + + t.Run("BasicWorkflowBasicAuthJSONSource", func(t *testing.T) { + workflow, err := FromJSONSource([]byte(` +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.8", + "auth": [ + { + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" + } + }, + { + "name": "testAuth2", + "scheme": "basic", + "properties": { + "username": "test_user", + "password": "test_pwd" + } + } + ], + "states": [ + { + "name": "Hello State", + "type": "inject", + "data": { + "result": "Hello World!" + }, + "end": true + } + ] +} +`)) + assert.Nil(t, err) + assert.NotNil(t, workflow.Auth) + + b, _ := json.Marshal(workflow) + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{},\"data\":{\"result\":\"Hello World!\"}}]}", + string(b)) + + }) + + t.Run("BasicWorkflowBasicAuthStringJSONSource", func(t *testing.T) { + workflow, err := FromJSONSource([]byte(` +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.8", + "auth": "./testdata/workflows/urifiles/auth.json", + "states": [ + { + "name": "Hello State", + "type": "inject", + "data": { + "result": "Hello World!" 
+ }, + "end": true + } + ] +} +`)) + assert.Nil(t, err) + assert.NotNil(t, workflow.Auth) + + b, _ := json.Marshal(workflow) + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{},\"data\":{\"result\":\"Hello World!\"}}]}", + string(b)) + + }) + + t.Run("BasicWorkflowInteger", func(t *testing.T) { + workflow, err := FromJSONSource([]byte(` +{ + "id": "applicantrequest", + "version": "1.0", + "name": "Applicant Request Decision Workflow", + "description": "Determine if applicant request is valid", + "start": "CheckApplication", + "specVersion": "0.7", + "auth": 123, + "states": [ + { + "name": "Hello State", + "type": "inject", + "data": { + "result": "Hello World!" + }, + "end": true + } + ] +} +`)) + + assert.NotNil(t, err) + assert.Equal(t, "auth value '123' is not supported, it must be an array or string", err.Error()) + assert.Nil(t, workflow) + }) +} From 09942a6261cf6341324b8a6df310997332a838cc Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Wed, 16 Nov 2022 19:33:02 +0800 Subject: [PATCH 039/110] fix(121): split inject state to separate file (#123) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- README.md | 2 +- model/inject_state.go | 29 +++++++++++++++++++++++++++++ model/states.go | 14 -------------- 3 files changed, 30 insertions(+), 15 deletions(-) create mode 100644 model/inject_state.go diff --git a/README.md b/README.md index 27725ec..7120046 100644 --- a/README.md +++ b/README.md @@ -64,4 +64,4 @@ The `Workflow` structure then can be used in your application. ## Slack Channel -Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. +Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. \ No newline at end of file diff --git a/model/inject_state.go b/model/inject_state.go new file mode 100644 index 0000000..a6d8ec5 --- /dev/null +++ b/model/inject_state.go @@ -0,0 +1,29 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +// InjectState used to inject static data into state data input. 
+type InjectState struct { + BaseState + // JSON object which can be set as states data input and can be manipulated via filters + Data map[string]interface{} `json:"data" validate:"required,min=1"` + // State specific timeouts + Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` +} + +// InjectStateTimeout defines timeout settings for inject state +type InjectStateTimeout struct { + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` +} diff --git a/model/states.go b/model/states.go index 124b578..f532b76 100644 --- a/model/states.go +++ b/model/states.go @@ -136,20 +136,6 @@ func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFi // GetMetadata ... func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } -// InjectState ... -type InjectState struct { - BaseState - // JSON object which can be set as states data input and can be manipulated via filters - Data map[string]interface{} `json:"data" validate:"required,min=1"` - // State specific timeouts - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` -} - -// InjectStateTimeout ... -type InjectStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - // BaseSwitchState ... type BaseSwitchState struct { BaseState From 7b28917b28ce19989598c494edd9c1c33f3c3242 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Thu, 17 Nov 2022 22:05:15 +0800 Subject: [PATCH 040/110] fix(122): split switch state to separate file (#127) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/states.go | 206 +----------- model/switch_state.go | 138 ++++++++ model/switch_state_test.go | 310 ++++++++++++++++++ model/workflow.go | 6 - parser/parser_test.go | 227 +++++++++++-- .../applicationrequest-issue103.json | 2 +- .../workflows/applicationrequest-issue69.json | 2 +- .../applicationrequest.multiauth.json | 2 +- .../workflows/applicationrequest.url.json | 2 +- .../workflows/eventbaseddataandswitch.sw.json | 185 ++++++----- .../workflows/greetings-v08-spec.sw.yaml | 7 +- 11 files changed, 746 insertions(+), 341 deletions(-) create mode 100644 model/switch_state.go create mode 100644 model/switch_state_test.go diff --git a/model/states.go b/model/states.go index f532b76..51b7e7e 100644 --- a/model/states.go +++ b/model/states.go @@ -14,10 +14,6 @@ package model -import ( - "encoding/json" -) - const ( // StateTypeDelay ... StateTypeDelay = "delay" @@ -50,10 +46,7 @@ func getActionsModelMapping(stateType string, s map[string]interface{}) (State, case StateTypeParallel: return &ParallelState{}, true case StateTypeSwitch: - if _, ok := s["dataConditions"]; ok { - return &DataBasedSwitchState{}, true - } - return &EventBasedSwitchState{}, true + return &SwitchState{}, true case StateTypeInject: return &InjectState{}, true case StateTypeForEach: @@ -135,200 +128,3 @@ func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFi // GetMetadata ... func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } - -// BaseSwitchState ... -type BaseSwitchState struct { - BaseState - // Default transition of the workflow if there is no matching data conditions. 
Can include a transition or end definition - DefaultCondition DefaultCondition `json:"defaultCondition,omitempty"` -} - -// EventBasedSwitchState Permits transitions to other states based on events -type EventBasedSwitchState struct { - BaseSwitchState - // Defines conditions evaluated against events - EventConditions []EventCondition `json:"eventConditions" validate:"required,min=1,dive"` - // State specific timeouts - Timeouts *EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON implementation for json Unmarshal function for the EventBasedSwitch type -func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { - return err - } - eventBasedSwitch := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &eventBasedSwitch); err != nil { - return err - } - - eventBaseTimeoutsRawMessage, ok := eventBasedSwitch["timeouts"] - if ok { - if err := json.Unmarshal(eventBaseTimeoutsRawMessage, &j.Timeouts); err != nil { - return err - } - } - - var rawConditions []json.RawMessage - if err := json.Unmarshal(eventBasedSwitch["eventConditions"], &rawConditions); err != nil { - return err - } - - j.EventConditions = make([]EventCondition, len(rawConditions)) - var mapConditions map[string]interface{} - for i, rawCondition := range rawConditions { - if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { - return err - } - var condition EventCondition - if _, ok := mapConditions["end"]; ok { - condition = &EndEventCondition{} - } else { - condition = &TransitionEventCondition{} - } - if err := json.Unmarshal(rawCondition, condition); err != nil { - return err - } - j.EventConditions[i] = condition - } - - return nil -} - -// EventBasedSwitchStateTimeout ... -type EventBasedSwitchStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - EventTimeout string `json:"eventTimeout,omitempty"` -} - -// EventCondition ... -type EventCondition interface { - GetName() string - GetEventRef() string - GetEventDataFilter() EventDataFilter - GetMetadata() Metadata -} - -// BaseEventCondition ... -type BaseEventCondition struct { - // Event condition name - Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events - EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` - Metadata Metadata `json:"metadata,omitempty"` -} - -// GetEventRef ... -func (e *BaseEventCondition) GetEventRef() string { return e.EventRef } - -// GetEventDataFilter ... -func (e *BaseEventCondition) GetEventDataFilter() EventDataFilter { return e.EventDataFilter } - -// GetMetadata ... -func (e *BaseEventCondition) GetMetadata() Metadata { return e.Metadata } - -// GetName ... 
-func (e *BaseEventCondition) GetName() string { return e.Name } - -// TransitionEventCondition Switch state data event condition -type TransitionEventCondition struct { - BaseEventCondition - // Next transition of the workflow if there is valid matches - Transition Transition `json:"transition" validate:"required"` -} - -// EndEventCondition Switch state data event condition -type EndEventCondition struct { - BaseEventCondition - // Explicit transition to end - End End `json:"end" validate:"required"` -} - -// DataBasedSwitchState Permits transitions to other states based on data conditions -type DataBasedSwitchState struct { - BaseSwitchState - DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` - Timeouts *DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` -} - -// UnmarshalJSON implementation for json Unmarshal function for the DataBasedSwitch type -func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &j.BaseSwitchState); err != nil { - return err - } - dataBasedSwitch := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &dataBasedSwitch); err != nil { - return err - } - if err := json.Unmarshal(data, &j.Timeouts); err != nil { - return err - } - var rawConditions []json.RawMessage - if err := json.Unmarshal(dataBasedSwitch["dataConditions"], &rawConditions); err != nil { - return err - } - j.DataConditions = make([]DataCondition, len(rawConditions)) - var mapConditions map[string]interface{} - for i, rawCondition := range rawConditions { - if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { - return err - } - var condition DataCondition - if _, ok := mapConditions["end"]; ok { - condition = &EndDataCondition{} - } else { - condition = &TransitionDataCondition{} - } - if err := json.Unmarshal(rawCondition, condition); err != nil { - return err - } - j.DataConditions[i] = condition - } - return nil -} - -// DataBasedSwitchStateTimeout ... -type DataBasedSwitchStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} - -// DataCondition ... -type DataCondition interface { - GetName() string - GetCondition() string - GetMetadata() Metadata -} - -// BaseDataCondition ... -type BaseDataCondition struct { - // Data condition name - Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false - Condition string `json:"condition" validate:"required"` - Metadata Metadata `json:"metadata,omitempty"` -} - -// GetName ... -func (b *BaseDataCondition) GetName() string { return b.Name } - -// GetCondition ... -func (b *BaseDataCondition) GetCondition() string { return b.Condition } - -// GetMetadata ... -func (b *BaseDataCondition) GetMetadata() Metadata { return b.Metadata } - -// TransitionDataCondition ... -type TransitionDataCondition struct { - BaseDataCondition - // Workflow transition if condition is evaluated to true - Transition Transition `json:"transition" validate:"required"` -} - -// EndDataCondition ... 
-type EndDataCondition struct {
-	BaseDataCondition
-	// Workflow end definition
-	End End `json:"end" validate:"required"`
-}
diff --git a/model/switch_state.go b/model/switch_state.go
new file mode 100644
index 0000000..0890060
--- /dev/null
+++ b/model/switch_state.go
@@ -0,0 +1,138 @@
+// Copyright 2022 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package model
+
+import (
+	"context"
+	"reflect"
+
+	val "github.com/serverlessworkflow/sdk-go/v2/validator"
+
+	"github.com/go-playground/validator/v10"
+)
+
+func init() {
+	val.GetValidator().RegisterStructValidationCtx(SwitchStateStructLevelValidation, SwitchState{})
+	val.GetValidator().RegisterStructValidationCtx(DefaultConditionStructLevelValidation, DefaultCondition{})
+	val.GetValidator().RegisterStructValidationCtx(EventConditionStructLevelValidation, EventCondition{})
+	val.GetValidator().RegisterStructValidationCtx(DataConditionStructLevelValidation, DataCondition{})
+}
+
+// SwitchState is the workflow's gateway: it directs transitions of a workflow based on certain conditions.
+type SwitchState struct {
+	// TODO: don't use BaseState for this, there are a few fields that SwitchState doesn't need.
+	BaseState
+
+	// Default transition of the workflow if there are no matching data conditions. Can include a transition or end definition
+	// Required
+	DefaultCondition DefaultCondition `json:"defaultCondition"`
+	// Defines conditions evaluated against events
+	EventConditions []EventCondition `json:"eventConditions" validate:"omitempty,min=1,dive"`
+	// Defines conditions evaluated against data
+	DataConditions []DataCondition `json:"dataConditions" validate:"omitempty,min=1,dive"`
+	// SwitchState specific timeouts
+	Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"`
+}
+
+// SwitchStateStructLevelValidation custom validator for SwitchState
+func SwitchStateStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) {
+	switchState := structLevel.Current().Interface().(SwitchState)
+	switch {
+	case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0:
+		structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataConditions, eventConditions")
+	case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0:
+		structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataConditions, eventConditions")
+	}
+}
+
+// DefaultCondition can be either a transition or end definition
+type DefaultCondition struct {
+	Transition *Transition `json:"transition,omitempty"`
+	End *End `json:"end,omitempty"`
+}
+
+// DefaultConditionStructLevelValidation custom validator for DefaultCondition
+func DefaultConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) {
+	defaultCondition := structLevel.Current().Interface().(DefaultCondition)
+	switch {
+	case defaultCondition.End == nil && defaultCondition.Transition == nil:
+		structLevel.ReportError(reflect.ValueOf(defaultCondition), "Transition", "transition", "required", "must have one of transition, end")
+	case defaultCondition.Transition != nil && defaultCondition.End != nil:
+		structLevel.ReportError(reflect.ValueOf(defaultCondition), "Transition", "transition", "exclusive", "must have one of transition, end")
+	}
+}
+
+// SwitchStateTimeout defines the specific timeout settings for switch state
+type SwitchStateTimeout struct {
+	StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"`
+
+	// EventTimeout specifies how long to wait before transitioning to defaultCondition
+	// when event-based conditions do not arrive.
+	// NOTE: this is only available for EventConditions
+	EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"`
+}
+
+// EventCondition specifies events which the switch state must wait for. 
+type EventCondition struct { + // Event condition name + Name string `json:"name,omitempty"` + // References a unique event name in the defined workflow events + EventRef string `json:"eventRef" validate:"required"` + // Event data filter definition + EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + + // Explicit transition to end + End *End `json:"end" validate:"omitempty"` + // Workflow transition if condition is evaluated to true + Transition *Transition `json:"transition" validate:"omitempty"` +} + +// EventConditionStructLevelValidation custom validator for EventCondition +func EventConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { + eventCondition := structLevel.Current().Interface().(EventCondition) + switch { + case eventCondition.End == nil && eventCondition.Transition == nil: + structLevel.ReportError(reflect.ValueOf(eventCondition), "Transition", "transition", "required", "must have one of transition, end") + case eventCondition.Transition != nil && eventCondition.End != nil: + structLevel.ReportError(reflect.ValueOf(eventCondition), "Transition", "transition", "exclusive", "must have one of transition, end") + } +} + +// DataCondition specify a data-based condition statement which causes a transition to another workflow state +// if evaluated to true. +type DataCondition struct { + // Data condition name + Name string `json:"name,omitempty"` + // Workflow expression evaluated against state data. Must evaluate to true or false + Condition string `json:"condition" validate:"required"` + Metadata Metadata `json:"metadata,omitempty"` + + // Explicit transition to end + End *End `json:"end" validate:"omitempty"` + // Workflow transition if condition is evaluated to true + Transition *Transition `json:"transition" validate:"omitempty"` +} + +// DataConditionStructLevelValidation custom validator for DataCondition +func DataConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { + dataCondition := structLevel.Current().Interface().(DataCondition) + switch { + case dataCondition.End == nil && dataCondition.Transition == nil: + structLevel.ReportError(reflect.ValueOf(dataCondition), "Transition", "transition", "required", "must have one of transition, end") + case dataCondition.Transition != nil && dataCondition.End != nil: + structLevel.ReportError(reflect.ValueOf(dataCondition), "Transition", "transition", "exclusive", "must have one of transition, end") + } +} diff --git a/model/switch_state_test.go b/model/switch_state_test.go new file mode 100644 index 0000000..9bfbf17 --- /dev/null +++ b/model/switch_state_test.go @@ -0,0 +1,310 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestSwitchStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj SwitchState + err string + } + testCases := []testCase{ + { + desp: "normal & eventConditions", + obj: SwitchState{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + err: ``, + }, + { + desp: "normal & dataConditions", + obj: SwitchState{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + err: ``, + }, + { + desp: "missing eventConditions & dataConditions", + obj: SwitchState{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + }, + err: `Key: 'SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, + }, + { + desp: "exclusive eventConditions & dataConditions", + obj: SwitchState{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + err: `Key: 'SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestDefaultConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj DefaultCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: DefaultCondition{ + End: &End{}, + }, + err: ``, + }, + { + desp: "normal & transition", + obj: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: DefaultCondition{}, + err: `DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: DefaultCondition{ + End: &End{}, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestEventConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj EventCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: EventCondition{ + EventRef: "1", + End: &End{}, + }, + err: ``, + }, 
+ { + desp: "normal & transition", + obj: EventCondition{ + EventRef: "1", + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: EventCondition{ + EventRef: "1", + }, + err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: EventCondition{ + EventRef: "1", + End: &End{}, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestDataConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj DataCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: DataCondition{ + Condition: "1", + End: &End{}, + }, + err: ``, + }, + { + desp: "normal & transition", + obj: DataCondition{ + Condition: "1", + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: DataCondition{ + Condition: "1", + }, + err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: DataCondition{ + Condition: "1", + End: &End{}, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 53b9706..ab9168e 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -403,12 +403,6 @@ func (s *Start) UnmarshalJSON(data []byte) error { return nil } -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - Transition *Transition `json:"transition,omitempty"` - End *End `json:"end,omitempty"` -} - // Schedule ... type Schedule struct { // Time interval (must be repeating interval) described with ISO 8601 format. Declares when workflow instances will be automatically created. 
diff --git a/parser/parser_test.go b/parser/parser_test.go index 7986437..63dc276 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -91,9 +91,9 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "Event Based Switch Transitions", w.Name) assert.Equal(t, "Start", w.States[0].GetName()) assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - assert.IsType(t, &model.EventBasedSwitchState{}, w.States[1]) - assert.Equal(t, "PT1H", w.States[1].(*model.EventBasedSwitchState).Timeouts.EventTimeout) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + assert.IsType(t, &model.SwitchState{}, w.States[1]) + assert.Equal(t, "PT1H", w.States[1].(*model.SwitchState).Timeouts.EventTimeout) }, }, { "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { @@ -150,21 +150,21 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.IsType(t, &model.EventBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.EventBasedSwitchState) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + eventState := w.States[0].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.EventConditions) assert.NotEmpty(t, eventState.Name) - assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) + assert.IsType(t, model.EventCondition{}, eventState.EventConditions[0]) }, }, { "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + eventState := w.States[0].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) assert.Equal(t, "CheckApplication", w.Start.StateName) assert.IsType(t, &model.OperationState{}, w.States[1]) @@ -183,11 +183,11 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + eventState := w.States[0].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) assert.Equal(t, "CheckApplication", w.Start.StateName) assert.IsType(t, &model.OperationState{}, w.States[1]) @@ -211,20 +211,20 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) + assert.IsType(t, 
&model.SwitchState{}, w.States[0]) + eventState := w.States[0].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) }, }, { "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - eventState := w.States[0].(*model.DataBasedSwitchState) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + eventState := w.States[0].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) + assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) }, }, { @@ -263,8 +263,8 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) - dataBaseSwitchState := w.States[0].(*model.DataBasedSwitchState) + assert.IsType(t, &model.SwitchState{}, w.States[0]) + dataBaseSwitchState := w.States[0].(*model.SwitchState) assert.NotNil(t, dataBaseSwitchState) assert.NotEmpty(t, dataBaseSwitchState.DataConditions) assert.Equal(t, "CheckApplication", w.States[0].GetName()) @@ -321,13 +321,13 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Notify Customer", w.Name) - eventState := w.States[1].(*model.DataBasedSwitchState) + eventState := w.States[1].(*model.SwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, &model.EndDataCondition{}, eventState.DataConditions[0]) + assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - endDataCondition := eventState.DataConditions[0].(*model.EndDataCondition) + endDataCondition := eventState.DataConditions[0] assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) assert.Equal(t, "${ del(.customerCount) }", endDataCondition.End.ContinueAs.Data) @@ -397,28 +397,28 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "PT2S", w.States[2].(*model.ParallelState).Timeouts.StateExecTimeout.Single) // Switch state - assert.NotEmpty(t, w.States[3].(*model.EventBasedSwitchState).EventConditions) + assert.NotEmpty(t, w.States[3].(*model.SwitchState).EventConditions) assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].GetName()) assert.Equal(t, model.StateType("switch"), w.States[3].GetType()) - assert.Equal(t, "PT1H", w.States[3].(*model.EventBasedSwitchState).Timeouts.EventTimeout) - assert.Equal(t, "PT1S", w.States[3].(*model.EventBasedSwitchState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[3].(*model.EventBasedSwitchState).Timeouts.StateExecTimeout.Single) + assert.Equal(t, "PT1H", w.States[3].(*model.SwitchState).Timeouts.EventTimeout) + assert.Equal(t, "PT1S", w.States[3].(*model.SwitchState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[3].(*model.SwitchState).Timeouts.StateExecTimeout.Single) assert.Equal(t, &model.Transition{ 
NextState: "HandleNoVisaDecision", - }, w.States[3].(*model.EventBasedSwitchState).DefaultCondition.Transition) + }, w.States[3].(*model.SwitchState).DefaultCondition.Transition) // DataBasedSwitchState - dataBased := w.States[4].(*model.DataBasedSwitchState) + dataBased := w.States[4].(*model.SwitchState) assert.NotEmpty(t, dataBased.DataConditions) assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].GetName()) - dataCondition := dataBased.DataConditions[0].(*model.TransitionDataCondition) + dataCondition := dataBased.DataConditions[0] assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) assert.Equal(t, &model.Transition{ NextState: "RejectApplication", - }, w.States[4].(*model.DataBasedSwitchState).DefaultCondition.Transition) - assert.Equal(t, "PT1S", w.States[4].(*model.DataBasedSwitchState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[4].(*model.DataBasedSwitchState).Timeouts.StateExecTimeout.Single) + }, w.States[4].(*model.SwitchState).DefaultCondition.Transition) + assert.Equal(t, "PT1S", w.States[4].(*model.SwitchState).Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[4].(*model.SwitchState).Timeouts.StateExecTimeout.Single) // operation state assert.NotEmpty(t, w.States[5].(*model.OperationState).Actions) @@ -634,3 +634,164 @@ states: assert.Nil(t, workflow) }) } + +func TestUnmarshalWorkflowSwitchState(t *testing.T) { + t.Run("WorkflowSwitchStateEventConditions", func(t *testing.T) { + workflow, err := FromYAMLSource([]byte(` +id: helloworld +version: '1.0.0' +specVersion: '0.8' +name: Hello World Workflow +description: Inject Hello World +start: Hello State +states: +- name: Hello State + type: switch + eventConditions: + - eventRef: visaApprovedEvent + transition: + nextState: HandleApprovedVisa + - eventRef: visaRejectedEvent + transition: + nextState: HandleRejectedVisa + defaultCondition: + transition: + nextState: HandleNoVisaDecision +- name: HandleApprovedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleApprovedVisaWorkflowID + end: + terminate: true +- name: HandleRejectedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleRejectedVisaWorkflowID + end: + terminate: true +- name: HandleNoVisaDecision + type: operation + actions: + - subFlowRef: + workflowId: handleNoVisaDecisionWorkfowId + end: + terminate: true + +`)) + assert.Nil(t, err) + assert.NotNil(t, workflow) + + b, err := json.Marshal(workflow) + assert.Nil(t, err) + assert.True(t, strings.Contains(string(b), "eventConditions")) + + workflow = nil + err = json.Unmarshal(b, &workflow) + assert.Nil(t, err) + }) + + t.Run("WorkflowSwitchStateDataConditions", func(t *testing.T) { + workflow, err := FromYAMLSource([]byte(` +id: helloworld +version: '1.0.0' +specVersion: '0.8' +name: Hello World Workflow +description: Inject Hello World +start: Hello State +states: +- name: Hello State + type: switch + dataConditions: + - condition: ${ true } + transition: + nextState: HandleApprovedVisa + - condition: ${ false } + transition: + nextState: HandleRejectedVisa + defaultCondition: + transition: + nextState: HandleNoVisaDecision +- name: HandleApprovedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleApprovedVisaWorkflowID + end: + terminate: true +- name: HandleRejectedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleRejectedVisaWorkflowID + end: + terminate: true +- name: HandleNoVisaDecision + 
type: operation + actions: + - subFlowRef: + workflowId: handleNoVisaDecisionWorkfowId + end: + terminate: true +`)) + assert.Nil(t, err) + assert.NotNil(t, workflow) + + b, err := json.Marshal(workflow) + assert.Nil(t, err) + assert.True(t, strings.Contains(string(b), "dataConditions")) + + workflow = nil + err = json.Unmarshal(b, &workflow) + assert.Nil(t, err) + }) + + t.Run("WorkflowSwitchStateDataConditions with wrong field name", func(t *testing.T) { + workflow, err := FromYAMLSource([]byte(` +id: helloworld +version: '1.0.0' +specVersion: '0.8' +name: Hello World Workflow +description: Inject Hello World +start: Hello State +states: +- name: Hello State + type: switch + dataCondition: + - condition: ${ true } + transition: + nextState: HandleApprovedVisa + - condition: ${ false } + transition: + nextState: HandleRejectedVisa + defaultCondition: + transition: + nextState: HandleNoVisaDecision +- name: HandleApprovedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleApprovedVisaWorkflowID + end: + terminate: true +- name: HandleRejectedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleRejectedVisaWorkflowID + end: + terminate: true +- name: HandleNoVisaDecision + type: operation + actions: + - subFlowRef: + workflowId: handleNoVisaDecisionWorkfowId + end: + terminate: true +`)) + assert.Error(t, err) + assert.Regexp(t, `validation for \'DataConditions\' failed on the \'required\' tag`, err) + assert.Nil(t, workflow) + }) +} diff --git a/parser/testdata/workflows/applicationrequest-issue103.json b/parser/testdata/workflows/applicationrequest-issue103.json index 9b8c0a2..48b71fc 100644 --- a/parser/testdata/workflows/applicationrequest-issue103.json +++ b/parser/testdata/workflows/applicationrequest-issue103.json @@ -37,7 +37,7 @@ } } ], - "default": { + "defaultCondition": { "transition": { "nextState": "RejectApplication" } diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json index 876faec..8f95b6e 100644 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ b/parser/testdata/workflows/applicationrequest-issue69.json @@ -37,7 +37,7 @@ } } ], - "default": { + "defaultCondition": { "transition": { "nextState": "RejectApplication" } diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json index 0b7d5e9..66159a5 100644 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ b/parser/testdata/workflows/applicationrequest.multiauth.json @@ -54,7 +54,7 @@ } } ], - "default": { + "defaultCondition": { "transition": { "nextState": "RejectApplication" } diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json index 0a9be7a..a410993 100644 --- a/parser/testdata/workflows/applicationrequest.url.json +++ b/parser/testdata/workflows/applicationrequest.url.json @@ -27,7 +27,7 @@ } } ], - "default": { + "defaultCondition": { "transition": { "nextState": "RejectApplication" } diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json index 843669d..bdf80d6 100644 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ b/parser/testdata/workflows/eventbaseddataandswitch.sw.json @@ -1,102 +1,107 @@ { - "id": "eventbaseddataandswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch 
Transitions with Event Database Condition", - "specVersion": "0.8", - "start": { - "stateName": "Start" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" + "id": "eventbaseddataandswitch", + "version": "1.0", + "name": "Event Based Switch Transitions", + "description": "Event Based Switch Transitions with Event Database Condition", + "specVersion": "0.8", + "start": { + "stateName": "Start" }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "Start", - "type": "switch", - "dataConditions": [ + "events": [ { - "condition": "${ true }", - "transition": "CheckVisaStatus" + "name": "visaApprovedEvent", + "type": "VisaApproved", + "source": "visaCheckSource" + }, + { + "name": "visaRejectedEvent", + "type": "VisaRejected", + "source": "visaCheckSource" } - ] - }, - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ + ], + "states": [ { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } + "name": "Start", + "type": "switch", + "dataConditions": [ + { + "condition": "${ true }", + "transition": "CheckVisaStatus" + } + ], + "defaultCondition": { + "transition": { + "nextState": "CheckVisaStatus" + } + } }, { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ + "name": "CheckVisaStatus", + "type": "switch", + "eventConditions": [ + { + "eventRef": "visaApprovedEvent", + "transition": { + "nextState": "HandleApprovedVisa" + } + }, + { + "eventRef": "visaRejectedEvent", + "transition": { + "nextState": "HandleRejectedVisa" + } + } + ], + "timeouts": { + "eventTimeout": "PT1H" + }, + "defaultCondition": { + "transition": { + "nextState": "HandleNoVisaDecision" + } + } + }, { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ + "name": "HandleApprovedVisa", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "handleApprovedVisaWorkflowID" + } + } + ], + "end": { + "terminate": true + } + }, { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ + "name": "HandleRejectedVisa", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "handleRejectedVisaWorkflowID" + } + } + ], + "end": { + "terminate": true + } + }, { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } + "name": "HandleNoVisaDecision", + "type": "operation", + "actions": [ + { + "subFlowRef": { + "workflowId": "handleNoVisaDecisionWorkfowId" + } + } + ], + "end": { + "terminate": true + } } - ], - "end": { - "terminate": true - } - } - ] + ] } \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 7d69a81..c1d46be 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -96,9 +96,10 @@ states: nextState: StartApplication defaultCondition: transition: RejectApplication - 
stateExecTimeout: - total: PT1S - single: PT2S + timeouts: + stateExecTimeout: + total: PT1S + single: PT2S - name: GreetSequential type: operation actionMode: sequential From bb8e1d9e6d19c2e130fef98474f88797320ae1a2 Mon Sep 17 00:00:00 2001 From: Songlin Yang <511121939@qq.com> Date: Mon, 21 Nov 2022 19:04:48 +0800 Subject: [PATCH 041/110] feat(114): support IncludePath option to define directory for non-absolute import files (#130) Signed-off-by: lsytj0413 <511121939@qq.com> Signed-off-by: lsytj0413 <511121939@qq.com> --- model/util.go | 45 ++++++++++++++++++++++--- model/util_benchmark_test.go | 31 +++++++++++++++++ model/util_test.go | 20 +++++++++++ parser/parser_test.go | 4 +++ parser/{ => testdata}/eventdefs.yml | 0 parser/{ => testdata}/functiondefs.json | 0 6 files changed, 96 insertions(+), 4 deletions(-) create mode 100644 model/util_benchmark_test.go rename parser/{ => testdata}/eventdefs.yml (100%) rename parser/{ => testdata}/functiondefs.json (100%) diff --git a/model/util.go b/model/util.go index 5e4e102..837e2d5 100644 --- a/model/util.go +++ b/model/util.go @@ -22,6 +22,7 @@ import ( "os" "path/filepath" "strings" + "sync/atomic" "sigs.k8s.io/yaml" ) @@ -44,11 +45,20 @@ func getBytesFromFile(s string) (b []byte, err error) { } return buf.Bytes(), nil } - if strings.HasPrefix(s, prefix) { - s = strings.TrimPrefix(s, prefix) - } else if s, err = filepath.Abs(s); err != nil { - return nil, err + s = strings.TrimPrefix(s, prefix) + + if !filepath.IsAbs(s) { + // The import file is an non-absolute path, we join it with include path + // TODO: if the file didn't find in any include path, we should report an error + for _, p := range IncludePaths() { + sn := filepath.Join(p, s) + if _, err := os.Stat(sn); err == nil { + s = sn + break + } + } } + if b, err = os.ReadFile(filepath.Clean(s)); err != nil { return nil, err } @@ -104,3 +114,30 @@ func unmarshalFile(data []byte) (b []byte, err error) { } return file, nil } + +var defaultIncludePaths atomic.Value + +func init() { + wd, err := os.Getwd() + if err != nil { + panic(err) + } + + SetIncludePaths([]string{wd}) +} + +// IncludePaths will return the search path for non-absolute import file +func IncludePaths() []string { + return defaultIncludePaths.Load().([]string) +} + +// SetIncludePaths will update the search path for non-absolute import file +func SetIncludePaths(paths []string) { + for _, path := range paths { + if !filepath.IsAbs(path) { + panic(fmt.Errorf("%s must be an absolute file path", path)) + } + } + + defaultIncludePaths.Store(paths) +} diff --git a/model/util_benchmark_test.go b/model/util_benchmark_test.go new file mode 100644 index 0000000..4048a6b --- /dev/null +++ b/model/util_benchmark_test.go @@ -0,0 +1,31 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "fmt" + "testing" +) + +func Benchmark_IncludePaths_Parallel(b *testing.B) { + b.RunParallel(func(p *testing.PB) { + i := 0 + for p.Next() { + IncludePaths() + SetIncludePaths([]string{fmt.Sprintf("%v", i)}) + i++ + } + }) +} diff --git a/model/util_test.go b/model/util_test.go index c960f3c..ce43d7b 100644 --- a/model/util_test.go +++ b/model/util_test.go @@ -13,3 +13,23 @@ // limitations under the License. package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIncludePaths(t *testing.T) { + assert.NotNil(t, IncludePaths()) + assert.True(t, len(IncludePaths()) > 0) + + // update include paths + paths := []string{"/root", "/path"} + SetIncludePaths(paths) + assert.Equal(t, IncludePaths(), paths) + + assert.PanicsWithError(t, "1 must be an absolute file path", assert.PanicTestFunc(func() { + SetIncludePaths([]string{"1"}) + })) +} diff --git a/parser/parser_test.go b/parser/parser_test.go index 63dc276..37b93c9 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -24,12 +24,16 @@ import ( "github.com/stretchr/testify/assert" "github.com/serverlessworkflow/sdk-go/v2/model" + "github.com/serverlessworkflow/sdk-go/v2/test" ) func TestBasicValidation(t *testing.T) { rootPath := "./testdata/workflows" files, err := os.ReadDir(rootPath) assert.NoError(t, err) + + model.SetIncludePaths(append(model.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) + for _, file := range files { if !file.IsDir() { workflow, err := FromFile(filepath.Join(rootPath, file.Name())) diff --git a/parser/eventdefs.yml b/parser/testdata/eventdefs.yml similarity index 100% rename from parser/eventdefs.yml rename to parser/testdata/eventdefs.yml diff --git a/parser/functiondefs.json b/parser/testdata/functiondefs.json similarity index 100% rename from parser/functiondefs.json rename to parser/testdata/functiondefs.json From a073b0e8eca114f0c2fe76edf4d078f86f7769c7 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 20 Dec 2022 07:40:54 -0300 Subject: [PATCH 042/110] first iteration - deepcopy (#131) * first iteration - deepcopy Signed-off-by: spolti * review additions Signed-off-by: spolti * add builders for the object types Signed-off-by: spolti Signed-off-by: spolti --- .github/workflows/Go-SDK-PR-Check.yaml | 4 + .lift.toml | 3 + Makefile | 6 +- go.mod | 2 +- hack/boilerplate.txt | 13 + hack/deepcopy-gen.sh | 48 + model/action.go | 3 +- model/auth.go | 15 + model/callback_state.go | 4 + model/common.go | 2 +- model/delay_state.go | 4 + model/doc.go | 18 + model/event.go | 8 +- model/event_state.go | 6 +- model/foreach_state.go | 4 + model/inject_state.go | 6 +- model/object.go | 157 ++ model/operation_state.go | 4 + model/parallel_state.go | 4 + model/retry.go | 2 +- model/sleep_state.go | 4 + model/states.go | 4 +- model/switch_state.go | 6 +- model/util.go | 6 +- model/workflow.go | 14 +- model/workflow_test.go | 8 +- model/zz_generated.deepcopy.go | 1530 +++++++++++++++++ parser/parser_test.go | 74 +- .../applicationrequest.multiauth.json | 10 +- .../workflows/greetings-v08-spec.sw.yaml | 12 + 30 files changed, 1946 insertions(+), 35 deletions(-) create mode 100644 .lift.toml create mode 100644 hack/boilerplate.txt create mode 100755 hack/deepcopy-gen.sh create mode 100644 model/doc.go create mode 100644 model/object.go create mode 100644 model/zz_generated.deepcopy.go diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 8226f60..ebe5592 100644 --- 
a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -57,6 +57,10 @@ jobs: make addheaders changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) [[ -z "$changed_files" ]] || (printf "Some files are missing the headers: \n$changed_files\n Did you run 'make lint' before sending the PR" && exit 1) + - name: Check DeepCopy Generation + run: | + export GOPATH=$(go env GOPATH) + make deepcopy - name: Check Formatting run: | make fmt diff --git a/.lift.toml b/.lift.toml new file mode 100644 index 0000000..f9516c9 --- /dev/null +++ b/.lift.toml @@ -0,0 +1,3 @@ +ignoreFiles = """ +model/zz_generated.deepcopy.go +""" \ No newline at end of file diff --git a/Makefile b/Makefile index 826475e..5a7c8a4 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,10 @@ lint: .PHONY: test coverage="false" -test: +test: deepcopy make lint @go test ./... + +.PHONY: deepcopy +deepcopy: $(DEEPCOPY_GEN) ## Download deeepcopy-gen locally if necessary. + ./hack/deepcopy-gen.sh deepcopy \ No newline at end of file diff --git a/go.mod b/go.mod index 426549c..73df64b 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.7.0 + gopkg.in/yaml.v3 v3.0.1 k8s.io/apimachinery v0.25.1 sigs.k8s.io/yaml v1.3.0 ) @@ -24,7 +25,6 @@ require ( golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect golang.org/x/text v0.3.7 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect k8s.io/klog/v2 v2.70.1 // indirect sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect ) diff --git a/hack/boilerplate.txt b/hack/boilerplate.txt new file mode 100644 index 0000000..6a7425e --- /dev/null +++ b/hack/boilerplate.txt @@ -0,0 +1,13 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. \ No newline at end of file diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh new file mode 100755 index 0000000..353a682 --- /dev/null +++ b/hack/deepcopy-gen.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh +# and adapted to only install and run the deepcopy-gen + +set -o errexit +set -o nounset +set -o pipefail + +SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. +echo "Script root is $SCRIPT_ROOT" + +GENS="$1" +shift 1 + +( + # To support running this script from anywhere, first cd into this directory, + # and then install with forced module mode on and fully qualified name. + # make sure your GOPATH env is properly set. + # it will go under $GOPATH/bin + cd "$(dirname "${0}")" + GO111MODULE=on go install k8s.io/code-generator/cmd/deepcopy-gen@latest +) + +function codegen::join() { local IFS="$1"; shift; echo "$*"; } + +if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then + echo "Generating deepcopy funcs" + export GO111MODULE=on + # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 + "${GOPATH}/bin/deepcopy-gen" -v 1 \ + --input-dirs ./model -O zz_generated.deepcopy \ + --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" \ + "$@" +fi diff --git a/model/action.go b/model/action.go index c313415..f8773ef 100644 --- a/model/action.go +++ b/model/action.go @@ -69,11 +69,10 @@ type FunctionRef struct { RefName string `json:"refName" validate:"required"` // Function arguments // TODO: validate it as required if function type is graphql - Arguments map[string]interface{} `json:"arguments,omitempty"` + Arguments map[string]Object `json:"arguments,omitempty"` // String containing a valid GraphQL selection set // TODO: validate it as required if function type is graphql SelectionSet string `json:"selectionSet,omitempty"` - // Invoke specifies if the subflow should be invoked sync or async. // Defaults to sync. Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` diff --git a/model/auth.go b/model/auth.go index b23f8ec..bae7a29 100644 --- a/model/auth.go +++ b/model/auth.go @@ -110,6 +110,9 @@ type AuthProperties interface { GetMetadata() *Metadata // GetSecret ... GetSecret() string + // DeepCopyAuthProperties fixes in.Properties.DeepCopyAuthProperties undefined (type AuthProperties has no + // field or method DeepCopyAuthProperties) + DeepCopyAuthProperties() AuthProperties } // BaseAuthProperties ... @@ -148,6 +151,10 @@ func (b *BaseAuthProperties) GetSecret() string { return b.Secret } +func (b *BasicAuthProperties) DeepCopyAuthProperties() AuthProperties { + return b +} + // BasicAuthProperties Basic Auth Info type BasicAuthProperties struct { BaseAuthProperties @@ -186,6 +193,10 @@ type BearerAuthProperties struct { Token string `json:"token" validate:"required"` } +func (b *BearerAuthProperties) DeepCopyAuthProperties() AuthProperties { + return b +} + // UnmarshalJSON ... func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { properties := make(map[string]json.RawMessage) @@ -232,6 +243,10 @@ type OAuth2AuthProperties struct { RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` } +func (b *OAuth2AuthProperties) DeepCopyAuthProperties() AuthProperties { + return b +} + // TODO: use reflection to unmarshal the keys and think on a generic approach to handle them // UnmarshalJSON ... 
diff --git a/model/callback_state.go b/model/callback_state.go index 42b1578..04efa07 100644 --- a/model/callback_state.go +++ b/model/callback_state.go @@ -28,6 +28,10 @@ type CallbackState struct { EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` } +func (in *CallbackState) DeepCopyState() State { + return in +} + // CallbackStateTimeout defines timeout settings for callback state type CallbackStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` diff --git a/model/common.go b/model/common.go index 34c5ac7..28b2045 100644 --- a/model/common.go +++ b/model/common.go @@ -21,4 +21,4 @@ type Common struct { } // Metadata information -type Metadata map[string]interface{} +type Metadata map[string]Object diff --git a/model/delay_state.go b/model/delay_state.go index 8d96ebc..33db874 100644 --- a/model/delay_state.go +++ b/model/delay_state.go @@ -20,3 +20,7 @@ type DelayState struct { // Amount of time (ISO 8601 format) to delay TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` } + +func (in *DelayState) DeepCopyState() State { + return in +} diff --git a/model/doc.go b/model/doc.go new file mode 100644 index 0000000..1508354 --- /dev/null +++ b/model/doc.go @@ -0,0 +1,18 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +// +k8s:deepcopy-gen=package +// +k8s:deepcopy-gen:nonpointer-interfaces=true diff --git a/model/event.go b/model/event.go index c51fad0..9fb205e 100644 --- a/model/event.go +++ b/model/event.go @@ -30,7 +30,7 @@ const ( // EventKindConsumed means the event continuation of workflow instance execution EventKindConsumed EventKind = "consumed" - // EventKindProduced means the event was created during worflow instance execution + // EventKindProduced means the event was created during workflow instance execution EventKindProduced EventKind = "produced" ) @@ -98,13 +98,11 @@ type EventRef struct { ResultEventRef string `json:"resultEventRef" validate:"required"` // ResultEventTimeout defines maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the actionExecutionTimeout ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // TODO: create StringOrMap structure // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. - Data interface{} `json:"data,omitempty"` + Data Object `json:"data,omitempty"` // Add additional extension context attributes to the produced event - ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` - + ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` // Invoke specifies if the subflow should be invoked sync or async. // Defaults to sync. 
Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` diff --git a/model/event_state.go b/model/event_state.go index ba6eb9e..f0fd896 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -34,6 +34,10 @@ type EventState struct { Timeouts *EventStateTimeout `json:"timeouts,omitempty"` } +func (e *EventState) DeepCopyState() State { + return e +} + type eventStateForUnmarshal EventState // UnmarshalJSON unmarshal EventState object from json bytes @@ -50,7 +54,7 @@ func (e *EventState) UnmarshalJSON(data []byte) error { return nil } -// OnEvents define which actions are be be performed for the one or more events. +// OnEvents define which actions are be performed for the one or more events. type OnEvents struct { // References one or more unique event names in the defined workflow events EventRefs []string `json:"eventRefs" validate:"required,min=1"` diff --git a/model/foreach_state.go b/model/foreach_state.go index ef30dd5..f35e609 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -61,6 +61,10 @@ type ForEachState struct { Mode ForEachModeType `json:"mode,omitempty"` } +func (f *ForEachState) DeepCopyState() State { + return f +} + type forEachStateForUnmarshal ForEachState func (f *ForEachState) UnmarshalJSON(data []byte) error { diff --git a/model/inject_state.go b/model/inject_state.go index a6d8ec5..91544e6 100644 --- a/model/inject_state.go +++ b/model/inject_state.go @@ -18,11 +18,15 @@ package model type InjectState struct { BaseState // JSON object which can be set as states data input and can be manipulated via filters - Data map[string]interface{} `json:"data" validate:"required,min=1"` + Data map[string]Object `json:"data" validate:"required,min=1"` // State specific timeouts Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` } +func (in *InjectState) DeepCopyState() State { + return in +} + // InjectStateTimeout defines timeout settings for inject state type InjectStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` diff --git a/model/object.go b/model/object.go new file mode 100644 index 0000000..cdb87ac --- /dev/null +++ b/model/object.go @@ -0,0 +1,157 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "math" + "strconv" + "strings" +) + +// Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type. +// The DeepCopy tool allow us to easily import the Workflow types into a Kubernetes operator, +// which requires the DeepCopy method. +// +// It can marshal and unmarshal any type. +// This object type can be three types: +// - String - holds string values +// - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is +// parsed to int32 +// - raw - holds any not typed value, replaces the interface{} behavior. 
+type Object struct { + IObject +} + +// IObject interface that can converted into one of the three subtypes +type IObject interface { + DeepCopyIObject() IObject +} + +// raw generic subtype +type raw struct { + IObject interface{} +} + +func (o raw) DeepCopyIObject() IObject { + return o +} + +// Integer int32 type +type Integer int + +func (m Integer) DeepCopyIObject() IObject { + return m +} + +// String string type +type String string + +func (m String) DeepCopyIObject() IObject { + return m +} + +// MarshalJSON marshal the given json object into the respective Object subtype. +func (obj Object) MarshalJSON() ([]byte, error) { + switch val := obj.IObject.(type) { + case String: + return []byte(fmt.Sprintf(`%q`, val)), nil + case Integer: + return []byte(fmt.Sprintf(`%d`, val)), nil + case raw: + custom, err := json.Marshal(&struct { + raw + }{ + val, + }) + if err != nil { + return nil, err + } + + // remove the field name and the last '}' for marshalling purposes + st := strings.Replace(string(custom), "{\"IObject\":", "", 1) + st = strings.TrimSuffix(st, "}") + return []byte(st), nil + default: + return []byte(fmt.Sprintf("%+v", obj.IObject)), nil + } +} + +// UnmarshalJSON ... +func (obj *Object) UnmarshalJSON(data []byte) error { + var test interface{} + if err := json.Unmarshal(data, &test); err != nil { + return err + } + switch val := test.(type) { + case string: + var strVal String + if err := json.Unmarshal(data, &strVal); err != nil { + return err + } + obj.IObject = strVal + return nil + + case map[string]interface{}: + var cstVal raw + if err := json.Unmarshal(data, &cstVal.IObject); err != nil { + return err + } + obj.IObject = cstVal + return nil + + default: + // json parses all not typed numbers as float64, let's enforce to int32 + if valInt, parseErr := strconv.Atoi(fmt.Sprint(val)); parseErr != nil { + return fmt.Errorf("falied to parse %d to int32: %s", valInt, parseErr.Error()) + } else { + var intVal Integer + if err := json.Unmarshal(data, &intVal); err != nil { + return err + } + obj.IObject = intVal + return nil + } + } +} + +// FromInt creates an Object with an int32 value. +func FromInt(val int) Object { + if val > math.MaxInt32 || val < math.MinInt32 { + panic(fmt.Errorf("value: %d overflows int32", val)) + } + return Object{Integer(int32(val))} +} + +// FromString creates an Object with a string value. +func FromString(val string) Object { + return Object{String(val)} +} + +// FromRaw creates an Object with untyped values. 
+func FromRaw(val interface{}) Object { + var rawVal Object + data, err := json.Marshal(val) + if err != nil { + panic(err) + } + var cstVal raw + if err := json.Unmarshal(data, &cstVal.IObject); err != nil { + panic(err) + } + rawVal.IObject = cstVal + return rawVal +} diff --git a/model/operation_state.go b/model/operation_state.go index 95f4a68..1a74e22 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -30,6 +30,10 @@ type OperationState struct { Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` } +func (o *OperationState) DeepCopyState() State { + return o +} + type operationStateForUnmarshal OperationState // UnmarshalJSON unmarshal OperationState object from json bytes diff --git a/model/parallel_state.go b/model/parallel_state.go index 4bb77bb..f943431 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -54,6 +54,10 @@ type ParallelState struct { Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` } +func (s *ParallelState) DeepCopyState() State { + return s +} + type parallelStateForUnmarshal ParallelState // UnmarshalJSON unmarshal ParallelState object from json bytes diff --git a/model/retry.go b/model/retry.go index cd4c351..2f2e57c 100644 --- a/model/retry.go +++ b/model/retry.go @@ -17,7 +17,7 @@ package model import ( "reflect" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" "k8s.io/apimachinery/pkg/util/intstr" "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" diff --git a/model/sleep_state.go b/model/sleep_state.go index 93798db..c7d2fd4 100644 --- a/model/sleep_state.go +++ b/model/sleep_state.go @@ -24,6 +24,10 @@ type SleepState struct { Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` } +func (in *SleepState) DeepCopyState() State { + return in +} + // SleepStateTimeout defines timeout settings for sleep state type SleepStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` diff --git a/model/states.go b/model/states.go index 51b7e7e..b2523b6 100644 --- a/model/states.go +++ b/model/states.go @@ -35,7 +35,7 @@ const ( StateTypeSleep = "sleep" ) -func getActionsModelMapping(stateType string, s map[string]interface{}) (State, bool) { +func getActionsModelMapping(stateType string) (State, bool) { switch stateType { case StateTypeDelay: return &DelayState{}, true @@ -74,6 +74,8 @@ type State interface { GetUsedForCompensation() bool GetEnd() *End GetMetadata() *Metadata + // DeepCopyState fixes undefined (type State has no field or method DeepCopyState) + DeepCopyState() State } // BaseState ... 
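An illustrative sketch of the Object type added in model/object.go above — not part of the patch itself. The import path, the FromString/FromInt/FromRaw constructors, and the map[string]Object argument shape all come from the files in this series; the standalone main program is only for demonstration.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// FunctionRef arguments are now map[string]model.Object instead of
	// map[string]interface{}, so each value is built via a constructor.
	args := map[string]model.Object{
		"customer": model.FromString("${ .customerId }"),
		"limit":    model.FromInt(10),
		"payload":  model.FromRaw(map[string]interface{}{"approved": true}),
	}

	b, err := json.Marshal(args)
	if err != nil {
		panic(err)
	}
	// Prints (map keys sorted by encoding/json):
	// {"customer":"${ .customerId }","limit":10,"payload":{"approved":true}}
	fmt.Println(string(b))

	// Unmarshalling restores the typed subtypes: String, Integer, and raw.
	var back map[string]model.Object
	if err := json.Unmarshal(b, &back); err != nil {
		panic(err)
	}
	fmt.Println(back["limit"]) // {10}
}
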
diff --git a/model/switch_state.go b/model/switch_state.go index 0890060..937f608 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -20,7 +20,7 @@ import ( val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" ) func init() { @@ -46,6 +46,10 @@ type SwitchState struct { Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` } +func (in *SwitchState) DeepCopyState() State { + return in +} + // SwitchStateStructLevelValidation custom validator for SwitchState func SwitchStateStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { switchState := structLevel.Current().Interface().(SwitchState) diff --git a/model/util.go b/model/util.go index 837e2d5..849eb5c 100644 --- a/model/util.go +++ b/model/util.go @@ -21,12 +21,14 @@ import ( "net/http" "os" "path/filepath" + "sigs.k8s.io/yaml" + "strings" "sync/atomic" - - "sigs.k8s.io/yaml" ) +// +k8s:deepcopy-gen=false + const prefix = "file:/" // TRUE used by bool fields that needs a boolean pointer diff --git a/model/workflow.go b/model/workflow.go index ab9168e..e6a9d00 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -20,7 +20,7 @@ import ( "fmt" "reflect" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" val "github.com/serverlessworkflow/sdk-go/v2/validator" ) @@ -197,7 +197,7 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { return err } - actionsMode, ok := getActionsModelMapping(mapState["type"].(string), mapState) + actionsMode, ok := getActionsModelMapping(mapState["type"].(string)) if !ok { return fmt.Errorf("state %s not supported", mapState["type"]) } @@ -549,11 +549,9 @@ type ContinueAs struct { WorkflowID string `json:"workflowId" validate:"required"` // Version of the workflow to continue execution as. Version string `json:"version,omitempty"` - - // TODO: add object or string data type // If string type, an expression which selects parts of the states data output to become the workflow data input of // continued execution. If object type, a custom object to become the workflow data input of the continued execution - Data interface{} `json:"data,omitempty"` + Data Object `json:"data,omitempty"` // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. Overwrites any specific settings set by that workflow WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` } @@ -591,9 +589,11 @@ type ProduceEvent struct { EventRef string `json:"eventRef" validate:"required"` // TODO: add object or string data type // If String, expression which selects parts of the states data output to become the data of the produced event. If object a custom object to become the data of produced event. - Data interface{} `json:"data,omitempty"` + // TODO + Data string `json:"data,omitempty"` // Add additional event extension context attributes - ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` + // TODO map[string]interface{} + ContextAttributes map[string]string `json:"contextAttributes,omitempty"` } // StateDataFilter ... 
diff --git a/model/workflow_test.go b/model/workflow_test.go index 4a862e0..388fe07 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -36,7 +36,7 @@ func TestContinueAsStructLevelValidation(t *testing.T) { continueAs: ContinueAs{ WorkflowID: "another-test", Version: "2", - Data: "${ del(.customerCount) }", + Data: FromString("${ del(.customerCount) }"), WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "PT1H", Interrupt: false, @@ -50,7 +50,7 @@ func TestContinueAsStructLevelValidation(t *testing.T) { continueAs: ContinueAs{ WorkflowID: "test", Version: "1", - Data: "${ del(.customerCount) }", + Data: FromString("${ del(.customerCount) }"), WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "invalid", }, @@ -95,7 +95,7 @@ func TestContinueAsUnmarshalJSON(t *testing.T) { expect: ContinueAs{ WorkflowID: "1", Version: "2", - Data: "3", + Data: FromString("3"), WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "PT1H", Interrupt: true, @@ -110,7 +110,7 @@ func TestContinueAsUnmarshalJSON(t *testing.T) { expect: ContinueAs{ WorkflowID: "1", Version: "", - Data: nil, + Data: Object{}, WorkflowExecTimeout: WorkflowExecTimeout{ Duration: "", Interrupt: false, diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go new file mode 100644 index 0000000..872570d --- /dev/null +++ b/model/zz_generated.deepcopy.go @@ -0,0 +1,1530 @@ +//go:build !ignore_autogenerated +// +build !ignore_autogenerated + +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Code generated by deepcopy-gen. DO NOT EDIT. + +package model + +import ( + json "encoding/json" + + floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + intstr "k8s.io/apimachinery/pkg/util/intstr" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Action) DeepCopyInto(out *Action) { + *out = *in + if in.FunctionRef != nil { + in, out := &in.FunctionRef, &out.FunctionRef + *out = new(FunctionRef) + (*in).DeepCopyInto(*out) + } + if in.EventRef != nil { + in, out := &in.EventRef, &out.EventRef + *out = new(EventRef) + (*in).DeepCopyInto(*out) + } + if in.SubFlowRef != nil { + in, out := &in.SubFlowRef, &out.SubFlowRef + *out = new(WorkflowRef) + **out = **in + } + out.Sleep = in.Sleep + if in.NonRetryableErrors != nil { + in, out := &in.NonRetryableErrors, &out.NonRetryableErrors + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.RetryableErrors != nil { + in, out := &in.RetryableErrors, &out.RetryableErrors + *out = make([]string, len(*in)) + copy(*out, *in) + } + out.ActionDataFilter = in.ActionDataFilter + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Action. 
+func (in *Action) DeepCopy() *Action { + if in == nil { + return nil + } + out := new(Action) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ActionDataFilter) DeepCopyInto(out *ActionDataFilter) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ActionDataFilter. +func (in *ActionDataFilter) DeepCopy() *ActionDataFilter { + if in == nil { + return nil + } + out := new(ActionDataFilter) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Auth) DeepCopyInto(out *Auth) { + *out = *in + if in.Properties != nil { + out.Properties = in.Properties.DeepCopyAuthProperties() + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auth. +func (in *Auth) DeepCopy() *Auth { + if in == nil { + return nil + } + out := new(Auth) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in AuthArray) DeepCopyInto(out *AuthArray) { + { + in := &in + *out = make(AuthArray, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthArray. +func (in AuthArray) DeepCopy() AuthArray { + if in == nil { + return nil + } + out := new(AuthArray) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BaseAuthProperties) DeepCopyInto(out *BaseAuthProperties) { + *out = *in + in.Common.DeepCopyInto(&out.Common) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseAuthProperties. +func (in *BaseAuthProperties) DeepCopy() *BaseAuthProperties { + if in == nil { + return nil + } + out := new(BaseAuthProperties) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BaseState) DeepCopyInto(out *BaseState) { + *out = *in + if in.OnErrors != nil { + in, out := &in.OnErrors, &out.OnErrors + *out = make([]OnError, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Transition != nil { + in, out := &in.Transition, &out.Transition + *out = new(Transition) + (*in).DeepCopyInto(*out) + } + if in.StateDataFilter != nil { + in, out := &in.StateDataFilter, &out.StateDataFilter + *out = new(StateDataFilter) + **out = **in + } + if in.End != nil { + in, out := &in.End, &out.End + *out = new(End) + (*in).DeepCopyInto(*out) + } + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = new(Metadata) + if **in != nil { + in, out := *in, *out + *out = make(map[string]Object, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseState. +func (in *BaseState) DeepCopy() *BaseState { + if in == nil { + return nil + } + out := new(BaseState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { + *out = *in + if in.Start != nil { + in, out := &in.Start, &out.Start + *out = new(Start) + (*in).DeepCopyInto(*out) + } + if in.Annotations != nil { + in, out := &in.Annotations, &out.Annotations + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.DataInputSchema != nil { + in, out := &in.DataInputSchema, &out.DataInputSchema + *out = new(DataInputSchema) + (*in).DeepCopyInto(*out) + } + if in.Secrets != nil { + in, out := &in.Secrets, &out.Secrets + *out = make(Secrets, len(*in)) + copy(*out, *in) + } + if in.Constants != nil { + in, out := &in.Constants, &out.Constants + *out = new(Constants) + (*in).DeepCopyInto(*out) + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(Timeouts) + (*in).DeepCopyInto(*out) + } + if in.Errors != nil { + in, out := &in.Errors, &out.Errors + *out = make([]Error, len(*in)) + copy(*out, *in) + } + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(Metadata, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Auth != nil { + in, out := &in.Auth, &out.Auth + *out = make(AuthArray, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseWorkflow. +func (in *BaseWorkflow) DeepCopy() *BaseWorkflow { + if in == nil { + return nil + } + out := new(BaseWorkflow) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BasicAuthProperties) DeepCopyInto(out *BasicAuthProperties) { + *out = *in + in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAuthProperties. +func (in *BasicAuthProperties) DeepCopy() *BasicAuthProperties { + if in == nil { + return nil + } + out := new(BasicAuthProperties) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BearerAuthProperties) DeepCopyInto(out *BearerAuthProperties) { + *out = *in + in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BearerAuthProperties. +func (in *BearerAuthProperties) DeepCopy() *BearerAuthProperties { + if in == nil { + return nil + } + out := new(BearerAuthProperties) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Branch) DeepCopyInto(out *Branch) { + *out = *in + if in.Actions != nil { + in, out := &in.Actions, &out.Actions + *out = make([]Action, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(BranchTimeouts) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Branch. +func (in *Branch) DeepCopy() *Branch { + if in == nil { + return nil + } + out := new(Branch) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BranchTimeouts) DeepCopyInto(out *BranchTimeouts) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BranchTimeouts. +func (in *BranchTimeouts) DeepCopy() *BranchTimeouts { + if in == nil { + return nil + } + out := new(BranchTimeouts) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CallbackState) DeepCopyInto(out *CallbackState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + in.Action.DeepCopyInto(&out.Action) + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(CallbackStateTimeout) + (*in).DeepCopyInto(*out) + } + out.EventDataFilter = in.EventDataFilter + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackState. +func (in *CallbackState) DeepCopy() *CallbackState { + if in == nil { + return nil + } + out := new(CallbackState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CallbackStateTimeout) DeepCopyInto(out *CallbackStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackStateTimeout. +func (in *CallbackStateTimeout) DeepCopy() *CallbackStateTimeout { + if in == nil { + return nil + } + out := new(CallbackStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Common) DeepCopyInto(out *Common) { + *out = *in + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(Metadata, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Common. +func (in *Common) DeepCopy() *Common { + if in == nil { + return nil + } + out := new(Common) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Constants) DeepCopyInto(out *Constants) { + *out = *in + if in.Data != nil { + in, out := &in.Data, &out.Data + *out = make(map[string]json.RawMessage, len(*in)) + for key, val := range *in { + var outVal []byte + if val == nil { + (*out)[key] = nil + } else { + in, out := &val, &outVal + *out = make(json.RawMessage, len(*in)) + copy(*out, *in) + } + (*out)[key] = outVal + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Constants. +func (in *Constants) DeepCopy() *Constants { + if in == nil { + return nil + } + out := new(Constants) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ContinueAs) DeepCopyInto(out *ContinueAs) { + *out = *in + in.Data.DeepCopyInto(&out.Data) + out.WorkflowExecTimeout = in.WorkflowExecTimeout + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ContinueAs. 
+func (in *ContinueAs) DeepCopy() *ContinueAs { + if in == nil { + return nil + } + out := new(ContinueAs) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Correlation) DeepCopyInto(out *Correlation) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation. +func (in *Correlation) DeepCopy() *Correlation { + if in == nil { + return nil + } + out := new(Correlation) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Cron) DeepCopyInto(out *Cron) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cron. +func (in *Cron) DeepCopy() *Cron { + if in == nil { + return nil + } + out := new(Cron) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataCondition) DeepCopyInto(out *DataCondition) { + *out = *in + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(Metadata, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.End != nil { + in, out := &in.End, &out.End + *out = new(End) + (*in).DeepCopyInto(*out) + } + if in.Transition != nil { + in, out := &in.Transition, &out.Transition + *out = new(Transition) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCondition. +func (in *DataCondition) DeepCopy() *DataCondition { + if in == nil { + return nil + } + out := new(DataCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { + *out = *in + if in.FailOnValidationErrors != nil { + in, out := &in.FailOnValidationErrors, &out.FailOnValidationErrors + *out = new(bool) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSchema. +func (in *DataInputSchema) DeepCopy() *DataInputSchema { + if in == nil { + return nil + } + out := new(DataInputSchema) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DefaultCondition) DeepCopyInto(out *DefaultCondition) { + *out = *in + if in.Transition != nil { + in, out := &in.Transition, &out.Transition + *out = new(Transition) + (*in).DeepCopyInto(*out) + } + if in.End != nil { + in, out := &in.End, &out.End + *out = new(End) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultCondition. +func (in *DefaultCondition) DeepCopy() *DefaultCondition { + if in == nil { + return nil + } + out := new(DefaultCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *DelayState) DeepCopyInto(out *DelayState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DelayState. +func (in *DelayState) DeepCopy() *DelayState { + if in == nil { + return nil + } + out := new(DelayState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *End) DeepCopyInto(out *End) { + *out = *in + if in.ProduceEvents != nil { + in, out := &in.ProduceEvents, &out.ProduceEvents + *out = make([]ProduceEvent, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.ContinueAs != nil { + in, out := &in.ContinueAs, &out.ContinueAs + *out = new(ContinueAs) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new End. +func (in *End) DeepCopy() *End { + if in == nil { + return nil + } + out := new(End) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Error) DeepCopyInto(out *Error) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Error. +func (in *Error) DeepCopy() *Error { + if in == nil { + return nil + } + out := new(Error) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Event) DeepCopyInto(out *Event) { + *out = *in + in.Common.DeepCopyInto(&out.Common) + if in.Correlation != nil { + in, out := &in.Correlation, &out.Correlation + *out = make([]Correlation, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Event. +func (in *Event) DeepCopy() *Event { + if in == nil { + return nil + } + out := new(Event) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventCondition) DeepCopyInto(out *EventCondition) { + *out = *in + out.EventDataFilter = in.EventDataFilter + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(Metadata, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.End != nil { + in, out := &in.End, &out.End + *out = new(End) + (*in).DeepCopyInto(*out) + } + if in.Transition != nil { + in, out := &in.Transition, &out.Transition + *out = new(Transition) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventCondition. +func (in *EventCondition) DeepCopy() *EventCondition { + if in == nil { + return nil + } + out := new(EventCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventDataFilter) DeepCopyInto(out *EventDataFilter) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDataFilter. 
+func (in *EventDataFilter) DeepCopy() *EventDataFilter { + if in == nil { + return nil + } + out := new(EventDataFilter) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventRef) DeepCopyInto(out *EventRef) { + *out = *in + in.Data.DeepCopyInto(&out.Data) + if in.ContextAttributes != nil { + in, out := &in.ContextAttributes, &out.ContextAttributes + *out = make(map[string]Object, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventRef. +func (in *EventRef) DeepCopy() *EventRef { + if in == nil { + return nil + } + out := new(EventRef) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventState) DeepCopyInto(out *EventState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.OnEvents != nil { + in, out := &in.OnEvents, &out.OnEvents + *out = make([]OnEvents, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(EventStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventState. +func (in *EventState) DeepCopy() *EventState { + if in == nil { + return nil + } + out := new(EventState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventStateTimeout) DeepCopyInto(out *EventStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventStateTimeout. +func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { + if in == nil { + return nil + } + out := new(EventStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ForEachState) DeepCopyInto(out *ForEachState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.BatchSize != nil { + in, out := &in.BatchSize, &out.BatchSize + *out = new(intstr.IntOrString) + **out = **in + } + if in.Actions != nil { + in, out := &in.Actions, &out.Actions + *out = make([]Action, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(ForEachStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachState. +func (in *ForEachState) DeepCopy() *ForEachState { + if in == nil { + return nil + } + out := new(ForEachState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ForEachStateTimeout) DeepCopyInto(out *ForEachStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachStateTimeout. +func (in *ForEachStateTimeout) DeepCopy() *ForEachStateTimeout { + if in == nil { + return nil + } + out := new(ForEachStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Function) DeepCopyInto(out *Function) { + *out = *in + in.Common.DeepCopyInto(&out.Common) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Function. +func (in *Function) DeepCopy() *Function { + if in == nil { + return nil + } + out := new(Function) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FunctionRef) DeepCopyInto(out *FunctionRef) { + *out = *in + if in.Arguments != nil { + in, out := &in.Arguments, &out.Arguments + *out = make(map[string]Object, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FunctionRef. +func (in *FunctionRef) DeepCopy() *FunctionRef { + if in == nil { + return nil + } + out := new(FunctionRef) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InjectState) DeepCopyInto(out *InjectState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.Data != nil { + in, out := &in.Data, &out.Data + *out = make(map[string]Object, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(InjectStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectState. +func (in *InjectState) DeepCopy() *InjectState { + if in == nil { + return nil + } + out := new(InjectState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InjectStateTimeout) DeepCopyInto(out *InjectStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectStateTimeout. +func (in *InjectStateTimeout) DeepCopy() *InjectStateTimeout { + if in == nil { + return nil + } + out := new(InjectStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Metadata) DeepCopyInto(out *Metadata) { + { + in := &in + *out = make(Metadata, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Metadata. 
+func (in Metadata) DeepCopy() Metadata { + if in == nil { + return nil + } + out := new(Metadata) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OAuth2AuthProperties) DeepCopyInto(out *OAuth2AuthProperties) { + *out = *in + in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + if in.Scopes != nil { + in, out := &in.Scopes, &out.Scopes + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Audiences != nil { + in, out := &in.Audiences, &out.Audiences + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2AuthProperties. +func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { + if in == nil { + return nil + } + out := new(OAuth2AuthProperties) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Object) DeepCopyInto(out *Object) { + *out = *in + if in.IObject != nil { + out.IObject = in.IObject.DeepCopyIObject() + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Object. +func (in *Object) DeepCopy() *Object { + if in == nil { + return nil + } + out := new(Object) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OnError) DeepCopyInto(out *OnError) { + *out = *in + if in.ErrorRefs != nil { + in, out := &in.ErrorRefs, &out.ErrorRefs + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Transition != nil { + in, out := &in.Transition, &out.Transition + *out = new(Transition) + (*in).DeepCopyInto(*out) + } + if in.End != nil { + in, out := &in.End, &out.End + *out = new(End) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnError. +func (in *OnError) DeepCopy() *OnError { + if in == nil { + return nil + } + out := new(OnError) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OnEvents) DeepCopyInto(out *OnEvents) { + *out = *in + if in.EventRefs != nil { + in, out := &in.EventRefs, &out.EventRefs + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Actions != nil { + in, out := &in.Actions, &out.Actions + *out = make([]Action, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.EventDataFilter = in.EventDataFilter + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnEvents. +func (in *OnEvents) DeepCopy() *OnEvents { + if in == nil { + return nil + } + out := new(OnEvents) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *OperationState) DeepCopyInto(out *OperationState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.Actions != nil { + in, out := &in.Actions, &out.Actions + *out = make([]Action, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(OperationStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationState. +func (in *OperationState) DeepCopy() *OperationState { + if in == nil { + return nil + } + out := new(OperationState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OperationStateTimeout) DeepCopyInto(out *OperationStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationStateTimeout. +func (in *OperationStateTimeout) DeepCopy() *OperationStateTimeout { + if in == nil { + return nil + } + out := new(OperationStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ParallelState) DeepCopyInto(out *ParallelState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.Branches != nil { + in, out := &in.Branches, &out.Branches + *out = make([]Branch, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.NumCompleted = in.NumCompleted + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(ParallelStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelState. +func (in *ParallelState) DeepCopy() *ParallelState { + if in == nil { + return nil + } + out := new(ParallelState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ParallelStateTimeout) DeepCopyInto(out *ParallelStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelStateTimeout. +func (in *ParallelStateTimeout) DeepCopy() *ParallelStateTimeout { + if in == nil { + return nil + } + out := new(ParallelStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ProduceEvent) DeepCopyInto(out *ProduceEvent) { + *out = *in + if in.ContextAttributes != nil { + in, out := &in.ContextAttributes, &out.ContextAttributes + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProduceEvent. 
+func (in *ProduceEvent) DeepCopy() *ProduceEvent { + if in == nil { + return nil + } + out := new(ProduceEvent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Retry) DeepCopyInto(out *Retry) { + *out = *in + if in.Multiplier != nil { + in, out := &in.Multiplier, &out.Multiplier + *out = new(floatstr.Float32OrString) + **out = **in + } + out.MaxAttempts = in.MaxAttempts + out.Jitter = in.Jitter + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retry. +func (in *Retry) DeepCopy() *Retry { + if in == nil { + return nil + } + out := new(Retry) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Schedule) DeepCopyInto(out *Schedule) { + *out = *in + if in.Cron != nil { + in, out := &in.Cron, &out.Cron + *out = new(Cron) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schedule. +func (in *Schedule) DeepCopy() *Schedule { + if in == nil { + return nil + } + out := new(Schedule) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Secrets) DeepCopyInto(out *Secrets) { + { + in := &in + *out = make(Secrets, len(*in)) + copy(*out, *in) + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Secrets. +func (in Secrets) DeepCopy() Secrets { + if in == nil { + return nil + } + out := new(Secrets) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Sleep) DeepCopyInto(out *Sleep) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Sleep. +func (in *Sleep) DeepCopy() *Sleep { + if in == nil { + return nil + } + out := new(Sleep) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SleepState) DeepCopyInto(out *SleepState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(SleepStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepState. +func (in *SleepState) DeepCopy() *SleepState { + if in == nil { + return nil + } + out := new(SleepState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SleepStateTimeout) DeepCopyInto(out *SleepStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepStateTimeout. +func (in *SleepStateTimeout) DeepCopy() *SleepStateTimeout { + if in == nil { + return nil + } + out := new(SleepStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func (in *Start) DeepCopyInto(out *Start) { + *out = *in + if in.Schedule != nil { + in, out := &in.Schedule, &out.Schedule + *out = new(Schedule) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Start. +func (in *Start) DeepCopy() *Start { + if in == nil { + return nil + } + out := new(Start) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *StateDataFilter) DeepCopyInto(out *StateDataFilter) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateDataFilter. +func (in *StateDataFilter) DeepCopy() *StateDataFilter { + if in == nil { + return nil + } + out := new(StateDataFilter) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *StateExecTimeout) DeepCopyInto(out *StateExecTimeout) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateExecTimeout. +func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { + if in == nil { + return nil + } + out := new(StateExecTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SwitchState) DeepCopyInto(out *SwitchState) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) + if in.EventConditions != nil { + in, out := &in.EventConditions, &out.EventConditions + *out = make([]EventCondition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.DataConditions != nil { + in, out := &in.DataConditions, &out.DataConditions + *out = make([]DataCondition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Timeouts != nil { + in, out := &in.Timeouts, &out.Timeouts + *out = new(SwitchStateTimeout) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchState. +func (in *SwitchState) DeepCopy() *SwitchState { + if in == nil { + return nil + } + out := new(SwitchState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SwitchStateTimeout) DeepCopyInto(out *SwitchStateTimeout) { + *out = *in + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchStateTimeout. +func (in *SwitchStateTimeout) DeepCopy() *SwitchStateTimeout { + if in == nil { + return nil + } + out := new(SwitchStateTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Timeouts) DeepCopyInto(out *Timeouts) { + *out = *in + if in.WorkflowExecTimeout != nil { + in, out := &in.WorkflowExecTimeout, &out.WorkflowExecTimeout + *out = new(WorkflowExecTimeout) + **out = **in + } + if in.StateExecTimeout != nil { + in, out := &in.StateExecTimeout, &out.StateExecTimeout + *out = new(StateExecTimeout) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Timeouts. +func (in *Timeouts) DeepCopy() *Timeouts { + if in == nil { + return nil + } + out := new(Timeouts) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Transition) DeepCopyInto(out *Transition) { + *out = *in + if in.ProduceEvents != nil { + in, out := &in.ProduceEvents, &out.ProduceEvents + *out = make([]ProduceEvent, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Transition. +func (in *Transition) DeepCopy() *Transition { + if in == nil { + return nil + } + out := new(Transition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Workflow) DeepCopyInto(out *Workflow) { + *out = *in + in.BaseWorkflow.DeepCopyInto(&out.BaseWorkflow) + if in.States != nil { + in, out := &in.States, &out.States + *out = make([]State, len(*in)) + for i := range *in { + if (*in)[i] != nil { + (*out)[i] = (*in)[i].DeepCopyState() + } + } + } + if in.Events != nil { + in, out := &in.Events, &out.Events + *out = make([]Event, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Functions != nil { + in, out := &in.Functions, &out.Functions + *out = make([]Function, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Retries != nil { + in, out := &in.Retries, &out.Retries + *out = make([]Retry, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Workflow. +func (in *Workflow) DeepCopy() *Workflow { + if in == nil { + return nil + } + out := new(Workflow) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkflowExecTimeout) DeepCopyInto(out *WorkflowExecTimeout) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowExecTimeout. +func (in *WorkflowExecTimeout) DeepCopy() *WorkflowExecTimeout { + if in == nil { + return nil + } + out := new(WorkflowExecTimeout) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkflowRef) DeepCopyInto(out *WorkflowRef) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowRef. 
+func (in *WorkflowRef) DeepCopy() *WorkflowRef { + if in == nil { + return nil + } + out := new(WorkflowRef) + in.DeepCopyInto(out) + return out +} diff --git a/parser/parser_test.go b/parser/parser_test.go index 37b93c9..3ce6a60 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -211,6 +211,9 @@ func TestFromFile(t *testing.T) { basicProperties := auth[1].Properties.(*model.BasicAuthProperties) assert.Equal(t, "test_user", basicProperties.Username) assert.Equal(t, "test_pwd", basicProperties.Password) + // metadata + assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) + assert.Equal(t, &model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.GetMetadata()) }, }, { "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { @@ -334,14 +337,14 @@ func TestFromFile(t *testing.T) { endDataCondition := eventState.DataConditions[0] assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) - assert.Equal(t, "${ del(.customerCount) }", endDataCondition.End.ContinueAs.Data) + assert.Equal(t, model.FromString("${ del(.customerCount) }"), endDataCondition.End.ContinueAs.Data) assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) }, }, { - "./testdata/workflows/greetings-v08-spec.sw.yaml", - func(t *testing.T, w *model.Workflow) { + name: "./testdata/workflows/greetings-v08-spec.sw.yaml", + f: func(t *testing.T, w *model.Workflow) { assert.Equal(t, "custom.greeting", w.ID) assert.Equal(t, "1.0", w.Version) assert.Equal(t, "0.8", w.SpecVersion) @@ -383,7 +386,12 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "test", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.Data) assert.Equal(t, "testing", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.ToStateData) assert.Equal(t, model.ActionModeParallel, w.States[1].(*model.EventState).OnEvents[0].ActionMode) + assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents[0].Actions[0].FunctionRef) + assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef) + assert.Equal(t, model.FromString("${ .patientInfo }"), w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef.Data) + assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef.ContextAttributes) + assert.Equal(t, "PT1S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT2S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Single) assert.Equal(t, "PT1H", w.States[1].(*model.EventState).Timeouts.EventTimeout) @@ -458,7 +466,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "PT22S", w.States[6].(*model.ForEachState).Timeouts.StateExecTimeout.Single) // Inject state - assert.Equal(t, map[string]interface{}{"result": "Hello World, last state!"}, w.States[7].(*model.InjectState).Data) + assert.Equal(t, map[string]model.Object{"result": model.FromString("Hello World, last state!")}, w.States[7].(*model.InjectState).Data) assert.Equal(t, "HelloInject", w.States[7].GetName()) assert.Equal(t, 
model.StateType("inject"), w.States[7].GetType()) assert.Equal(t, "PT11M", w.States[7].(*model.InjectState).Timeouts.StateExecTimeout.Total) @@ -469,7 +477,8 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "CheckCreditCallback", w.States[8].GetName()) assert.Equal(t, model.StateType("callback"), w.States[8].GetType()) assert.Equal(t, "callCreditCheckMicroservice", w.States[8].(*model.CallbackState).Action.FunctionRef.RefName) - assert.Equal(t, map[string]interface{}{"customer": "${ .customer }"}, w.States[8].(*model.CallbackState).Action.FunctionRef.Arguments) + assert.Equal(t, map[string]model.Object{"argsObj": model.FromRaw(map[string]interface{}{"age": 10, "name": "hi"}), "customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, + w.States[8].(*model.CallbackState).Action.FunctionRef.Arguments) assert.Equal(t, "PT10S", w.States[8].(*model.CallbackState).Action.Sleep.Before) assert.Equal(t, "PT20S", w.States[8].(*model.CallbackState).Action.Sleep.After) assert.Equal(t, "PT150M", w.States[8].(*model.CallbackState).Timeouts.ActionExecTimeout) @@ -648,6 +657,17 @@ specVersion: '0.8' name: Hello World Workflow description: Inject Hello World start: Hello State +metadata: + metadata1: metadata1 + metadata2: metadata2 +auth: +- name: testAuth + scheme: bearer + properties: + token: test_token + metadata: + auth1: auth1 + auth2: auth2 states: - name: Hello State type: switch @@ -660,12 +680,48 @@ states: nextState: HandleRejectedVisa defaultCondition: transition: - nextState: HandleNoVisaDecision + nextState: CheckCreditCallback +- name: HelloInject + type: inject + data: + result: Hello World, another state! +- name: CheckCreditCallback + type: callback + action: + functionRef: + refName: callCreditCheckMicroservice + arguments: + customer: "${ .customer }" + time: 48 + argsObj: { + "name" : "hi", + "age": { + "initial": 10, + "final": 32 + } + } + sleep: + before: PT10S + after: PT20S + eventRef: CreditCheckCompletedEvent + timeouts: + actionExecTimeout: PT150M + eventTimeout: PT34S + stateExecTimeout: + total: PT115M + single: PT22M - name: HandleApprovedVisa type: operation actions: - subFlowRef: workflowId: handleApprovedVisaWorkflowID + - eventRef: + triggerEventRef: StoreBidFunction + data: "${ .patientInfo }" + resultEventRef: StoreBidFunction + contextAttributes: + customer: "${ .customer }" + time: 48 end: terminate: true - name: HandleRejectedVisa @@ -690,6 +746,12 @@ states: b, err := json.Marshal(workflow) assert.Nil(t, err) assert.True(t, strings.Contains(string(b), "eventConditions")) + assert.True(t, strings.Contains(string(b), "\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48}")) + assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) + assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) + assert.True(t, strings.Contains(string(b), "\"data\":\"${ .patientInfo }\"")) + assert.True(t, strings.Contains(string(b), "\"contextAttributes\":{\"customer\":\"${ .customer }\",\"time\":48}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"data\":{\"result\":\"Hello World, another state!\"}}")) workflow = nil err = json.Unmarshal(b, &workflow) diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json index 66159a5..cd7391d 100644 --- 
a/parser/testdata/workflows/applicationrequest.multiauth.json +++ b/parser/testdata/workflows/applicationrequest.multiauth.json @@ -5,12 +5,20 @@ "description": "Determine if applicant request is valid", "start": "CheckApplication", "specVersion": "0.8", + "metadata":{ + "metadata1": "metadata1", + "metadata2": "metadata2" + }, "auth": [ { "name": "testAuth", "scheme": "bearer", "properties": { - "token": "test_token" + "token": "test_token", + "metadata":{ + "auth1": "auth1", + "auth2": "auth2" + } } }, { diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index c1d46be..336321c 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -50,6 +50,13 @@ states: refName: StoreBidFunction arguments: bid: "${ .bid }" + - eventRef: + triggerEventRef: StoreBidFunction + data: "${ .patientInfo }" + resultEventRef: StoreBidFunction + contextAttributes: + customer: "${ .customer }" + time: 48 timeouts: eventTimeout: PT1H actionExecTimeout: PT3S @@ -158,6 +165,11 @@ states: refName: callCreditCheckMicroservice arguments: customer: "${ .customer }" + argsObj: { + "name" : "hi", + "age": 10 + } + time: 48 sleep: before: PT10S after: PT20S From a2f539d7559be7a2bc06011b36bec6bb3fc80122 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 20 Dec 2022 09:57:29 -0300 Subject: [PATCH 043/110] Prepare Release v2.2.0 (#132) Signed-off-by: Ricardo Zanini Signed-off-by: Ricardo Zanini --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7120046..1432538 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,8 @@ Current status of features implemented in the SDK is listed in the table below: | :---: | :---: | | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | -| [v2.1.x](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.0) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.2.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use @@ -64,4 +65,4 @@ The `Workflow` structure then can be used in your application. ## Slack Channel -Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. \ No newline at end of file +Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. 
From 5ac96029df2d253646bbd14b39ba8f2c2ba6d369 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 20 Dec 2022 12:20:48 -0300 Subject: [PATCH 044/110] Fix CVE-2022-32149 (#133) Signed-off-by: Ricardo Zanini Signed-off-by: Ricardo Zanini --- go.mod | 2 ++ go.sum | 17 +++++++++++------ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 73df64b..de9bc32 100644 --- a/go.mod +++ b/go.mod @@ -28,3 +28,5 @@ require ( k8s.io/klog/v2 v2.70.1 // indirect sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect ) + +replace golang.org/x/text => golang.org/x/text v0.3.8 diff --git a/go.sum b/go.sum index 9657239..d1b738b 100644 --- a/go.sum +++ b/go.sum @@ -45,22 +45,28 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d h1:3qF+Z8Hkrw9sOhrFHti9TlB1Hkac1x+DNRkv0XQiFjo= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys 
v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -68,18 +74,17 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From 2ce428ebe183c904b2346986891c0603cd4fc652 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 20 Dec 2022 12:37:17 -0300 Subject: [PATCH 045/110] Prepare for release v2.2.1 (#134) Signed-off-by: Ricardo Zanini Signed-off-by: Ricardo Zanini --- .github/OWNERS | 2 -- README.md | 6 +++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/OWNERS b/.github/OWNERS index 9ed057f..066fb2b 100644 --- a/.github/OWNERS +++ b/.github/OWNERS @@ -1,6 +1,4 @@ reviewers: - - tsurdilo - ricardozanini approvers: - - tsurdilo - ricardozanini diff --git a/README.md b/README.md index 1432538..786225f 100644 --- a/README.md +++ b/README.md @@ -14,12 +14,12 @@ Current status of features implemented in the SDK is listed in the table below: ## Status -| Latest Releases | Conformance to spec version | -| :---: | :---: | +| Latest Releases | Conformance to spec version | 
+|:--------------------------------------------------------------------------:| :---: | | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.2.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 71cb97bbf23c58403158aab9ef4e109efa6dab56 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Mon, 9 Jan 2023 17:00:05 -0300 Subject: [PATCH 046/110] Add basic code style helper guide and default linters (#136) * Add basic code style helper guide and default linters - fixes https://github.com/serverlessworkflow/sdk-go/issues/98 Signed-off-by: Spolti * review additions Signed-off-by: Spolti * suggestion additions Signed-off-by: Spolti Signed-off-by: Spolti --- CONTRIBUTORS_GUIDE.md | 53 ++++++++++++++++++++++++ Makefile | 2 +- README.md | 73 +++++++++++++++++++++++++++++++++- contrib/intellij.editorconfig | 30 ++++++++++++++ hack/go-lint.sh | 2 +- maintainer_guidelines.md | 2 +- model/object.go | 2 +- model/util.go | 1 + util/floatstr/floatstr_test.go | 3 +- 9 files changed, 161 insertions(+), 7 deletions(-) create mode 100644 CONTRIBUTORS_GUIDE.md create mode 100644 contrib/intellij.editorconfig diff --git a/CONTRIBUTORS_GUIDE.md b/CONTRIBUTORS_GUIDE.md new file mode 100644 index 0000000..e604bd0 --- /dev/null +++ b/CONTRIBUTORS_GUIDE.md @@ -0,0 +1,53 @@ +# Contributors Guide + +This guide aims to guide newcomers to getting started with the project standards. + + +## Code Style + +For this project we use basically the default configuration for most used IDEs. +For the configurations below, make sure to properly configure your IDE: + +- **imports**: goimports + +This should be enough to get you started. + +If you are unsure that your IDE is not correctly configured, you can run the lint checks: + +```bash +make lint +``` + +If something goes wrong, the error will be printed, e.g.: +```bash +$ make lint +make addheaders +make fmt +./hack/go-lint.sh +util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) + "k8s.io/apimachinery/pkg/util/yaml" +make: *** [lint] Error 1 +``` + +Lint issues can be fixed with the `--fix` flag, this command can be used: +```bash +make lint params=--fix +``` + + +### EditorConfig +For IntelliJ you can find an example `editorconfig` file [here](contrib/intellij.editorconfig). To use it please visit +the Jetbrains [documentation](https://www.jetbrains.com/help/idea/editorconfig.html). 
+ + +## Known Issues + +On MacOSX/darwin you might get this issue: +``` + goimports: can't extract issues from gofmt diff output +``` +To solve install the `diffutils` package: + +```bash + brew install diffutils +``` \ No newline at end of file diff --git a/Makefile b/Makefile index 5a7c8a4..f713372 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ lint: @command -v golangci-lint > /dev/null || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin" make addheaders make fmt - ./hack/go-lint.sh + ./hack/go-lint.sh ${params} .PHONY: test coverage="false" diff --git a/README.md b/README.md index 786225f..bf998fd 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,21 @@ # Go SDK for Serverless Workflow - Here you will find all the [specification types](https://github.com/serverlessworkflow/specification/blob/main/schema/workflow.json) defined by our Json Schemas, in Go. +Table of Contents +================= + +- [Status](#status) +- [Releases](#releases) +- [How to Use](#how-to-use) + - [Parsing Serverless Workflow files](#parsing-serverless-workflow-files) +- [Slack Channel](#slack-channel) +- [Contributors Guide](#contributors-guide) + - [Code Style](#code-style) + - [EditorConfig](#editorconfig) + - [Known Issues](#known-issues) + + +## Status Current status of features implemented in the SDK is listed in the table below: | Feature | Status | @@ -12,8 +26,8 @@ Current status of features implemented in the SDK is listed in the table below: | Validate workflow definitions (Integrity) | :heavy_check_mark: | | Generate workflow diagram (SVG) | :no_entry_sign: | -## Status +## Releases | Latest Releases | Conformance to spec version | |:--------------------------------------------------------------------------:| :---: | | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | @@ -66,3 +80,58 @@ The `Workflow` structure then can be used in your application. ## Slack Channel Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. + +## Contributors Guide + +This guide aims to guide newcomers to getting started with the project standards. + + +### Code Style + +For this project we use basically the default configuration for most used IDEs. +For the configurations below, make sure to properly configure your IDE: + +- **imports**: goimports + +This should be enough to get you started. + +If you are unsure that your IDE is not correctly configured, you can run the lint checks: + +```bash +make lint +``` + +If something goes wrong, the error will be printed, e.g.: +```bash +$ make lint +make addheaders +make fmt +./hack/go-lint.sh +util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) + "k8s.io/apimachinery/pkg/util/yaml" +make: *** [lint] Error 1 +``` + +Lint issues can be fixed with the `--fix` flag, this command can be used: +```bash +make lint params=--fix +``` + + +### EditorConfig +For IntelliJ you can find an example `editorconfig` file [here](contrib/intellij.editorconfig). To use it please visit +the Jetbrains [documentation](https://www.jetbrains.com/help/idea/editorconfig.html). 
+ + +### Known Issues + +On MacOSX/darwin you might get this issue: +``` + goimports: can't extract issues from gofmt diff output +``` +To solve install the `diffutils` package: + +```bash + brew install diffutils +``` + diff --git a/contrib/intellij.editorconfig b/contrib/intellij.editorconfig new file mode 100644 index 0000000..5b5a1ca --- /dev/null +++ b/contrib/intellij.editorconfig @@ -0,0 +1,30 @@ +root = true + +[{*.go,*.go2}] +indent_style = tab +ij_continuation_indent_size = 4 +ij_go_GROUP_CURRENT_PROJECT_IMPORTS = true +ij_go_add_leading_space_to_comments = true +ij_go_add_parentheses_for_single_import = false +ij_go_call_parameters_new_line_after_left_paren = true +ij_go_call_parameters_right_paren_on_new_line = true +ij_go_call_parameters_wrap = off +ij_go_fill_paragraph_width = 80 +ij_go_group_stdlib_imports = true +ij_go_import_sorting = goimports +ij_go_keep_indents_on_empty_lines = false +ij_go_local_group_mode = project +ij_go_move_all_imports_in_one_declaration = true +ij_go_move_all_stdlib_imports_in_one_group = false +ij_go_remove_redundant_import_aliases = true +ij_go_run_go_fmt_on_reformat = true +ij_go_use_back_quotes_for_imports = false +ij_go_wrap_comp_lit = off +ij_go_wrap_comp_lit_newline_after_lbrace = true +ij_go_wrap_comp_lit_newline_before_rbrace = true +ij_go_wrap_func_params = off +ij_go_wrap_func_params_newline_after_lparen = true +ij_go_wrap_func_params_newline_before_rparen = true +ij_go_wrap_func_result = off +ij_go_wrap_func_result_newline_after_lparen = true +ij_go_wrap_func_result_newline_before_rparen = true \ No newline at end of file diff --git a/hack/go-lint.sh b/hack/go-lint.sh index a9c0251..7b034aa 100755 --- a/hack/go-lint.sh +++ b/hack/go-lint.sh @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -golangci-lint run ./... --timeout 2m0s +golangci-lint run -E goimports -E errorlint -E gosec "${1}" ./... --timeout 2m0s diff --git a/maintainer_guidelines.md b/maintainer_guidelines.md index ecd1b11..d40d33b 100644 --- a/maintainer_guidelines.md +++ b/maintainer_guidelines.md @@ -16,7 +16,7 @@ Here are a few tips for repository maintainers. ## Branch Management -The `main` branch is is the bleeding edge. New major versions of the module +The `main` branch is the bleeding edge. New major versions of the module are cut from this branch and tagged. If you intend to submit a pull request you should use `main HEAD` as your starting point. 
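The lint script above now enables the `errorlint` and `gosec` checks, and the model/object.go hunk below switches an error format verb from `%s` to `%w` to satisfy them. As a minimal, self-contained sketch of the wrapping pattern `errorlint` expects (the file name and function here are illustrative, not from the repository):

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

// readConfig wraps the underlying error with %w so callers can still match it
// with errors.Is / errors.As; formatting it with %s or .Error() would break
// that chain, which is what the errorlint check flags.
func readConfig(path string) error {
	if _, err := os.ReadFile(path); err != nil {
		return fmt.Errorf("reading config %q: %w", path, err)
	}
	return nil
}

func main() {
	err := readConfig("no-such-file.yaml")
	fmt.Println(errors.Is(err, os.ErrNotExist)) // prints: true
}
```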
diff --git a/model/object.go b/model/object.go index cdb87ac..17895d3 100644 --- a/model/object.go +++ b/model/object.go @@ -116,7 +116,7 @@ func (obj *Object) UnmarshalJSON(data []byte) error { default: // json parses all not typed numbers as float64, let's enforce to int32 if valInt, parseErr := strconv.Atoi(fmt.Sprint(val)); parseErr != nil { - return fmt.Errorf("falied to parse %d to int32: %s", valInt, parseErr.Error()) + return fmt.Errorf("falied to parse %d to int32: %w", valInt, parseErr) } else { var intVal Integer if err := json.Unmarshal(data, &intVal); err != nil { diff --git a/model/util.go b/model/util.go index 849eb5c..d90f966 100644 --- a/model/util.go +++ b/model/util.go @@ -21,6 +21,7 @@ import ( "net/http" "os" "path/filepath" + "sigs.k8s.io/yaml" "strings" diff --git a/util/floatstr/floatstr_test.go b/util/floatstr/floatstr_test.go index c01f2b2..ee25fbe 100644 --- a/util/floatstr/floatstr_test.go +++ b/util/floatstr/floatstr_test.go @@ -16,9 +16,10 @@ package floatstr import ( "encoding/json" - "k8s.io/apimachinery/pkg/util/yaml" "reflect" "testing" + + "k8s.io/apimachinery/pkg/util/yaml" ) func TestFromFloat(t *testing.T) { From 70fa52569950056143831ee5bb00ab48bf3e1966 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Mon, 9 Jan 2023 17:14:47 -0300 Subject: [PATCH 047/110] remove contributors_guide.md file (#137) Signed-off-by: Spolti Signed-off-by: Spolti --- CONTRIBUTORS_GUIDE.md | 53 ------------------------------------------- 1 file changed, 53 deletions(-) delete mode 100644 CONTRIBUTORS_GUIDE.md diff --git a/CONTRIBUTORS_GUIDE.md b/CONTRIBUTORS_GUIDE.md deleted file mode 100644 index e604bd0..0000000 --- a/CONTRIBUTORS_GUIDE.md +++ /dev/null @@ -1,53 +0,0 @@ -# Contributors Guide - -This guide aims to guide newcomers to getting started with the project standards. - - -## Code Style - -For this project we use basically the default configuration for most used IDEs. -For the configurations below, make sure to properly configure your IDE: - -- **imports**: goimports - -This should be enough to get you started. - -If you are unsure that your IDE is not correctly configured, you can run the lint checks: - -```bash -make lint -``` - -If something goes wrong, the error will be printed, e.g.: -```bash -$ make lint -make addheaders -make fmt -./hack/go-lint.sh -util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) - "k8s.io/apimachinery/pkg/util/yaml" -make: *** [lint] Error 1 -``` - -Lint issues can be fixed with the `--fix` flag, this command can be used: -```bash -make lint params=--fix -``` - - -### EditorConfig -For IntelliJ you can find an example `editorconfig` file [here](contrib/intellij.editorconfig). To use it please visit -the Jetbrains [documentation](https://www.jetbrains.com/help/idea/editorconfig.html). 
- - -## Known Issues - -On MacOSX/darwin you might get this issue: -``` - goimports: can't extract issues from gofmt diff output -``` -To solve install the `diffutils` package: - -```bash - brew install diffutils -``` \ No newline at end of file From a66005018c3fa79387f4cf9f2f93d2165bc5e5cd Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Wed, 18 Jan 2023 16:24:47 -0300 Subject: [PATCH 048/110] add json tag to nested objects (#138) Signed-off-by: Spolti Signed-off-by: Spolti --- hack/go-lint.sh | 2 +- model/auth.go | 8 ++++---- model/function.go | 2 +- model/object.go | 2 +- model/states.go | 6 +++--- model/workflow.go | 1 - 6 files changed, 10 insertions(+), 11 deletions(-) diff --git a/hack/go-lint.sh b/hack/go-lint.sh index 7b034aa..110ad60 100755 --- a/hack/go-lint.sh +++ b/hack/go-lint.sh @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -golangci-lint run -E goimports -E errorlint -E gosec "${1}" ./... --timeout 2m0s +golangci-lint run -E goimports -E errorlint -E gosec ${1} ./... --timeout 2m0s diff --git a/model/auth.go b/model/auth.go index bae7a29..decef7c 100644 --- a/model/auth.go +++ b/model/auth.go @@ -117,7 +117,7 @@ type AuthProperties interface { // BaseAuthProperties ... type BaseAuthProperties struct { - Common + Common `json:",inline"` // Secret Expression referencing a workflow secret that contains all needed auth info Secret string `json:"secret,omitempty"` } @@ -157,7 +157,7 @@ func (b *BasicAuthProperties) DeepCopyAuthProperties() AuthProperties { // BasicAuthProperties Basic Auth Info type BasicAuthProperties struct { - BaseAuthProperties + BaseAuthProperties `json:",inline"` // Username String or a workflow expression. Contains the username Username string `json:"username" validate:"required"` // Password String or a workflow expression. Contains the user password @@ -188,7 +188,7 @@ func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { // BearerAuthProperties Bearer auth information type BearerAuthProperties struct { - BaseAuthProperties + BaseAuthProperties `json:",inline"` // Token String or a workflow expression. Contains the token Token string `json:"token" validate:"required"` } @@ -218,7 +218,7 @@ func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { // OAuth2AuthProperties OAuth2 information type OAuth2AuthProperties struct { - BaseAuthProperties + BaseAuthProperties `json:",inline"` // Authority String or a workflow expression. Contains the authority information Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` // GrantType Defines the grant type diff --git a/model/function.go b/model/function.go index e3877fe..9f69c1a 100644 --- a/model/function.go +++ b/model/function.go @@ -42,7 +42,7 @@ type FunctionType string // Function ... type Function struct { - Common + Common `json:",inline"` // Unique function name Name string `json:"name" validate:"required"` // If type is `rest`, #. If type is `rpc`, ##. diff --git a/model/object.go b/model/object.go index 17895d3..f37e254 100644 --- a/model/object.go +++ b/model/object.go @@ -33,7 +33,7 @@ import ( // parsed to int32 // - raw - holds any not typed value, replaces the interface{} behavior. type Object struct { - IObject + IObject `json:",inline"` } // IObject interface that can converted into one of the three subtypes diff --git a/model/states.go b/model/states.go index b2523b6..fc6df11 100644 --- a/model/states.go +++ b/model/states.go @@ -14,6 +14,9 @@ package model +// StateType ... 
+type StateType string + const ( // StateTypeDelay ... StateTypeDelay = "delay" @@ -59,9 +62,6 @@ func getActionsModelMapping(stateType string) (State, bool) { return nil, false } -// StateType ... -type StateType string - // State definition for a Workflow state type State interface { GetID() string diff --git a/model/workflow.go b/model/workflow.go index e6a9d00..85df8e0 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -592,7 +592,6 @@ type ProduceEvent struct { // TODO Data string `json:"data,omitempty"` // Add additional event extension context attributes - // TODO map[string]interface{} ContextAttributes map[string]string `json:"contextAttributes,omitempty"` } From e5760e4cdff7d348bac23e3dccb29c61842a407c Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 18 Jan 2023 16:25:52 -0300 Subject: [PATCH 049/110] Update readme to 2.2.2 version --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bf998fd..0f3870b 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.2.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.2) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 9c2ef9cad55e35213692d2b1ccfb14e01cb8b477 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 7 Feb 2023 23:01:40 -0300 Subject: [PATCH 050/110] Fixes unsupported AST kind *ast.InterfaceType on the custon Object type (#139) Signed-off-by: Spolti --- hack/deepcopy-gen.sh | 2 +- model/object.go | 152 ++++++++++----------------------- model/parallel_state.go | 8 +- model/zz_generated.deepcopy.go | 6 +- 4 files changed, 55 insertions(+), 113 deletions(-) diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh index 353a682..2ef0fdf 100755 --- a/hack/deepcopy-gen.sh +++ b/hack/deepcopy-gen.sh @@ -41,7 +41,7 @@ if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then echo "Generating deepcopy funcs" export GO111MODULE=on # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/deepcopy-gen" -v 1 \ + "${GOPATH}/bin/deepcopy-gen" -v 2 \ --input-dirs ./model -O zz_generated.deepcopy \ --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" \ "$@" diff --git a/model/object.go b/model/object.go index f37e254..074b3dd 100644 --- a/model/object.go +++ b/model/object.go @@ -18,8 +18,6 @@ import ( "encoding/json" "fmt" "math" - "strconv" - "strings" ) // Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type. @@ -32,126 +30,68 @@ import ( // - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is // parsed to int32 // - raw - holds any not typed value, replaces the interface{} behavior. 
+// +// +kubebuilder:validation:Type=object type Object struct { - IObject `json:",inline"` -} - -// IObject interface that can converted into one of the three subtypes -type IObject interface { - DeepCopyIObject() IObject -} - -// raw generic subtype -type raw struct { - IObject interface{} + Type Type `json:",inline"` + IntVal int32 `json:",inline"` + StrVal string `json:",inline"` + RawValue json.RawMessage `json:",inline"` } -func (o raw) DeepCopyIObject() IObject { - return o -} +type Type int64 -// Integer int32 type -type Integer int +const ( + Integer Type = iota + String + Raw +) -func (m Integer) DeepCopyIObject() IObject { - return m +func FromInt(val int) Object { + if val > math.MaxInt32 || val < math.MinInt32 { + fmt.Println(fmt.Errorf("value: %d overflows int32", val)) + } + return Object{Type: Integer, IntVal: int32(val)} } -// String string type -type String string - -func (m String) DeepCopyIObject() IObject { - return m +func FromString(val string) Object { + return Object{Type: String, StrVal: val} } -// MarshalJSON marshal the given json object into the respective Object subtype. -func (obj Object) MarshalJSON() ([]byte, error) { - switch val := obj.IObject.(type) { - case String: - return []byte(fmt.Sprintf(`%q`, val)), nil - case Integer: - return []byte(fmt.Sprintf(`%d`, val)), nil - case raw: - custom, err := json.Marshal(&struct { - raw - }{ - val, - }) - if err != nil { - return nil, err - } - - // remove the field name and the last '}' for marshalling purposes - st := strings.Replace(string(custom), "{\"IObject\":", "", 1) - st = strings.TrimSuffix(st, "}") - return []byte(st), nil - default: - return []byte(fmt.Sprintf("%+v", obj.IObject)), nil +func FromRaw(val interface{}) Object { + custom, err := json.Marshal(val) + if err != nil { + er := fmt.Errorf("failed to parse value to Raw: %w", err) + fmt.Println(er.Error()) + return Object{} } + return Object{Type: Raw, RawValue: custom} } // UnmarshalJSON ... func (obj *Object) UnmarshalJSON(data []byte) error { - var test interface{} - if err := json.Unmarshal(data, &test); err != nil { - return err + if data[0] == '"' { + obj.Type = String + return json.Unmarshal(data, &obj.StrVal) + } else if data[0] == '{' { + obj.Type = Raw + return json.Unmarshal(data, &obj.RawValue) } - switch val := test.(type) { - case string: - var strVal String - if err := json.Unmarshal(data, &strVal); err != nil { - return err - } - obj.IObject = strVal - return nil - - case map[string]interface{}: - var cstVal raw - if err := json.Unmarshal(data, &cstVal.IObject); err != nil { - return err - } - obj.IObject = cstVal - return nil - - default: - // json parses all not typed numbers as float64, let's enforce to int32 - if valInt, parseErr := strconv.Atoi(fmt.Sprint(val)); parseErr != nil { - return fmt.Errorf("falied to parse %d to int32: %w", valInt, parseErr) - } else { - var intVal Integer - if err := json.Unmarshal(data, &intVal); err != nil { - return err - } - obj.IObject = intVal - return nil - } - } -} - -// FromInt creates an Object with an int32 value. -func FromInt(val int) Object { - if val > math.MaxInt32 || val < math.MinInt32 { - panic(fmt.Errorf("value: %d overflows int32", val)) - } - return Object{Integer(int32(val))} -} - -// FromString creates an Object with a string value. -func FromString(val string) Object { - return Object{String(val)} + obj.Type = Integer + return json.Unmarshal(data, &obj.IntVal) } -// FromRaw creates an Object with untyped values. 
-func FromRaw(val interface{}) Object { - var rawVal Object - data, err := json.Marshal(val) - if err != nil { - panic(err) - } - var cstVal raw - if err := json.Unmarshal(data, &cstVal.IObject); err != nil { - panic(err) +// MarshalJSON marshal the given json object into the respective Object subtype. +func (obj Object) MarshalJSON() ([]byte, error) { + switch obj.Type { + case String: + return []byte(fmt.Sprintf(`%q`, obj.StrVal)), nil + case Integer: + return []byte(fmt.Sprintf(`%d`, obj.IntVal)), nil + case Raw: + val, _ := json.Marshal(obj.RawValue) + return val, nil + default: + return []byte(fmt.Sprintf("%+v", obj)), nil } - rawVal.IObject = cstVal - return rawVal } diff --git a/model/parallel_state.go b/model/parallel_state.go index f943431..7e7ec83 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -54,14 +54,14 @@ type ParallelState struct { Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` } -func (s *ParallelState) DeepCopyState() State { - return s +func (ps *ParallelState) DeepCopyState() State { + return ps } type parallelStateForUnmarshal ParallelState // UnmarshalJSON unmarshal ParallelState object from json bytes -func (s *ParallelState) UnmarshalJSON(b []byte) error { +func (ps *ParallelState) UnmarshalJSON(b []byte) error { if len(b) == 0 { // TODO: Normalize error messages return fmt.Errorf("no bytes to unmarshal") @@ -75,7 +75,7 @@ func (s *ParallelState) UnmarshalJSON(b []byte) error { return err } - *s = ParallelState(*v) + *ps = ParallelState(*v) return nil } diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 872570d..6117092 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -967,8 +967,10 @@ func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Object) DeepCopyInto(out *Object) { *out = *in - if in.IObject != nil { - out.IObject = in.IObject.DeepCopyIObject() + if in.RawValue != nil { + in, out := &in.RawValue, &out.RawValue + *out = make(json.RawMessage, len(*in)) + copy(*out, *in) } return } From 3433ba265d3646431d0ecf72aa2dc2f3028e0714 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 14 Feb 2023 12:13:36 -0300 Subject: [PATCH 051/110] Fixes unsupported AST kind *ast.InterfaceType on the custon AuthProperties type (#143) Signed-off-by: Spolti --- hack/deepcopy-gen.sh | 2 +- model/auth.go | 157 +++++++++++++++------------------ model/auth_test.go | 8 +- model/zz_generated.deepcopy.go | 34 ++++--- parser/parser_test.go | 8 +- 5 files changed, 101 insertions(+), 108 deletions(-) diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh index 2ef0fdf..353a682 100755 --- a/hack/deepcopy-gen.sh +++ b/hack/deepcopy-gen.sh @@ -41,7 +41,7 @@ if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then echo "Generating deepcopy funcs" export GO111MODULE=on # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/deepcopy-gen" -v 2 \ + "${GOPATH}/bin/deepcopy-gen" -v 1 \ --input-dirs ./model -O zz_generated.deepcopy \ --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" \ "$@" diff --git a/model/auth.go b/model/auth.go index decef7c..e885eae 100644 --- a/model/auth.go +++ b/model/auth.go @@ -17,6 +17,7 @@ package model import ( "encoding/json" "fmt" + "strings" ) // AuthType ... 
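The remaining hunks of this file replace the AuthProperties interface (and its per-scheme type assertions) with a plain struct carrying one optional pointer per scheme, populated during UnmarshalJSON; the "strings" import added just above is used by the new MarshalJSON further down. A minimal, hypothetical sketch of how a caller works with that struct, assuming the github.com/serverlessworkflow/sdk-go/v2/model import path used elsewhere in this series (the snippet is not taken from the patch):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Build an auth definition with the struct-based properties: only the
	// pointer for the chosen scheme is set, the other schemes stay nil.
	a := model.Auth{
		Name:   "testAuth",
		Scheme: model.AuthTypeBearer,
		Properties: model.AuthProperties{
			Bearer: &model.BearerAuthProperties{Token: "test_token"},
		},
	}

	// A nil check replaces the old type assertion on the interface,
	// mirroring the updated parser tests later in this patch.
	if a.Properties.Bearer != nil {
		fmt.Println(a.Properties.Bearer.Token) // test_token
	}

	// The custom MarshalJSON added below flattens the wrapper back into a
	// single "properties" object, as the specification expects.
	if b, err := json.Marshal(&a); err == nil {
		fmt.Println(string(b))
	}
}

The same nil-check pattern applies to the Basic and OAuth2 pointers on the new AuthProperties struct.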
@@ -43,18 +44,6 @@ const ( GrantTypeTokenExchange GrantType = "tokenExchange" ) -func getAuthProperties(authType AuthType) (AuthProperties, bool) { - switch authType { - case AuthTypeBasic: - return &BasicAuthProperties{}, true - case AuthTypeBearer: - return &BearerAuthProperties{}, true - case AuthTypeOAuth2: - return &OAuth2AuthProperties{}, true - } - return nil, false -} - // Auth ... type Auth struct { // Name Unique auth definition name @@ -90,74 +79,73 @@ func (a *Auth) UnmarshalJSON(data []byte) error { if len(a.Scheme) == 0 { a.Scheme = AuthTypeBasic } - authProperties, ok := getAuthProperties(a.Scheme) - if !ok { - return fmt.Errorf("authentication scheme %s not supported", a.Scheme) - } - // we take the type we want to unmarshal based on the scheme - if err := unmarshalKey("properties", auth, authProperties); err != nil { - return err - } + switch a.Scheme { + case AuthTypeBasic: + authProperties := &BasicAuthProperties{} - a.Properties = authProperties - return nil -} + if err := unmarshalKey("properties", auth, authProperties); err != nil { + return err + } + a.Properties.Basic = authProperties -// AuthProperties ... -type AuthProperties interface { - // GetMetadata ... - GetMetadata() *Metadata - // GetSecret ... - GetSecret() string - // DeepCopyAuthProperties fixes in.Properties.DeepCopyAuthProperties undefined (type AuthProperties has no - // field or method DeepCopyAuthProperties) - DeepCopyAuthProperties() AuthProperties -} + return nil -// BaseAuthProperties ... -type BaseAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - Secret string `json:"secret,omitempty"` -} + case AuthTypeBearer: + authProperties := &BearerAuthProperties{} + if err := unmarshalKey("properties", auth, authProperties); err != nil { + return err + } + a.Properties.Bearer = authProperties + return nil -// UnmarshalJSON ... -func (b *BaseAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - b.Secret, err = unmarshalString(data) - if err != nil { + case AuthTypeOAuth2: + authProperties := &OAuth2AuthProperties{} + if err := unmarshalKey("properties", auth, authProperties); err != nil { return err } + a.Properties.OAuth2 = authProperties return nil - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - if err := unmarshalKey("secret", properties, &b.Secret); err != nil { - return err - } - return nil -} -// GetMetadata ... -func (b *BaseAuthProperties) GetMetadata() *Metadata { - return &b.Metadata + default: + return fmt.Errorf("failed to parse auth properties") + } } -// GetSecret ... 
-func (b *BaseAuthProperties) GetSecret() string { - return b.Secret +func (a *Auth) MarshalJSON() ([]byte, error) { + custom, err := json.Marshal(&struct { + Name string `json:"name" validate:"required"` + Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` + Properties AuthProperties `json:"properties" validate:"required"` + }{ + Name: a.Name, + Scheme: a.Scheme, + Properties: a.Properties, + }) + if err != nil { + fmt.Println(err) + } + st := strings.Replace(string(custom), "null,", "", 1) + st = strings.Replace(st, "\"Basic\":", "", 1) + st = strings.Replace(st, "\"Oauth2\":", "", 1) + st = strings.Replace(st, "\"Bearer\":", "", 1) + st = strings.Replace(st, "{{", "{", 1) + st = strings.TrimSuffix(st, "}") + return []byte(st), nil } -func (b *BasicAuthProperties) DeepCopyAuthProperties() AuthProperties { - return b +// AuthProperties ... +type AuthProperties struct { + Basic *BasicAuthProperties `json:",omitempty"` + Bearer *BearerAuthProperties `json:",omitempty"` + OAuth2 *OAuth2AuthProperties `json:",omitempty"` } // BasicAuthProperties Basic Auth Info type BasicAuthProperties struct { - BaseAuthProperties `json:",inline"` + Common `json:",inline"` + // Secret Expression referencing a workflow secret that contains all needed auth info + Secret string `json:"secret,omitempty"` // Username String or a workflow expression. Contains the username Username string `json:"username" validate:"required"` // Password String or a workflow expression. Contains the user password @@ -168,11 +156,7 @@ type BasicAuthProperties struct { func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { properties := make(map[string]json.RawMessage) if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil + return err } if err := unmarshalKey("username", properties, &b.Username); err != nil { return err @@ -183,29 +167,26 @@ func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { return err } + if err := unmarshalKey("secret", properties, &b.Secret); err != nil { + return err + } return nil } // BearerAuthProperties Bearer auth information type BearerAuthProperties struct { - BaseAuthProperties `json:",inline"` + Common `json:",inline"` + // Secret Expression referencing a workflow secret that contains all needed auth info + Secret string `json:"secret,omitempty"` // Token String or a workflow expression. Contains the token Token string `json:"token" validate:"required"` } -func (b *BearerAuthProperties) DeepCopyAuthProperties() AuthProperties { - return b -} - // UnmarshalJSON ... 
func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { properties := make(map[string]json.RawMessage) if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil + return err } if err := unmarshalKey("token", properties, &b.Token); err != nil { return err @@ -213,12 +194,17 @@ func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { return err } + if err := unmarshalKey("secret", properties, &b.Secret); err != nil { + return err + } return nil } // OAuth2AuthProperties OAuth2 information type OAuth2AuthProperties struct { - BaseAuthProperties `json:",inline"` + Common `json:",inline"` + // Secret Expression referencing a workflow secret that contains all needed auth info + Secret string `json:"secret,omitempty"` // Authority String or a workflow expression. Contains the authority information Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` // GrantType Defines the grant type @@ -243,21 +229,13 @@ type OAuth2AuthProperties struct { RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` } -func (b *OAuth2AuthProperties) DeepCopyAuthProperties() AuthProperties { - return b -} - // TODO: use reflection to unmarshal the keys and think on a generic approach to handle them // UnmarshalJSON ... func (b *OAuth2AuthProperties) UnmarshalJSON(data []byte) error { properties := make(map[string]json.RawMessage) if err := json.Unmarshal(data, &properties); err != nil { - err = json.Unmarshal(data, &b.BaseAuthProperties) - if err != nil { - return err - } - return nil + return err } if err := unmarshalKey("authority", properties, &b.Authority); err != nil { return err @@ -295,5 +273,8 @@ func (b *OAuth2AuthProperties) UnmarshalJSON(data []byte) error { if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { return err } + if err := unmarshalKey("secret", properties, &b.Secret); err != nil { + return err + } return nil } diff --git a/model/auth_test.go b/model/auth_test.go index 44d49e1..6010dca 100644 --- a/model/auth_test.go +++ b/model/auth_test.go @@ -46,8 +46,8 @@ func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { err = json.Unmarshal([]byte(a2JSON), &a2) assert.NoError(t, err) - a1Properties := a1.Properties.(*BearerAuthProperties) - a2Properties := a2.Properties.(*BearerAuthProperties) + a1Properties := a1.Properties.Bearer + a2Properties := a2.Properties.Bearer assert.Equal(t, "token1", a1Properties.Token) assert.Equal(t, "token2", a2Properties.Token) @@ -79,8 +79,8 @@ func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { err = json.Unmarshal([]byte(a2JSON), &a2) assert.NoError(t, err) - a1Properties := a1.Properties.(*OAuth2AuthProperties) - a2Properties := a2.Properties.(*OAuth2AuthProperties) + a1Properties := a1.Properties.OAuth2 + a2Properties := a2.Properties.OAuth2 assert.Equal(t, "secret1", a1Properties.ClientSecret) assert.Equal(t, "secret2", a2Properties.ClientSecret) diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 6117092..05803fb 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -87,9 +87,7 @@ func (in *ActionDataFilter) DeepCopy() *ActionDataFilter { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *Auth) DeepCopyInto(out *Auth) { *out = *in - if in.Properties != nil { - out.Properties = in.Properties.DeepCopyAuthProperties() - } + in.Properties.DeepCopyInto(&out.Properties) return } @@ -126,18 +124,32 @@ func (in AuthArray) DeepCopy() AuthArray { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BaseAuthProperties) DeepCopyInto(out *BaseAuthProperties) { +func (in *AuthProperties) DeepCopyInto(out *AuthProperties) { *out = *in - in.Common.DeepCopyInto(&out.Common) + if in.Basic != nil { + in, out := &in.Basic, &out.Basic + *out = new(BasicAuthProperties) + (*in).DeepCopyInto(*out) + } + if in.Bearer != nil { + in, out := &in.Bearer, &out.Bearer + *out = new(BearerAuthProperties) + (*in).DeepCopyInto(*out) + } + if in.OAuth2 != nil { + in, out := &in.OAuth2, &out.OAuth2 + *out = new(OAuth2AuthProperties) + (*in).DeepCopyInto(*out) + } return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseAuthProperties. -func (in *BaseAuthProperties) DeepCopy() *BaseAuthProperties { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthProperties. +func (in *AuthProperties) DeepCopy() *AuthProperties { if in == nil { return nil } - out := new(BaseAuthProperties) + out := new(AuthProperties) in.DeepCopyInto(out) return out } @@ -259,7 +271,7 @@ func (in *BaseWorkflow) DeepCopy() *BaseWorkflow { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BasicAuthProperties) DeepCopyInto(out *BasicAuthProperties) { *out = *in - in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + in.Common.DeepCopyInto(&out.Common) return } @@ -276,7 +288,7 @@ func (in *BasicAuthProperties) DeepCopy() *BasicAuthProperties { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BearerAuthProperties) DeepCopyInto(out *BearerAuthProperties) { *out = *in - in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + in.Common.DeepCopyInto(&out.Common) return } @@ -940,7 +952,7 @@ func (in Metadata) DeepCopy() Metadata { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *OAuth2AuthProperties) DeepCopyInto(out *OAuth2AuthProperties) { *out = *in - in.BaseAuthProperties.DeepCopyInto(&out.BaseAuthProperties) + in.Common.DeepCopyInto(&out.Common) if in.Scopes != nil { in, out := &in.Scopes, &out.Scopes *out = make([]string, len(*in)) diff --git a/parser/parser_test.go b/parser/parser_test.go index 3ce6a60..2c7c3ee 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -181,7 +181,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, len(auth), 1) assert.Equal(t, "testAuth", auth[0].Name) assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.(*model.BearerAuthProperties).Token + bearerProperties := auth[0].Properties.Bearer.Token assert.Equal(t, "test_token", bearerProperties) }, }, { @@ -204,16 +204,16 @@ func TestFromFile(t *testing.T) { assert.Equal(t, len(auth), 2) assert.Equal(t, "testAuth", auth[0].Name) assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.(*model.BearerAuthProperties).Token + bearerProperties := auth[0].Properties.Bearer.Token assert.Equal(t, "test_token", bearerProperties) assert.Equal(t, "testAuth2", auth[1].Name) assert.Equal(t, model.AuthTypeBasic, auth[1].Scheme) - basicProperties := auth[1].Properties.(*model.BasicAuthProperties) + basicProperties := auth[1].Properties.Basic assert.Equal(t, "test_user", basicProperties.Username) assert.Equal(t, "test_pwd", basicProperties.Password) // metadata assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) - assert.Equal(t, &model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.GetMetadata()) + assert.Equal(t, model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.Bearer.Metadata) }, }, { "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { From 8994e2bb3440308f4cc48bbfbe229ef571daa640 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Wed, 15 Feb 2023 08:42:22 -0300 Subject: [PATCH 052/110] json tag missing on Event.Metadata (#145) Signed-off-by: Spolti --- model/event.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/event.go b/model/event.go index 9fb205e..ef442d0 100644 --- a/model/event.go +++ b/model/event.go @@ -48,7 +48,7 @@ func EventStructLevelValidation(structLevel validator.StructLevel) { // Event used to define events and their correlations type Event struct { - Common + Common `json:",inline"` // Unique event name Name string `json:"name" validate:"required"` // CloudEvent source From 7ee93e0ed7da913c143a6dbfaf725bcfbe168d09 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 7 Mar 2023 10:10:18 -0300 Subject: [PATCH 053/110] Upgrade codecov, fix currentProjectPath when running outside regular path (#149) Signed-off-by: Ricardo Zanini --- .github/workflows/Go-SDK-PR-Check.yaml | 5 +++-- hack/boilerplate.txt | 2 +- model/zz_generated.deepcopy.go | 2 +- test/path.go | 7 ++++++- tools.mod | 4 ++-- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index ebe5592..6d204af 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -78,9 +78,10 @@ jobs: run: | go test ./... 
-coverprofile test_coverage.out -covermode=atomic - name: Upload results to codecov - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 with: - file: ./test_coverage.out + files: ./test_coverage.out flags: sdk-go name: sdk-go fail_ci_if_error: true + verbose: true diff --git a/hack/boilerplate.txt b/hack/boilerplate.txt index 6a7425e..3b2e6c3 100644 --- a/hack/boilerplate.txt +++ b/hack/boilerplate.txt @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2023 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 05803fb..e53eb76 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -1,7 +1,7 @@ //go:build !ignore_autogenerated // +build !ignore_autogenerated -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2023 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/test/path.go b/test/path.go index e9ff5e4..69c7113 100644 --- a/test/path.go +++ b/test/path.go @@ -18,6 +18,7 @@ import ( "os" "path/filepath" "runtime" + "strings" "github.com/pkg/errors" ) @@ -49,5 +50,9 @@ func CurrentProjectPath() string { func currentFilePath() string { _, file, _, _ := runtime.Caller(1) - return file + if strings.HasSuffix(file, "/") { + return file + } + println("Returning an empty string for currentFilePath since it's not a caller path: " + file) + return "" } diff --git a/tools.mod b/tools.mod index a5c10f5..69ff48c 100644 --- a/tools.mod +++ b/tools.mod @@ -1,6 +1,6 @@ -module github.com/serverlessworkflow/sdk-go +module github.com/serverlessworkflow/sdk-go/v2 -go 1.14 +go 1.19 require ( github.com/google/addlicense v0.0.0-20210428195630-6d92264d7170 // indirect From 923dcdf6fe34a6e9080359dbe93ec828e4e1c9ae Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 7 Mar 2023 11:01:15 -0300 Subject: [PATCH 054/110] states - unsupported AST kind *ast.InterfaceType (#148) * states - unsupported AST kind *ast.InterfaceType Fixes #144 Signed-off-by: Spolti * rebase and review suggestions Signed-off-by: Spolti --------- Signed-off-by: Spolti --- model/action.go | 2 +- model/callback_state.go | 21 +- model/callback_state_test.go | 93 +++ model/delay_state.go | 12 +- model/delay_state_test.go | 24 +- model/event.go | 2 +- model/event_data_filter.go | 8 +- model/event_state.go | 14 +- model/event_state_test.go | 60 +- model/foreach_state.go | 16 +- model/foreach_state_test.go | 120 ++-- model/inject_state.go | 17 +- model/operation_state.go | 17 +- model/parallel_state.go | 14 +- model/parallel_state_test.go | 98 +-- model/sleep_state.go | 18 +- model/sleep_state_test.go | 16 +- model/states.go | 282 ++++++-- model/switch_state.go | 23 +- model/switch_state_test.go | 94 +-- model/util.go | 3 +- model/workflow.go | 21 +- model/zz_generated.deepcopy.go | 99 ++- parser/parser_test.go | 638 +++++++++++------- .../workflows/greetings-v08-spec.sw.yaml | 26 +- parser/testdata/workflows/greetings.sw.yaml | 1 + 26 files changed, 1143 insertions(+), 596 deletions(-) create mode 100644 model/callback_state_test.go diff --git a/model/action.go b/model/action.go index f8773ef..f33bcaf 100644 --- a/model/action.go +++ b/model/action.go @@ -33,7 +33,7 @@ type 
Action struct { // References a sub-workflow to be executed SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` // Sleep Defines time period workflow execution should sleep before / after function execution - Sleep Sleep `json:"sleep,omitempty"` + Sleep *Sleep `json:"sleep,omitempty"` // RetryRef References a defined workflow retry definition. If not defined the default retry policy is assumed RetryRef string `json:"retryRef,omitempty"` // List of unique references to defined workflow errors for which the action should not be retried. Used only when `autoRetries` is set to `true` diff --git a/model/callback_state.go b/model/callback_state.go index 04efa07..5a05f8a 100644 --- a/model/callback_state.go +++ b/model/callback_state.go @@ -14,22 +14,33 @@ package model +import ( + "encoding/json" +) + // CallbackState executes a function and waits for callback event that indicates // completion of the task. type CallbackState struct { - BaseState // Defines the action to be executed Action Action `json:"action" validate:"required"` // References a unique callback event name in the defined workflow events EventRef string `json:"eventRef" validate:"required"` // Time period to wait for incoming events (ISO 8601 format) - Timeouts *CallbackStateTimeout `json:"timeouts" validate:"omitempty"` + Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` // Event data filter - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` + EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` } -func (in *CallbackState) DeepCopyState() State { - return in +func (c *CallbackState) MarshalJSON() ([]byte, error) { + type Alias CallbackState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(c), + Timeouts: c.Timeouts, + }) + return custom, err } // CallbackStateTimeout defines timeout settings for callback state diff --git a/model/callback_state_test.go b/model/callback_state_test.go new file mode 100644 index 0000000..9566d86 --- /dev/null +++ b/model/callback_state_test.go @@ -0,0 +1,93 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func TestCallbackStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + callbackStateObj State + err string + } + testCases := []testCase{ + { + desp: "normal", + callbackStateObj: State{ + BaseState: BaseState{ + Name: "callbackTest", + Type: StateTypeCallback, + }, + CallbackState: &CallbackState{ + Action: Action{ + ID: "1", + Name: "action1", + }, + EventRef: "refExample", + }, + }, + err: ``, + }, + { + desp: "missing required EventRef", + callbackStateObj: State{ + BaseState: BaseState{ + Name: "callbackTest", + Type: StateTypeCallback, + }, + CallbackState: &CallbackState{ + Action: Action{ + ID: "1", + Name: "action1", + }, + }, + }, + err: `Key: 'State.CallbackState.EventRef' Error:Field validation for 'EventRef' failed on the 'required' tag`, + }, + // TODO need to register custom types - will be fixed by https://github.com/serverlessworkflow/sdk-go/issues/151 + //{ + // desp: "missing required Action", + // callbackStateObj: State{ + // BaseState: BaseState{ + // Name: "callbackTest", + // Type: StateTypeCallback, + // }, + // CallbackState: &CallbackState{ + // EventRef: "refExample", + // }, + // }, + // err: `Key: 'State.CallbackState.Action' Error:Field validation for 'Action' failed on the 'required' tag`, + //}, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(&tc.callbackStateObj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/delay_state.go b/model/delay_state.go index 33db874..942216a 100644 --- a/model/delay_state.go +++ b/model/delay_state.go @@ -14,13 +14,19 @@ package model +import "encoding/json" + // DelayState Causes the workflow execution to delay for a specified duration type DelayState struct { - BaseState // Amount of time (ISO 8601 format) to delay TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` } -func (in *DelayState) DeepCopyState() State { - return in +func (a *DelayState) MarshalJSON() ([]byte, error) { + custom, err := json.Marshal(&struct { + TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` + }{ + TimeDelay: a.TimeDelay, + }) + return custom, err } diff --git a/model/delay_state_test.go b/model/delay_state_test.go index 258e143..5521e03 100644 --- a/model/delay_state_test.go +++ b/model/delay_state_test.go @@ -25,42 +25,48 @@ import ( func TestDelayStateStructLevelValidation(t *testing.T) { type testCase struct { desp string - delayStateObj DelayState + delayStateObj State err string } testCases := []testCase{ { desp: "normal", - delayStateObj: DelayState{ + delayStateObj: State{ BaseState: BaseState{ Name: "1", Type: "delay", }, - TimeDelay: "PT5S", + DelayState: &DelayState{ + TimeDelay: "PT5S", + }, }, err: ``, }, { desp: "missing required timeDelay", - delayStateObj: DelayState{ + delayStateObj: State{ BaseState: BaseState{ Name: "1", Type: "delay", }, - TimeDelay: "", + DelayState: &DelayState{ + TimeDelay: "", + }, }, - err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag`, + err: `Key: 'State.DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag`, }, { desp: "invalid timeDelay duration", - delayStateObj: DelayState{ + delayStateObj: State{ BaseState: BaseState{ Name: "1", Type: 
"delay", }, - TimeDelay: "P5S", + DelayState: &DelayState{ + TimeDelay: "P5S", + }, }, - err: `Key: 'DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'iso8601duration' tag`, + err: `Key: 'State.DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'iso8601duration' tag`, }, } for _, tc := range testCases { diff --git a/model/event.go b/model/event.go index ef442d0..98d3f59 100644 --- a/model/event.go +++ b/model/event.go @@ -100,7 +100,7 @@ type EventRef struct { ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. - Data Object `json:"data,omitempty"` + Data *Object `json:"data,omitempty"` // Add additional extension context attributes to the produced event ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` // Invoke specifies if the subflow should be invoked sync or async. diff --git a/model/event_data_filter.go b/model/event_data_filter.go index 5193a4d..2f5b093 100644 --- a/model/event_data_filter.go +++ b/model/event_data_filter.go @@ -22,13 +22,13 @@ import ( // EventDataFilter used to filter consumed event payloads. type EventDataFilter struct { - // UseData represent where event payload is added/merged to state data. If it's false, data & toStateData should be ignored. - // Defaults to true. + // UseData represent where event payload is added/merged to state data. If it's false, data & toStateData + // should be ignored. Defaults to true. UseData bool `json:"useData,omitempty"` - // Workflow expression that filters of the event data (payload) Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the event payload should be added/merged into. If not specified, denotes, the top-level state data element. + // Workflow expression that selects a state data element to which the event payload should be added/merged into. + // If not specified, denotes, the top-level state data element. ToStateData string `json:"toStateData,omitempty"` } diff --git a/model/event_state.go b/model/event_state.go index f0fd896..aeb3e59 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -22,7 +22,6 @@ import ( // EventState used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel type EventState struct { // TODO: EventState doesn't have usedForCompensation field. - BaseState // If true consuming one of the defined events causes its associated actions to be performed. 
// If false all the defined events must be consumed in order for actions to be performed @@ -34,8 +33,16 @@ type EventState struct { Timeouts *EventStateTimeout `json:"timeouts,omitempty"` } -func (e *EventState) DeepCopyState() State { - return e +func (e *EventState) MarshalJSON() ([]byte, error) { + type Alias EventState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *EventStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(e), + Timeouts: e.Timeouts, + }) + return custom, err } type eventStateForUnmarshal EventState @@ -81,6 +88,7 @@ func (o *OnEvents) UnmarshalJSON(data []byte) error { } *o = OnEvents(v) + return nil } diff --git a/model/event_state_test.go b/model/event_state_test.go index 33b3b80..348aaea 100644 --- a/model/event_state_test.go +++ b/model/event_state_test.go @@ -25,30 +25,32 @@ func TestEventStateUnmarshalJSON(t *testing.T) { type testCase struct { desp string data string - expect EventState + expect State err string } testCases := []testCase{ { desp: "all fields set", - data: `{"name": "1", "Type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: EventState{ + data: `{"name": "1", "type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, + expect: State{ BaseState: BaseState{ Name: "1", Type: StateTypeEvent, }, - Exclusive: false, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", + EventState: &EventState{ + Exclusive: false, + OnEvents: []OnEvents{ + { + EventRefs: []string{"E1", "E2"}, + ActionMode: "parallel", + }, }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", + Timeouts: &EventStateTimeout{ + EventTimeout: "PT5M", + ActionExecTimeout: "PT5M", + StateExecTimeout: &StateExecTimeout{ + Total: "PT5M", + }, }, }, }, @@ -56,24 +58,26 @@ func TestEventStateUnmarshalJSON(t *testing.T) { }, { desp: "default exclusive", - data: `{"name": "1", "Type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: EventState{ + data: `{"name": "1", "type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, + expect: State{ BaseState: BaseState{ Name: "1", Type: StateTypeEvent, }, - Exclusive: true, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", + EventState: &EventState{ + Exclusive: true, + OnEvents: []OnEvents{ + { + EventRefs: []string{"E1", "E2"}, + ActionMode: "parallel", + }, }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", + Timeouts: &EventStateTimeout{ + EventTimeout: "PT5M", + ActionExecTimeout: "PT5M", + StateExecTimeout: &StateExecTimeout{ + Total: "PT5M", + }, }, }, }, @@ -82,7 +86,7 @@ func TestEventStateUnmarshalJSON(t *testing.T) { } for _, tc := range testCases { t.Run(tc.desp, func(t *testing.T) { - v := EventState{} + v := State{} err := json.Unmarshal([]byte(tc.data), &v) if tc.err != "" { diff --git 
a/model/foreach_state.go b/model/foreach_state.go index f35e609..b3ef13e 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -43,7 +43,6 @@ const ( // ForEachState used to execute actions for each element of a data set. type ForEachState struct { - BaseState // Workflow expression selecting an array element of the states data InputCollection string `json:"inputCollection" validate:"required"` // Workflow expression specifying an array element of the states data to add the results of each iteration @@ -56,13 +55,20 @@ type ForEachState struct { Actions []Action `json:"actions,omitempty" validate:"required,min=1,dive"` // State specific timeout Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Mode Specifies how iterations are to be performed (sequentially or in parallel) - // Defaults to parallel + // Mode Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel Mode ForEachModeType `json:"mode,omitempty"` } -func (f *ForEachState) DeepCopyState() State { - return f +func (f *ForEachState) MarshalJSON() ([]byte, error) { + type Alias ForEachState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(f), + Timeouts: f.Timeouts, + }) + return custom, err } type forEachStateForUnmarshal ForEachState diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go index 6eacde0..3dcb3f8 100644 --- a/model/foreach_state_test.go +++ b/model/foreach_state_test.go @@ -75,119 +75,131 @@ func TestForEachStateUnmarshalJSON(t *testing.T) { func TestForEachStateStructLevelValidation(t *testing.T) { type testCase struct { desp string - state ForEachState + state State err string } testCases := []testCase{ { desp: "normal test & sequential", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeSequential, }, - Mode: ForEachModeTypeSequential, }, err: ``, }, { desp: "normal test & parallel int", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 1, + }, }, }, err: ``, }, { desp: "normal test & parallel string", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "1", + }, }, }, err: ``, }, { desp: "invalid parallel int", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: 
intstr.Int, + IntVal: 0, + }, }, }, - err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, }, { desp: "invalid parallel string", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "0", + }, }, }, - err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, }, { desp: "invalid parallel string format", - state: ForEachState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "2", }, - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "a", + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "a", + }, }, }, - err: `Key: 'ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, }, } for _, tc := range testCases { diff --git a/model/inject_state.go b/model/inject_state.go index 91544e6..681ac63 100644 --- a/model/inject_state.go +++ b/model/inject_state.go @@ -14,17 +14,28 @@ package model +import ( + "encoding/json" +) + // InjectState used to inject static data into state data input. type InjectState struct { - BaseState // JSON object which can be set as states data input and can be manipulated via filters Data map[string]Object `json:"data" validate:"required,min=1"` // State specific timeouts Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` } -func (in *InjectState) DeepCopyState() State { - return in +func (i *InjectState) MarshalJSON() ([]byte, error) { + type Alias InjectState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(i), + Timeouts: i.Timeouts, + }) + return custom, err } // InjectStateTimeout defines timeout settings for inject state diff --git a/model/operation_state.go b/model/operation_state.go index 1a74e22..388adfe 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -20,9 +20,7 @@ import ( // OperationState defines a set of actions to be performed in sequence or in parallel. 
type OperationState struct { - BaseState - // Specifies whether actions are performed in sequence or in parallel - // Defaults to sequential + // Specifies whether actions are performed in sequence or in parallel, defaults to sequential ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` // Actions to be performed Actions []Action `json:"actions" validate:"required,min=1,dive"` @@ -30,14 +28,23 @@ type OperationState struct { Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` } -func (o *OperationState) DeepCopyState() State { - return o +func (a *OperationState) MarshalJSON() ([]byte, error) { + type Alias OperationState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(a), + Timeouts: a.Timeouts, + }) + return custom, err } type operationStateForUnmarshal OperationState // UnmarshalJSON unmarshal OperationState object from json bytes func (o *OperationState) UnmarshalJSON(data []byte) error { + v := operationStateForUnmarshal{ ActionMode: ActionModeSequential, } diff --git a/model/parallel_state.go b/model/parallel_state.go index 7e7ec83..e512ffa 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -40,13 +40,11 @@ const ( // ParallelState Consists of a number of states that are executed in parallel type ParallelState struct { - BaseState // Branch Definitions Branches []Branch `json:"branches" validate:"required,min=1,dive"` // Option types on how to complete branch execution. // Defaults to `allOf` CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneof=allOf atLeast"` - // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." 
// TODO: change this field to unmarshal result as int NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` @@ -54,8 +52,16 @@ type ParallelState struct { Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` } -func (ps *ParallelState) DeepCopyState() State { - return ps +func (p *ParallelState) MarshalJSON() ([]byte, error) { + type Alias ParallelState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(p), + Timeouts: p.Timeouts, + }) + return custom, err } type parallelStateForUnmarshal ParallelState diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go index 49daf42..c824d3b 100644 --- a/model/parallel_state_test.go +++ b/model/parallel_state_test.go @@ -71,108 +71,118 @@ func TestParallelStateUnmarshalJSON(t *testing.T) { func TestParallelStateStructLevelValidation(t *testing.T) { type testCase struct { desp string - state *ParallelState + state *State err string } testCases := []testCase{ { desp: "normal", - state: &ParallelState{ + state: &State{ BaseState: BaseState{ Name: "1", Type: "parallel", }, - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, }, }, + CompletionType: CompletionTypeAllOf, + NumCompleted: intstr.FromInt(1), }, - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), }, err: ``, }, { desp: "invalid completeType", - state: &ParallelState{ + state: &State{ BaseState: BaseState{ Name: "1", Type: "parallel", }, - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, }, }, + CompletionType: CompletionTypeAllOf + "1", }, - CompletionType: CompletionTypeAllOf + "1", }, - err: `Key: 'ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, + err: `Key: 'State.ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, }, { desp: "invalid numCompleted `int`", - state: &ParallelState{ + state: &State{ BaseState: BaseState{ Name: "1", Type: "parallel", }, - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, }, }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromInt(0), }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromInt(0), }, - err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, }, { desp: "invalid numCompleted string format", - state: &ParallelState{ + state: &State{ BaseState: BaseState{ Name: "1", Type: "parallel", }, - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, }, }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("a"), }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("a"), }, - err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, }, { desp: 
"normal", - state: &ParallelState{ + state: &State{ BaseState: BaseState{ Name: "1", Type: "parallel", }, - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, }, }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("0"), }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("0"), }, - err: `Key: 'ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, }, } for _, tc := range testCases { diff --git a/model/sleep_state.go b/model/sleep_state.go index c7d2fd4..7eddd41 100644 --- a/model/sleep_state.go +++ b/model/sleep_state.go @@ -14,18 +14,28 @@ package model +import ( + "encoding/json" +) + // SleepState suspends workflow execution for a given time duration. type SleepState struct { - BaseState - // Duration (ISO 8601 duration format) to sleep Duration string `json:"duration" validate:"required,iso8601duration"` // Timeouts State specific timeouts Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` } -func (in *SleepState) DeepCopyState() State { - return in +func (s *SleepState) MarshalJSON() ([]byte, error) { + type Alias SleepState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(s), + Timeouts: s.Timeouts, + }) + return custom, err } // SleepStateTimeout defines timeout settings for sleep state diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go index 4fc531d..e6580df 100644 --- a/model/sleep_state_test.go +++ b/model/sleep_state_test.go @@ -25,31 +25,35 @@ import ( func TestSleepStateStructLevelValidation(t *testing.T) { type testCase struct { desp string - state SleepState + state State err string } testCases := []testCase{ { desp: "normal duration", - state: SleepState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "sleep", }, - Duration: "PT10S", + SleepState: &SleepState{ + Duration: "PT10S", + }, }, err: ``, }, { desp: "invalid duration", - state: SleepState{ + state: State{ BaseState: BaseState{ Name: "1", Type: "sleep", }, - Duration: "T10S", + SleepState: &SleepState{ + Duration: "T10S", + }, }, - err: `Key: 'SleepState.Duration' Error:Field validation for 'Duration' failed on the 'iso8601duration' tag`, + err: `Key: 'State.SleepState.Duration' Error:Field validation for 'Duration' failed on the 'iso8601duration' tag`, }, } diff --git a/model/states.go b/model/states.go index fc6df11..bc3c5df 100644 --- a/model/states.go +++ b/model/states.go @@ -14,70 +14,36 @@ package model +import ( + "encoding/json" + "fmt" + "strings" +) + // StateType ... type StateType string const ( // StateTypeDelay ... - StateTypeDelay = "delay" + StateTypeDelay StateType = "delay" // StateTypeEvent ... - StateTypeEvent = "event" + StateTypeEvent StateType = "event" // StateTypeOperation ... - StateTypeOperation = "operation" + StateTypeOperation StateType = "operation" // StateTypeParallel ... - StateTypeParallel = "parallel" + StateTypeParallel StateType = "parallel" // StateTypeSwitch ... - StateTypeSwitch = "switch" + StateTypeSwitch StateType = "switch" // StateTypeForEach ... - StateTypeForEach = "foreach" + StateTypeForEach StateType = "foreach" // StateTypeInject ... - StateTypeInject = "inject" + StateTypeInject StateType = "inject" // StateTypeCallback ... 
- StateTypeCallback = "callback" + StateTypeCallback StateType = "callback" // StateTypeSleep ... - StateTypeSleep = "sleep" + StateTypeSleep StateType = "sleep" ) -func getActionsModelMapping(stateType string) (State, bool) { - switch stateType { - case StateTypeDelay: - return &DelayState{}, true - case StateTypeEvent: - return &EventState{}, true - case StateTypeOperation: - return &OperationState{}, true - case StateTypeParallel: - return &ParallelState{}, true - case StateTypeSwitch: - return &SwitchState{}, true - case StateTypeInject: - return &InjectState{}, true - case StateTypeForEach: - return &ForEachState{}, true - case StateTypeCallback: - return &CallbackState{}, true - case StateTypeSleep: - return &SleepState{}, true - } - return nil, false -} - -// State definition for a Workflow state -type State interface { - GetID() string - GetName() string - GetType() StateType - GetOnErrors() []OnError - GetTransition() *Transition - GetStateDataFilter() *StateDataFilter - GetCompensatedBy() string - GetUsedForCompensation() bool - GetEnd() *End - GetMetadata() *Metadata - // DeepCopyState fixes undefined (type State has no field or method DeepCopyState) - DeepCopyState() State -} - // BaseState ... type BaseState struct { // Unique State id @@ -101,32 +67,210 @@ type BaseState struct { Metadata *Metadata `json:"metadata,omitempty"` } -// GetOnErrors ... -func (s *BaseState) GetOnErrors() []OnError { return s.OnErrors } +func (b *BaseState) MarshalJSON() ([]byte, error) { + type Alias BaseState + if b == nil { + return []byte("null"), nil + } + cus, err := json.Marshal(struct { + *Alias + }{ + Alias: (*Alias)(b), + }) + return cus, err +} + +func (b *BaseState) UnmarshalJSON(data []byte) error { + baseState := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &baseState); err != nil { + return err + } + if err := unmarshalKey("id", baseState, &b.ID); err != nil { + return err + } + if err := unmarshalKey("name", baseState, &b.Name); err != nil { + return err + } + if err := unmarshalKey("type", baseState, &b.Type); err != nil { + return err + } + if err := unmarshalKey("onErrors", baseState, &b.OnErrors); err != nil { + return err + } + if err := unmarshalKey("transition", baseState, &b.Transition); err != nil { + return err + } + if err := unmarshalKey("stateDataFilter", baseState, &b.StateDataFilter); err != nil { + return err + } + if err := unmarshalKey("compensatedBy", baseState, &b.CompensatedBy); err != nil { + return err + } + if err := unmarshalKey("usedForCompensation", baseState, &b.UsedForCompensation); err != nil { + return err + } + if err := unmarshalKey("end", baseState, &b.End); err != nil { + return err + } + if err := unmarshalKey("metadata", baseState, &b.Metadata); err != nil { + return err + } + + return nil +} + +type State struct { + BaseState `json:",omitempty"` + *DelayState `json:",omitempty"` + *EventState `json:",omitempty"` + *OperationState `json:",omitempty"` + *ParallelState `json:",omitempty"` + *SwitchState `json:",omitempty"` + *ForEachState `json:",omitempty"` + *InjectState `json:",omitempty"` + *CallbackState `json:",omitempty"` + *SleepState `json:",omitempty"` +} + +func (s *State) MarshalJSON() ([]byte, error) { + if s == nil { + return []byte("null"), nil + } + r := []byte("") + var errs error + + if s.DelayState != nil { + r, errs = s.DelayState.MarshalJSON() + } + + if s.EventState != nil { + r, errs = s.EventState.MarshalJSON() + } + + if s.OperationState != nil { + r, errs = s.OperationState.MarshalJSON() + } + + if 
s.ParallelState != nil { + r, errs = s.ParallelState.MarshalJSON() + } + + if s.SwitchState != nil { + r, errs = s.SwitchState.MarshalJSON() + } + + if s.ForEachState != nil { + r, errs = s.ForEachState.MarshalJSON() + } + + if s.InjectState != nil { + r, errs = s.InjectState.MarshalJSON() + } + + if s.CallbackState != nil { + r, errs = s.CallbackState.MarshalJSON() + } + + if s.SleepState != nil { + r, errs = s.SleepState.MarshalJSON() + } + + b, err := s.BaseState.MarshalJSON() + if err != nil { + return nil, err + } + + //remove }{ as BaseState and the State Type needs to be merged together + partialResult := append(b, r...) + result := strings.Replace(string(partialResult), "}{", ",", 1) + return []byte(result), errs +} + +func (s *State) UnmarshalJSON(data []byte) error { -// GetCompensatedBy ... -func (s *BaseState) GetCompensatedBy() string { return s.CompensatedBy } + if err := json.Unmarshal(data, &s.BaseState); err != nil { + return err + } + + mapState := map[string]interface{}{} + if err := json.Unmarshal(data, &mapState); err != nil { + return err + } + + switch mapState["type"] { + case string(StateTypeDelay): + state := &DelayState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.DelayState = state + return nil -// GetTransition ... -func (s *BaseState) GetTransition() *Transition { return s.Transition } + case string(StateTypeEvent): + state := &EventState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.EventState = state + return nil -// GetUsedForCompensation ... -func (s *BaseState) GetUsedForCompensation() bool { return s.UsedForCompensation } + case string(StateTypeOperation): + state := &OperationState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.OperationState = state + return nil -// GetEnd ... -func (s *BaseState) GetEnd() *End { return s.End } + case string(StateTypeParallel): + state := &ParallelState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.ParallelState = state + return nil -// GetID ... -func (s *BaseState) GetID() string { return s.ID } + case string(StateTypeSwitch): + state := &SwitchState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.SwitchState = state + return nil -// GetName ... -func (s *BaseState) GetName() string { return s.Name } + case string(StateTypeForEach): + state := &ForEachState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.ForEachState = state + return nil -// GetType ... -func (s *BaseState) GetType() StateType { return s.Type } + case string(StateTypeInject): + state := &InjectState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.InjectState = state + return nil -// GetStateDataFilter ... -func (s *BaseState) GetStateDataFilter() *StateDataFilter { return s.StateDataFilter } + case string(StateTypeCallback): + state := &CallbackState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.CallbackState = state + return nil -// GetMetadata ... 
-func (s *BaseState) GetMetadata() *Metadata { return s.Metadata } + case string(StateTypeSleep): + state := &SleepState{} + if err := json.Unmarshal(data, state); err != nil { + return err + } + s.SleepState = state + return nil + + default: + return fmt.Errorf("state type %v not supported", mapState["type"]) + } +} diff --git a/model/switch_state.go b/model/switch_state.go index 937f608..118b18c 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -16,6 +16,7 @@ package model import ( "context" + "encoding/json" "reflect" val "github.com/serverlessworkflow/sdk-go/v2/validator" @@ -33,7 +34,6 @@ func init() { // SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. type SwitchState struct { // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. - BaseState // Default transition of the workflow if there is no matching data conditions. Can include a transition or end definition // Required @@ -46,8 +46,16 @@ type SwitchState struct { Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` } -func (in *SwitchState) DeepCopyState() State { - return in +func (s *SwitchState) MarshalJSON() ([]byte, error) { + type Alias SwitchState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(s), + Timeouts: s.Timeouts, + }) + return custom, err } // SwitchStateStructLevelValidation custom validator for SwitchState @@ -55,9 +63,9 @@ func SwitchStateStructLevelValidation(ctx context.Context, structLevel validator switchState := structLevel.Current().Interface().(SwitchState) switch { case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataCnoditions, eventConditions") + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataConditions, eventConditions") case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataCnoditions, eventConditions") + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataConditions, eventConditions") } } @@ -95,9 +103,8 @@ type EventCondition struct { // References a unique event name in the defined workflow events EventRef string `json:"eventRef" validate:"required"` // Event data filter definition - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` - Metadata Metadata `json:"metadata,omitempty"` - + EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` // Explicit transition to end End *End `json:"end" validate:"omitempty"` // Workflow transition if condition is evaluated to true diff --git a/model/switch_state_test.go b/model/switch_state_test.go index 9bfbf17..3136e4a 100644 --- a/model/switch_state_test.go +++ b/model/switch_state_test.go @@ -25,27 +25,29 @@ import ( func TestSwitchStateStructLevelValidation(t *testing.T) { type testCase struct { desp string - obj SwitchState + obj State err string } testCases := []testCase{ { desp: "normal & eventConditions", - obj: SwitchState{ + obj: State{ BaseState: BaseState{ Name: "1", Type: "switch", }, - DefaultCondition: DefaultCondition{ - 
Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ Transition: &Transition{ - NextState: "2", + NextState: "1", + }, + }, + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, }, }, }, @@ -54,21 +56,23 @@ func TestSwitchStateStructLevelValidation(t *testing.T) { }, { desp: "normal & dataConditions", - obj: SwitchState{ + obj: State{ BaseState: BaseState{ Name: "1", Type: "switch", }, - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ Transition: &Transition{ - NextState: "2", + NextState: "1", + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, }, }, }, @@ -77,49 +81,53 @@ func TestSwitchStateStructLevelValidation(t *testing.T) { }, { desp: "missing eventConditions & dataConditions", - obj: SwitchState{ + obj: State{ BaseState: BaseState{ Name: "1", Type: "switch", }, - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, }, }, }, - err: `Key: 'SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, + err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, }, { desp: "exclusive eventConditions & dataConditions", - obj: SwitchState{ + obj: State{ BaseState: BaseState{ Name: "1", Type: "switch", }, - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ Transition: &Transition{ - NextState: "2", + NextState: "1", }, }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", - Transition: &Transition{ - NextState: "2", + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, }, }, }, }, - err: `Key: 'SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, + err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, }, } for _, tc := range testCases { diff --git a/model/util.go b/model/util.go index d90f966..8796e9c 100644 --- a/model/util.go +++ b/model/util.go @@ -67,7 +67,7 @@ func getBytesFromFile(s string) (b []byte, err error) { } // TODO: optimize this - // NOTE: In specification, we can declared independently definitions with another file format, so + // NOTE: In specification, we can declare independent definitions with another file format, so // we must convert independently yaml source to json format data before unmarshal. 
if strings.HasSuffix(s, ".yaml") || strings.HasSuffix(s, ".yml") { b, err = yaml.YAMLToJSON(b) @@ -75,7 +75,6 @@ func getBytesFromFile(s string) (b []byte, err error) { return nil, err } } - return b, nil } diff --git a/model/workflow.go b/model/workflow.go index 85df8e0..d3ffd58 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -191,24 +191,12 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { } w.States = make([]State, len(rawStates)) - mapState := map[string]interface{}{} for i, rawState := range rawStates { - if err := json.Unmarshal(rawState, &mapState); err != nil { + if err := json.Unmarshal(rawState, &w.States[i]); err != nil { return err } - - actionsMode, ok := getActionsModelMapping(mapState["type"].(string)) - if !ok { - return fmt.Errorf("state %s not supported", mapState["type"]) - } - state := actionsMode - - if err := json.Unmarshal(rawState, &state); err != nil { - return err - } - w.States[i] = state - mapState = map[string]interface{}{} } + if _, ok := workflowMap["events"]; ok { if err := json.Unmarshal(workflowMap["events"], &w.Events); err != nil { var s string @@ -520,6 +508,7 @@ type End struct { // UnmarshalJSON ... func (e *End) UnmarshalJSON(data []byte) error { + endMap := make(map[string]json.RawMessage) if err := json.Unmarshal(data, &endMap); err != nil { e.Terminate = false @@ -588,8 +577,8 @@ type ProduceEvent struct { // References a name of a defined event EventRef string `json:"eventRef" validate:"required"` // TODO: add object or string data type - // If String, expression which selects parts of the states data output to become the data of the produced event. If object a custom object to become the data of produced event. - // TODO + // If String, expression which selects parts of the states data output to become the data of the produced event. + // If object a custom object to become the data of produced event. Data string `json:"data,omitempty"` // Add additional event extension context attributes ContextAttributes map[string]string `json:"contextAttributes,omitempty"` diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index e53eb76..93ce084 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -43,7 +43,11 @@ func (in *Action) DeepCopyInto(out *Action) { *out = new(WorkflowRef) **out = **in } - out.Sleep = in.Sleep + if in.Sleep != nil { + in, out := &in.Sleep, &out.Sleep + *out = new(Sleep) + **out = **in + } if in.NonRetryableErrors != nil { in, out := &in.NonRetryableErrors, &out.NonRetryableErrors *out = make([]string, len(*in)) @@ -349,14 +353,17 @@ func (in *BranchTimeouts) DeepCopy() *BranchTimeouts { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *CallbackState) DeepCopyInto(out *CallbackState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) in.Action.DeepCopyInto(&out.Action) if in.Timeouts != nil { in, out := &in.Timeouts, &out.Timeouts *out = new(CallbackStateTimeout) (*in).DeepCopyInto(*out) } - out.EventDataFilter = in.EventDataFilter + if in.EventDataFilter != nil { + in, out := &in.EventDataFilter, &out.EventDataFilter + *out = new(EventDataFilter) + **out = **in + } return } @@ -578,7 +585,6 @@ func (in *DefaultCondition) DeepCopy() *DefaultCondition { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *DelayState) DeepCopyInto(out *DelayState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) return } @@ -661,7 +667,11 @@ func (in *Event) DeepCopy() *Event { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *EventCondition) DeepCopyInto(out *EventCondition) { *out = *in - out.EventDataFilter = in.EventDataFilter + if in.EventDataFilter != nil { + in, out := &in.EventDataFilter, &out.EventDataFilter + *out = new(EventDataFilter) + **out = **in + } if in.Metadata != nil { in, out := &in.Metadata, &out.Metadata *out = make(Metadata, len(*in)) @@ -711,7 +721,11 @@ func (in *EventDataFilter) DeepCopy() *EventDataFilter { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *EventRef) DeepCopyInto(out *EventRef) { *out = *in - in.Data.DeepCopyInto(&out.Data) + if in.Data != nil { + in, out := &in.Data, &out.Data + *out = new(Object) + (*in).DeepCopyInto(*out) + } if in.ContextAttributes != nil { in, out := &in.ContextAttributes, &out.ContextAttributes *out = make(map[string]Object, len(*in)) @@ -735,7 +749,6 @@ func (in *EventRef) DeepCopy() *EventRef { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *EventState) DeepCopyInto(out *EventState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.OnEvents != nil { in, out := &in.OnEvents, &out.OnEvents *out = make([]OnEvents, len(*in)) @@ -785,7 +798,6 @@ func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ForEachState) DeepCopyInto(out *ForEachState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.BatchSize != nil { in, out := &in.BatchSize, &out.BatchSize *out = new(intstr.IntOrString) @@ -880,7 +892,6 @@ func (in *FunctionRef) DeepCopy() *FunctionRef { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *InjectState) DeepCopyInto(out *InjectState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.Data != nil { in, out := &in.Data, &out.Data *out = make(map[string]Object, len(*in)) @@ -1060,7 +1071,6 @@ func (in *OnEvents) DeepCopy() *OnEvents { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *OperationState) DeepCopyInto(out *OperationState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.Actions != nil { in, out := &in.Actions, &out.Actions *out = make([]Action, len(*in)) @@ -1110,7 +1120,6 @@ func (in *OperationStateTimeout) DeepCopy() *OperationStateTimeout { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ParallelState) DeepCopyInto(out *ParallelState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.Branches != nil { in, out := &in.Branches, &out.Branches *out = make([]Branch, len(*in)) @@ -1264,7 +1273,6 @@ func (in *Sleep) DeepCopy() *Sleep { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *SleepState) DeepCopyInto(out *SleepState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) if in.Timeouts != nil { in, out := &in.Timeouts, &out.Timeouts *out = new(SleepStateTimeout) @@ -1325,6 +1333,68 @@ func (in *Start) DeepCopy() *Start { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *State) DeepCopyInto(out *State) { + *out = *in + in.BaseState.DeepCopyInto(&out.BaseState) + if in.DelayState != nil { + in, out := &in.DelayState, &out.DelayState + *out = new(DelayState) + **out = **in + } + if in.EventState != nil { + in, out := &in.EventState, &out.EventState + *out = new(EventState) + (*in).DeepCopyInto(*out) + } + if in.OperationState != nil { + in, out := &in.OperationState, &out.OperationState + *out = new(OperationState) + (*in).DeepCopyInto(*out) + } + if in.ParallelState != nil { + in, out := &in.ParallelState, &out.ParallelState + *out = new(ParallelState) + (*in).DeepCopyInto(*out) + } + if in.SwitchState != nil { + in, out := &in.SwitchState, &out.SwitchState + *out = new(SwitchState) + (*in).DeepCopyInto(*out) + } + if in.ForEachState != nil { + in, out := &in.ForEachState, &out.ForEachState + *out = new(ForEachState) + (*in).DeepCopyInto(*out) + } + if in.InjectState != nil { + in, out := &in.InjectState, &out.InjectState + *out = new(InjectState) + (*in).DeepCopyInto(*out) + } + if in.CallbackState != nil { + in, out := &in.CallbackState, &out.CallbackState + *out = new(CallbackState) + (*in).DeepCopyInto(*out) + } + if in.SleepState != nil { + in, out := &in.SleepState, &out.SleepState + *out = new(SleepState) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new State. +func (in *State) DeepCopy() *State { + if in == nil { + return nil + } + out := new(State) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *StateDataFilter) DeepCopyInto(out *StateDataFilter) { *out = *in @@ -1360,7 +1430,6 @@ func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *SwitchState) DeepCopyInto(out *SwitchState) { *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) if in.EventConditions != nil { in, out := &in.EventConditions, &out.EventConditions @@ -1472,9 +1541,7 @@ func (in *Workflow) DeepCopyInto(out *Workflow) { in, out := &in.States, &out.States *out = make([]State, len(*in)) for i := range *in { - if (*in)[i] != nil { - (*out)[i] = (*in)[i].DeepCopyState() - } + (*in)[i].DeepCopyInto(&(*out)[i]) } } if in.Events != nil { diff --git a/parser/parser_test.go b/parser/parser_test.go index 2c7c3ee..547557a 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -16,11 +16,14 @@ package parser import ( "encoding/json" + "fmt" "os" "path/filepath" "strings" "testing" + "k8s.io/apimachinery/pkg/util/intstr" + "github.com/stretchr/testify/assert" "github.com/serverlessworkflow/sdk-go/v2/model" @@ -68,48 +71,52 @@ func TestFromFile(t *testing.T) { func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Greeting Workflow", w.Name) assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0]) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.IsType(t, &model.OperationState{}, w.States[0].OperationState) + assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) }, }, { "./testdata/workflows/actiondata-defaultvalue.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0].(*model.OperationState)) - assert.Equal(t, true, w.States[0].(*model.OperationState).Actions[0].ActionDataFilter.UseResults) - assert.Equal(t, "greeting", w.States[0].(*model.OperationState).Actions[0].Name) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].OperationState) + assert.Equal(t, true, w.States[0].OperationState.Actions[0].ActionDataFilter.UseResults) + assert.Equal(t, "greeting", w.States[0].OperationState.Actions[0].Name) }, }, { "./testdata/workflows/greetings.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Greeting Workflow", w.Name) - assert.IsType(t, &model.OperationState{}, w.States[0]) + assert.NotNil(t, w.States[0]) + assert.IsType(t, "idx", w.States[0].ID) assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) - assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) + assert.NotEmpty(t, w.States[0].OperationState.Actions) + assert.NotNil(t, w.States[0].OperationState.Actions[0].FunctionRef) + assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) }, }, { "./testdata/workflows/eventbaseddataandswitch.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.Equal(t, "Start", w.States[0].GetName()) - assert.Equal(t, "CheckVisaStatus", w.States[1].GetName()) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - assert.IsType(t, &model.SwitchState{}, w.States[1]) - assert.Equal(t, "PT1H", w.States[1].(*model.SwitchState).Timeouts.EventTimeout) + assert.Equal(t, "Start", w.States[0].Name) + assert.Equal(t, "CheckVisaStatus", w.States[1].Name) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + assert.NotNil(t, w.States[1]) + assert.NotNil(t, 
w.States[1].SwitchState) + assert.Equal(t, "PT1H", w.States[1].SwitchState.Timeouts.EventTimeout) }, }, { "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { - operationState := w.States[0].(*model.OperationState) + operationState := w.States[0].OperationState assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) }, }, { "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) + assert.NotNil(t, w.States[0]) + eventState := w.States[0].EventState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) @@ -120,8 +127,8 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) + assert.NotNil(t, w.States[0]) + eventState := w.States[0].EventState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) @@ -133,8 +140,8 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) + assert.NotNil(t, w.States[0]) + eventState := w.States[0].EventState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) @@ -145,8 +152,8 @@ func TestFromFile(t *testing.T) { "./testdata/workflows/eventbasedgreeting.sw.p.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Event Based Greeting Workflow", w.Name) assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.IsType(t, &model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) + assert.NotNil(t, w.States[0]) + eventState := w.States[0].EventState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) @@ -154,25 +161,26 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - eventState := w.States[0].(*model.SwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.EventConditions) - assert.NotEmpty(t, eventState.Name) - assert.IsType(t, model.EventCondition{}, eventState.EventConditions[0]) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + assert.NotEmpty(t, w.States[0].EventConditions) + assert.Equal(t, "CheckVisaStatus", w.States[0].Name) + assert.IsType(t, model.EventCondition{}, w.States[0].EventConditions[0]) }, }, { "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - eventState := w.States[0].(*model.SwitchState) - 
assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + switchState := w.States[0].SwitchState + assert.NotNil(t, switchState) + assert.NotEmpty(t, switchState.DataConditions) + assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) + assert.NotNil(t, w.States[1]) + assert.NotNil(t, w.States[1].OperationState) + operationState := w.States[1].OperationState assert.NotNil(t, operationState) assert.NotEmpty(t, operationState.Actions) assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) @@ -187,15 +195,17 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - eventState := w.States[0].(*model.SwitchState) - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + switchState := w.States[0].SwitchState + assert.NotNil(t, switchState) + assert.NotEmpty(t, switchState.DataConditions) + assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.IsType(t, &model.OperationState{}, w.States[1]) - operationState := w.States[1].(*model.OperationState) + assert.NotNil(t, w.States[1]) + assert.NotNil(t, w.States[1].OperationState) + operationState := w.States[1].OperationState assert.NotNil(t, operationState) assert.NotEmpty(t, operationState.Actions) assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) @@ -218,8 +228,9 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - eventState := w.States[0].(*model.SwitchState) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + eventState := w.States[0].SwitchState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) @@ -227,8 +238,9 @@ func TestFromFile(t *testing.T) { }, }, { "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { - assert.IsType(t, &model.SwitchState{}, w.States[0]) - eventState := w.States[0].(*model.SwitchState) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + eventState := w.States[0].SwitchState assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.DataConditions) assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) @@ -237,8 +249,9 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/checkinbox.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.IsType(t, &model.OperationState{}, 
w.States[0]) - operationState := w.States[0].(*model.OperationState) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].OperationState) + operationState := w.States[0].OperationState assert.NotNil(t, operationState) assert.NotEmpty(t, operationState.Actions) assert.Len(t, w.States, 2) @@ -247,41 +260,42 @@ func TestFromFile(t *testing.T) { // validates: https://github.com/serverlessworkflow/specification/pull/175/ "./testdata/workflows/provisionorders.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Provision Orders", w.Name) - assert.IsType(t, &model.OperationState{}, w.States[0]) - operationState := w.States[0].(*model.OperationState) - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, operationState.OnErrors, 3) - assert.Equal(t, "Missing order id", operationState.OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", operationState.OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", operationState.OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].OperationState) + assert.NotEmpty(t, w.States[0].OperationState.Actions) + assert.Len(t, w.States[0].OnErrors, 3) + assert.Equal(t, "Missing order id", w.States[0].OnErrors[0].ErrorRef) + assert.Equal(t, "MissingId", w.States[0].OnErrors[0].Transition.NextState) + assert.Equal(t, "Missing order item", w.States[0].OnErrors[1].ErrorRef) + assert.Equal(t, "MissingItem", w.States[0].OnErrors[1].Transition.NextState) + assert.Equal(t, "Missing order quantity", w.States[0].OnErrors[2].ErrorRef) + assert.Equal(t, "MissingQuantity", w.States[0].OnErrors[2].Transition.NextState) }, }, { "./testdata/workflows/checkinbox.cron-test.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Check Inbox Workflow", w.Name) assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) - assert.False(t, w.States[1].GetEnd().Terminate) + assert.Equal(t, "checkInboxFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) + assert.Equal(t, "SendTextForHighPriority", w.States[0].Transition.NextState) + assert.False(t, w.States[1].End.Terminate) }, }, { "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.IsType(t, &model.SwitchState{}, w.States[0]) - dataBaseSwitchState := w.States[0].(*model.SwitchState) - assert.NotNil(t, dataBaseSwitchState) - assert.NotEmpty(t, dataBaseSwitchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].GetName()) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].SwitchState) + switchState := w.States[0].SwitchState + assert.NotNil(t, switchState) + assert.NotEmpty(t, switchState.DataConditions) + assert.Equal(t, "CheckApplication", w.States[0].Name) }, }, { // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 "./testdata/workflows/patientonboarding.sw.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Patient Onboarding Workflow", w.Name) - assert.IsType(t, 
&model.EventState{}, w.States[0]) - eventState := w.States[0].(*model.EventState) + assert.NotNil(t, w.States[0]) + assert.NotNil(t, w.States[0].EventState) + eventState := w.States[0].EventState assert.NotNil(t, eventState) assert.NotEmpty(t, w.Retries) assert.Len(t, w.Retries, 1) @@ -328,13 +342,13 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Notify Customer", w.Name) - eventState := w.States[1].(*model.SwitchState) + switchState := w.States[1].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) + assert.NotNil(t, switchState) + assert.NotEmpty(t, switchState.DataConditions) + assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - endDataCondition := eventState.DataConditions[0] + endDataCondition := switchState.DataConditions[0] assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) assert.Equal(t, model.FromString("${ del(.customerCount) }"), endDataCondition.End.ContinueAs.Data) @@ -371,132 +385,166 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) // Delay state - assert.NotEmpty(t, w.States[0].(*model.DelayState).TimeDelay) - assert.Equal(t, "GreetDelay", w.States[0].GetName()) - assert.Equal(t, model.StateType("delay"), w.States[0].GetType()) - assert.Equal(t, "StoreCarAuctionBid", w.States[0].(*model.DelayState).Transition.NextState) + assert.NotEmpty(t, w.States[0].DelayState.TimeDelay) + assert.Equal(t, "GreetDelay", w.States[0].Name) + assert.Equal(t, model.StateTypeDelay, w.States[0].Type) + assert.Equal(t, "StoreCarAuctionBid", w.States[0].Transition.NextState) // Event state - assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents) - assert.Equal(t, "StoreCarAuctionBid", w.States[1].GetName()) - assert.Equal(t, model.StateType("event"), w.States[1].GetType()) - assert.Equal(t, true, w.States[1].(*model.EventState).Exclusive) - assert.NotEmpty(t, true, w.States[1].(*model.EventState).OnEvents[0]) - assert.Equal(t, true, w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.UseData) - assert.Equal(t, "test", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.Data) - assert.Equal(t, "testing", w.States[1].(*model.EventState).OnEvents[0].EventDataFilter.ToStateData) - assert.Equal(t, model.ActionModeParallel, w.States[1].(*model.EventState).OnEvents[0].ActionMode) - - assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents[0].Actions[0].FunctionRef) - assert.NotEmpty(t, w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef) - assert.Equal(t, model.FromString("${ .patientInfo }"), w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef.Data) - assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].(*model.EventState).OnEvents[0].Actions[1].EventRef.ContextAttributes) - - assert.Equal(t, "PT1S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[1].(*model.EventState).Timeouts.StateExecTimeout.Single) - assert.Equal(t, "PT1H", w.States[1].(*model.EventState).Timeouts.EventTimeout) - assert.Equal(t, "PT3S", w.States[1].(*model.EventState).Timeouts.ActionExecTimeout) + assert.NotEmpty(t, w.States[1].EventState.OnEvents) + 
assert.Equal(t, "StoreCarAuctionBid", w.States[1].Name) + assert.Equal(t, model.StateTypeEvent, w.States[1].Type) + assert.Equal(t, true, w.States[1].EventState.Exclusive) + assert.NotEmpty(t, true, w.States[1].EventState.OnEvents[0]) + assert.Equal(t, []string{"CarBidEvent"}, w.States[1].EventState.OnEvents[0].EventRefs) + assert.Equal(t, true, w.States[1].EventState.OnEvents[0].EventDataFilter.UseData) + assert.Equal(t, "test", w.States[1].EventState.OnEvents[0].EventDataFilter.Data) + assert.Equal(t, "testing", w.States[1].EventState.OnEvents[0].EventDataFilter.ToStateData) + assert.Equal(t, model.ActionModeParallel, w.States[1].EventState.OnEvents[0].ActionMode) + + assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef) + assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.RefName) + assert.Equal(t, "funcref1", w.States[1].EventState.OnEvents[0].Actions[0].Name) + assert.Equal(t, map[string]model.Object{"bid": model.FromString("${ .bid }")}, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.Arguments) + + assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[1].EventRef) + assert.Equal(t, "eventRefName", w.States[1].EventState.OnEvents[0].Actions[1].Name) + assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ResultEventRef) + + data := model.FromString("${ .patientInfo }") + assert.Equal(t, &data, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.Data) + assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ContextAttributes) + + assert.Equal(t, "PT1S", w.States[1].EventState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[1].EventState.Timeouts.StateExecTimeout.Single) + assert.Equal(t, "PT1H", w.States[1].EventState.Timeouts.EventTimeout) + assert.Equal(t, "PT3S", w.States[1].EventState.Timeouts.ActionExecTimeout) // Parallel state - assert.NotEmpty(t, w.States[2].(*model.ParallelState).Branches) - assert.Equal(t, "PT5H", w.States[2].(*model.ParallelState).Branches[0].Timeouts.ActionExecTimeout) - assert.Equal(t, "PT6M", w.States[2].(*model.ParallelState).Branches[0].Timeouts.BranchExecTimeout) - assert.Equal(t, "ParallelExec", w.States[2].GetName()) - assert.Equal(t, model.StateType("parallel"), w.States[2].GetType()) - assert.Equal(t, model.CompletionType("allOf"), w.States[2].(*model.ParallelState).CompletionType) - assert.Equal(t, "PT6M", w.States[2].(*model.ParallelState).Timeouts.BranchExecTimeout) - assert.Equal(t, "PT1S", w.States[2].(*model.ParallelState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[2].(*model.ParallelState).Timeouts.StateExecTimeout.Single) + assert.NotEmpty(t, w.States[2].ParallelState.Branches) + assert.Equal(t, "ShortDelayBranch", w.States[2].ParallelState.Branches[0].Name) + assert.Equal(t, "shortdelayworkflowid", w.States[2].ParallelState.Branches[0].Actions[0].SubFlowRef.WorkflowID) + assert.Equal(t, "PT5H", w.States[2].ParallelState.Branches[0].Timeouts.ActionExecTimeout) + assert.Equal(t, "PT6M", w.States[2].ParallelState.Branches[0].Timeouts.BranchExecTimeout) + assert.Equal(t, "LongDelayBranch", w.States[2].ParallelState.Branches[1].Name) + assert.Equal(t, "longdelayworkflowid", w.States[2].ParallelState.Branches[1].Actions[0].SubFlowRef.WorkflowID) + assert.Equal(t, "ParallelExec", w.States[2].Name) + assert.Equal(t, model.StateTypeParallel, w.States[2].Type) + 
assert.Equal(t, model.CompletionTypeAtLeast, w.States[2].ParallelState.CompletionType) + assert.Equal(t, "PT6M", w.States[2].ParallelState.Timeouts.BranchExecTimeout) + assert.Equal(t, "PT1S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Single) + assert.Equal(t, intstr.IntOrString{IntVal: 13}, w.States[2].ParallelState.NumCompleted) // Switch state - assert.NotEmpty(t, w.States[3].(*model.SwitchState).EventConditions) - assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].GetName()) - assert.Equal(t, model.StateType("switch"), w.States[3].GetType()) - assert.Equal(t, "PT1H", w.States[3].(*model.SwitchState).Timeouts.EventTimeout) - assert.Equal(t, "PT1S", w.States[3].(*model.SwitchState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[3].(*model.SwitchState).Timeouts.StateExecTimeout.Single) - assert.Equal(t, &model.Transition{ - NextState: "HandleNoVisaDecision", - }, w.States[3].(*model.SwitchState).DefaultCondition.Transition) + assert.NotEmpty(t, w.States[3].SwitchState.EventConditions) + assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].Name) + assert.Equal(t, model.StateTypeSwitch, w.States[3].Type) + assert.Equal(t, "visaApprovedEvent", w.States[3].EventConditions[0].Name) + assert.Equal(t, "visaApprovedEventRef", w.States[3].EventConditions[0].EventRef) + assert.Equal(t, "HandleApprovedVisa", w.States[3].EventConditions[0].Transition.NextState) + assert.Equal(t, model.Metadata{"mastercard": model.Object{Type: 1, IntVal: 0, StrVal: "disallowed", RawValue: json.RawMessage(nil)}, + "visa": model.Object{Type: 1, IntVal: 0, StrVal: "allowed", RawValue: json.RawMessage(nil)}}, + w.States[3].EventConditions[0].Metadata) + assert.Equal(t, "visaRejectedEvent", w.States[3].EventConditions[1].EventRef) + assert.Equal(t, "HandleRejectedVisa", w.States[3].EventConditions[1].Transition.NextState) + assert.Equal(t, model.Metadata{"test": model.Object{Type: 1, IntVal: 0, StrVal: "tested", RawValue: json.RawMessage(nil)}}, + w.States[3].EventConditions[1].Metadata) + assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) + assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) + assert.Equal(t, &model.Transition{NextState: "HandleNoVisaDecision"}, w.States[3].SwitchState.DefaultCondition.Transition) // DataBasedSwitchState - dataBased := w.States[4].(*model.SwitchState) + dataBased := w.States[4].SwitchState assert.NotEmpty(t, dataBased.DataConditions) - assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].GetName()) + assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].Name) dataCondition := dataBased.DataConditions[0] assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) assert.Equal(t, &model.Transition{ NextState: "RejectApplication", - }, w.States[4].(*model.SwitchState).DefaultCondition.Transition) - assert.Equal(t, "PT1S", w.States[4].(*model.SwitchState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[4].(*model.SwitchState).Timeouts.StateExecTimeout.Single) + }, w.States[4].DefaultCondition.Transition) + assert.Equal(t, "PT1S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Single) // operation state - 
assert.NotEmpty(t, w.States[5].(*model.OperationState).Actions) - assert.Equal(t, "GreetSequential", w.States[5].GetName()) - assert.Equal(t, model.StateType("operation"), w.States[5].GetType()) - assert.Equal(t, model.ActionModeSequential, w.States[5].(*model.OperationState).ActionMode) - assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].Name) - assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].Name) - assert.NotNil(t, w.States[5].(*model.OperationState).Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[5].(*model.OperationState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "example", w.States[5].(*model.OperationState).Actions[0].EventRef.TriggerEventRef) - assert.Equal(t, "example", w.States[5].(*model.OperationState).Actions[0].EventRef.ResultEventRef) - assert.Equal(t, "PT1H", w.States[5].(*model.OperationState).Actions[0].EventRef.ResultEventTimeout) - assert.Equal(t, "PT1H", w.States[5].(*model.OperationState).Timeouts.ActionExecTimeout) - assert.Equal(t, "PT1S", w.States[5].(*model.OperationState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[5].(*model.OperationState).Timeouts.StateExecTimeout.Single) + assert.NotEmpty(t, w.States[5].OperationState.Actions) + assert.Equal(t, "GreetSequential", w.States[5].Name) + assert.Equal(t, model.StateTypeOperation, w.States[5].Type) + assert.Equal(t, model.ActionModeSequential, w.States[5].OperationState.ActionMode) + assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) + assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) + assert.NotNil(t, w.States[5].OperationState.Actions[0].FunctionRef) + assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].FunctionRef.RefName) + assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) + assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) + assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) + assert.Equal(t, "PT1H", w.States[5].OperationState.Timeouts.ActionExecTimeout) + assert.Equal(t, "PT1S", w.States[5].OperationState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT2S", w.States[5].OperationState.Timeouts.StateExecTimeout.Single) // forEach state - assert.NotEmpty(t, w.States[6].(*model.ForEachState).Actions) - assert.Equal(t, "SendTextForHighPriority", w.States[6].GetName()) - assert.Equal(t, model.ForEachModeTypeParallel, w.States[6].(*model.ForEachState).Mode) - assert.Equal(t, model.StateType("foreach"), w.States[6].GetType()) - assert.Equal(t, "${ .messages }", w.States[6].(*model.ForEachState).InputCollection) - assert.NotNil(t, w.States[6].(*model.ForEachState).Actions) - assert.Equal(t, "test", w.States[6].(*model.ForEachState).Actions[0].Name) - assert.NotNil(t, w.States[6].(*model.ForEachState).Actions[0].FunctionRef) - assert.Equal(t, "sendTextFunction", w.States[6].(*model.ForEachState).Actions[0].FunctionRef.RefName) - assert.Equal(t, "example1", w.States[6].(*model.ForEachState).Actions[0].EventRef.TriggerEventRef) - assert.Equal(t, "example1", w.States[6].(*model.ForEachState).Actions[0].EventRef.ResultEventRef) - assert.Equal(t, "PT12H", w.States[6].(*model.ForEachState).Actions[0].EventRef.ResultEventTimeout) - assert.Equal(t, "PT11H", w.States[6].(*model.ForEachState).Timeouts.ActionExecTimeout) - assert.Equal(t, "PT11S", 
w.States[6].(*model.ForEachState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22S", w.States[6].(*model.ForEachState).Timeouts.StateExecTimeout.Single) + assert.NotEmpty(t, w.States[6].ForEachState.Actions) + assert.Equal(t, "SendTextForHighPriority", w.States[6].Name) + assert.Equal(t, model.ForEachModeTypeSequential, w.States[6].ForEachState.Mode) + assert.Equal(t, model.StateTypeForEach, w.States[6].Type) + assert.Equal(t, "${ .messages }", w.States[6].ForEachState.InputCollection) + assert.Equal(t, "${ .outputMessages }", w.States[6].ForEachState.OutputCollection) + assert.Equal(t, "${ .this }", w.States[6].ForEachState.IterationParam) + + batchSize := intstr.FromInt(45) + assert.Equal(t, &batchSize, w.States[6].ForEachState.BatchSize) + + assert.NotNil(t, w.States[6].ForEachState.Actions) + assert.Equal(t, "test", w.States[6].ForEachState.Actions[0].Name) + assert.NotNil(t, w.States[6].ForEachState.Actions[0].FunctionRef) + assert.Equal(t, "sendTextFunction", w.States[6].ForEachState.Actions[0].FunctionRef.RefName) + assert.Equal(t, map[string]model.Object{"message": model.FromString("${ .singlemessage }")}, w.States[6].ForEachState.Actions[0].FunctionRef.Arguments) + + assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) + assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) + assert.Equal(t, "PT12H", w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) + + assert.Equal(t, "PT11H", w.States[6].ForEachState.Timeouts.ActionExecTimeout) + assert.Equal(t, "PT11S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Single) // Inject state - assert.Equal(t, map[string]model.Object{"result": model.FromString("Hello World, last state!")}, w.States[7].(*model.InjectState).Data) - assert.Equal(t, "HelloInject", w.States[7].GetName()) - assert.Equal(t, model.StateType("inject"), w.States[7].GetType()) - assert.Equal(t, "PT11M", w.States[7].(*model.InjectState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[7].(*model.InjectState).Timeouts.StateExecTimeout.Single) + assert.Equal(t, "HelloInject", w.States[7].Name) + assert.Equal(t, model.StateTypeInject, w.States[7].Type) + assert.Equal(t, map[string]model.Object{"result": model.FromString("Hello World, last state!")}, w.States[7].InjectState.Data) + assert.Equal(t, "PT11M", w.States[7].InjectState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22M", w.States[7].InjectState.Timeouts.StateExecTimeout.Single) // callback state - assert.NotEmpty(t, w.States[8].(*model.CallbackState).Action) - assert.Equal(t, "CheckCreditCallback", w.States[8].GetName()) - assert.Equal(t, model.StateType("callback"), w.States[8].GetType()) - assert.Equal(t, "callCreditCheckMicroservice", w.States[8].(*model.CallbackState).Action.FunctionRef.RefName) + assert.NotEmpty(t, w.States[8].CallbackState.Action) + assert.Equal(t, "CheckCreditCallback", w.States[8].Name) + assert.Equal(t, model.StateTypeCallback, w.States[8].Type) + assert.Equal(t, "callCreditCheckMicroservice", w.States[8].CallbackState.Action.FunctionRef.RefName) assert.Equal(t, map[string]model.Object{"argsObj": model.FromRaw(map[string]interface{}{"age": 10, "name": "hi"}), "customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, - w.States[8].(*model.CallbackState).Action.FunctionRef.Arguments) - assert.Equal(t, "PT10S", w.States[8].(*model.CallbackState).Action.Sleep.Before) - 
assert.Equal(t, "PT20S", w.States[8].(*model.CallbackState).Action.Sleep.After) - assert.Equal(t, "PT150M", w.States[8].(*model.CallbackState).Timeouts.ActionExecTimeout) - assert.Equal(t, "PT34S", w.States[8].(*model.CallbackState).Timeouts.EventTimeout) - assert.Equal(t, "PT115M", w.States[8].(*model.CallbackState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[8].(*model.CallbackState).Timeouts.StateExecTimeout.Single) + w.States[8].CallbackState.Action.FunctionRef.Arguments) + assert.Equal(t, "PT10S", w.States[8].CallbackState.Action.Sleep.Before) + assert.Equal(t, "PT20S", w.States[8].CallbackState.Action.Sleep.After) + assert.Equal(t, "PT150M", w.States[8].CallbackState.Timeouts.ActionExecTimeout) + assert.Equal(t, "PT34S", w.States[8].CallbackState.Timeouts.EventTimeout) + assert.Equal(t, "PT115M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT22M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Single) + + assert.Equal(t, true, w.States[8].CallbackState.EventDataFilter.UseData) + assert.Equal(t, "test data", w.States[8].CallbackState.EventDataFilter.Data) + assert.Equal(t, "${ .customer }", w.States[8].CallbackState.EventDataFilter.ToStateData) // sleepState - assert.NotEmpty(t, w.States[9].(*model.SleepState).Duration) - assert.Equal(t, "WaitForCompletionSleep", w.States[9].GetName()) - assert.Equal(t, model.StateType("sleep"), w.States[9].GetType()) - assert.Equal(t, "PT5S", w.States[9].(*model.SleepState).Duration) - assert.NotNil(t, w.States[9].(*model.SleepState).Timeouts) - assert.Equal(t, "PT100S", w.States[9].(*model.SleepState).Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT200S", w.States[9].(*model.SleepState).Timeouts.StateExecTimeout.Single) - assert.Equal(t, &model.Transition{ - NextState: "GetJobStatus", - }, w.States[9].(*model.SleepState).Transition) + assert.NotEmpty(t, w.States[9].SleepState.Duration) + assert.Equal(t, "WaitForCompletionSleep", w.States[9].Name) + assert.Equal(t, model.StateTypeSleep, w.States[9].Type) + assert.Equal(t, "PT5S", w.States[9].SleepState.Duration) + assert.NotNil(t, w.States[9].SleepState.Timeouts) + assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) + assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) + assert.Equal(t, &model.Transition{NextState: "GetJobStatus"}, w.States[9].Transition) + assert.Equal(t, true, w.States[9].End.Terminate) }, }, } @@ -505,6 +553,7 @@ func TestFromFile(t *testing.T) { file.name, func(t *testing.T) { workflow, err := FromFile(file.name) if assert.NoError(t, err, "Test File %s", file.name) { + assert.NotNil(t, workflow, "Test File %s", file.name) file.f(t, workflow) } @@ -519,7 +568,7 @@ func TestUnmarshalWorkflowBasicTests(t *testing.T) { id: helloworld version: '1.0.0' specVersion: '0.8' -name: Hello World Workflow +name: TestUnmarshalWorkflowBasicTests description: Inject Hello World start: Hello State states: @@ -582,6 +631,7 @@ states: assert.Nil(t, err) assert.NotNil(t, workflow.Auth) + // TODO correctly marshall end: true (fixed by https://github.com/serverlessworkflow/sdk-go/pull/147) b, _ := json.Marshal(workflow) assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is 
valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{},\"data\":{\"result\":\"Hello World!\"}}]}", string(b)) @@ -649,12 +699,12 @@ states: } func TestUnmarshalWorkflowSwitchState(t *testing.T) { - t.Run("WorkflowSwitchStateEventConditions", func(t *testing.T) { + t.Run("WorkflowStatesTest", func(t *testing.T) { workflow, err := FromYAMLSource([]byte(` id: helloworld version: '1.0.0' specVersion: '0.8' -name: Hello World Workflow +name: WorkflowStatesTest description: Inject Hello World start: Hello State metadata: @@ -669,22 +719,125 @@ auth: auth1: auth1 auth2: auth2 states: -- name: Hello State +- name: GreetDelay + type: delay + timeDelay: PT5S + transition: + nextState: Hello State +- name: StoreCarAuctionBid + type: event + exclusive: true + onEvents: + - eventRefs: + - CarBidEvent + eventDataFilter: + useData: true + data: "test" + toStateData: "testing" + actionMode: parallel + actions: + - functionRef: + refName: StoreBidFunction + arguments: + bid: "${ .bid }" + name: bidFunctionRef + - eventRef: + triggerEventRef: StoreBidFunction + data: "${ .patientInfo }" + resultEventRef: StoreBidFunction + contextAttributes: + customer: "${ .thatBid }" + time: 32 + name: bidEventRef + timeouts: + eventTimeout: PT1H + actionExecTimeout: PT3S + stateExecTimeout: + total: PT1S + single: PT2S +- name: ParallelExec + type: parallel + completionType: atLeast + branches: + - name: ShortDelayBranch + actions: + - subFlowRef: shortdelayworkflowid + timeouts: + actionExecTimeout: "PT5H" + branchExecTimeout: "PT6M" + - name: LongDelayBranch + actions: + - subFlowRef: longdelayworkflowid + timeouts: + branchExecTimeout: "PT6M" + stateExecTimeout: + total: PT1S + single: PT2S + numCompleted: 13 +- name: CheckVisaStatusSwitchEventBased type: switch eventConditions: - - eventRef: visaApprovedEvent + - name: visaApprovedEvent + eventRef: visaApprovedEventRef transition: nextState: HandleApprovedVisa + metadata: + visa: allowed + mastercard: disallowed - eventRef: visaRejectedEvent transition: nextState: HandleRejectedVisa + metadata: + test: tested + timeouts: + eventTimeout: PT10H + stateExecTimeout: + total: PT10S + single: PT20S defaultCondition: transition: nextState: CheckCreditCallback +- name: SendTextForHighPriority + type: foreach + inputCollection: "${ .messages }" + outputCollection: "${ .outputMessages }" + iterationParam: "${ .this }" + batchSize: 45 + mode: sequential + actions: + - name: test + functionRef: + refName: sendTextFunction + arguments: + message: "${ .singlemessage }" + eventRef: + triggerEventRef: example1 + resultEventRef: example2 + # Added "resultEventTimeout" for action eventref + resultEventTimeout: PT12H + timeouts: + actionExecTimeout: PT11H + stateExecTimeout: + total: PT11S + single: PT22S - name: HelloInject type: inject data: result: Hello World, another state! 
+ timeouts: + stateExecTimeout: + total: PT11M + single: PT22M +- name: WaitForCompletionSleep + type: sleep + duration: PT5S + transition: GetJobStatus + timeouts: + stateExecTimeout: + total: PT100S + single: PT200S + end: + terminate: true - name: CheckCreditCallback type: callback action: @@ -704,9 +857,13 @@ states: before: PT10S after: PT20S eventRef: CreditCheckCompletedEvent + eventDataFilter: + useData: true + data: "test data" + toStateData: "${ .customer }" timeouts: - actionExecTimeout: PT150M - eventTimeout: PT34S + actionExecTimeout: PT199M + eventTimeout: PT348S stateExecTimeout: total: PT115M single: PT22M @@ -715,102 +872,65 @@ states: actions: - subFlowRef: workflowId: handleApprovedVisaWorkflowID + name: subFlowRefName - eventRef: triggerEventRef: StoreBidFunction data: "${ .patientInfo }" resultEventRef: StoreBidFunction contextAttributes: customer: "${ .customer }" - time: 48 - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleRejectedVisaWorkflowID - end: - terminate: true -- name: HandleNoVisaDecision - type: operation - actions: - - subFlowRef: - workflowId: handleNoVisaDecisionWorkfowId + time: 50 + name: eventRefName + timeouts: + actionExecTimeout: PT777S + stateExecTimeout: + total: PT33M + single: PT123M end: terminate: true - `)) assert.Nil(t, err) + fmt.Println(err) assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) + assert.Nil(t, err) - assert.True(t, strings.Contains(string(b), "eventConditions")) - assert.True(t, strings.Contains(string(b), "\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48}")) + + // workflow and auth metadata assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) - assert.True(t, strings.Contains(string(b), "\"data\":\"${ .patientInfo }\"")) - assert.True(t, strings.Contains(string(b), "\"contextAttributes\":{\"customer\":\"${ .customer }\",\"time\":48}")) - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"data\":{\"result\":\"Hello World, another state!\"}}")) - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) + // Callback state + assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) - t.Run("WorkflowSwitchStateDataConditions", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: Hello World Workflow -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: switch - dataConditions: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: 
HandleRejectedVisa - defaultCondition: - transition: - nextState: HandleNoVisaDecision -- name: HandleApprovedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleRejectedVisaWorkflowID - end: - terminate: true -- name: HandleNoVisaDecision - type: operation - actions: - - subFlowRef: - workflowId: handleNoVisaDecisionWorkfowId - end: - terminate: true -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow) + // Operation State + assert.True(t, strings.Contains(string(b), "{\"name\":\"HandleApprovedVisa\",\"type\":\"operation\",\"end\":{\"terminate\":true},\"actionMode\":\"sequential\",\"actions\":[{\"name\":\"subFlowRefName\",\"subFlowRef\":{\"workflowId\":\"handleApprovedVisaWorkflowID\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"eventRefName\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .customer }\",\"time\":50},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT123M\",\"total\":\"PT33M\"},\"actionExecTimeout\":\"PT777S\"}}")) - b, err := json.Marshal(workflow) - assert.Nil(t, err) - assert.True(t, strings.Contains(string(b), "dataConditions")) + // Delay State + assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"Hello State\"},\"timeDelay\":\"PT5S\"}")) + + // Event State + assert.True(t, strings.Contains(string(b), "{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) + + // Parallel State + assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) + + // Switch State + assert.True(t, strings.Contains(string(b), 
"{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"CheckCreditCallback\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"dataConditions\":null,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) + + // Foreach State + assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage }\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) + + // Inject State + assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) + + // Sleep State + assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"transition\":{\"nextState\":\"GetJobStatus\"},\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) workflow = nil err = json.Unmarshal(b, &workflow) assert.Nil(t, err) + }) t.Run("WorkflowSwitchStateDataConditions with wrong field name", func(t *testing.T) { @@ -818,7 +938,7 @@ states: id: helloworld version: '1.0.0' specVersion: '0.8' -name: Hello World Workflow +name: WorkflowSwitchStateDataConditions with wrong field name description: Inject Hello World start: Hello State states: @@ -852,7 +972,7 @@ states: type: operation actions: - subFlowRef: - workflowId: handleNoVisaDecisionWorkfowId + workflowId: handleNoVisaDecisionWorkflowId end: terminate: true `)) diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 336321c..3b0bcf3 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -50,6 +50,7 @@ states: refName: StoreBidFunction arguments: bid: "${ .bid }" + name: funcref1 - eventRef: triggerEventRef: StoreBidFunction data: "${ .patientInfo }" @@ -57,6 +58,7 @@ states: contextAttributes: customer: "${ .customer }" time: 48 + name: eventRefName timeouts: eventTimeout: PT1H actionExecTimeout: PT3S @@ -65,7 +67,7 @@ states: single: PT2S - name: ParallelExec type: parallel - completionType: allOf + completionType: atLeast branches: - name: ShortDelayBranch actions: @@ -81,13 +83,20 @@ states: stateExecTimeout: total: PT1S single: PT2S + numCompleted: 13 - name: CheckVisaStatusSwitchEventBased type: switch eventConditions: - - eventRef: visaApprovedEvent + - name: visaApprovedEvent + eventRef: visaApprovedEventRef transition: HandleApprovedVisa + 
metadata: + visa: allowed + mastercard: disallowed - eventRef: visaRejectedEvent transition: HandleRejectedVisa + metadata: + test: tested timeouts: eventTimeout: PT1H stateExecTimeout: @@ -134,6 +143,10 @@ states: - name: SendTextForHighPriority type: foreach inputCollection: "${ .messages }" + outputCollection: "${ .outputMessages }" + iterationParam: "${ .this }" + batchSize: 45 + mode: sequential actions: - name: test functionRef: @@ -142,7 +155,7 @@ states: message: "${ .singlemessage }" eventRef: triggerEventRef: example1 - resultEventRef: example1 + resultEventRef: example2 # Added "resultEventTimeout" for action eventref resultEventTimeout: PT12H timeouts: @@ -174,6 +187,10 @@ states: before: PT10S after: PT20S eventRef: CreditCheckCompletedEvent + eventDataFilter: + useData: true + data: "test data" + toStateData: "${ .customer }" timeouts: actionExecTimeout: PT150M eventTimeout: PT34S @@ -188,4 +205,5 @@ states: stateExecTimeout: total: PT100S single: PT200S - end: true + end: + terminate: true diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml index 47a9ff8..8f5447b 100644 --- a/parser/testdata/workflows/greetings.sw.yaml +++ b/parser/testdata/workflows/greetings.sw.yaml @@ -24,6 +24,7 @@ functions: operation: file://myapis/greetingapis.json#greeting states: - name: Greet + id: idx type: operation actionMode: sequential actions: From 87909521f257fad0b94528d0fd906208e39f90ef Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 7 Mar 2023 16:51:29 -0300 Subject: [PATCH 055/110] add simplistic operator-sdk api to test integration (#146) * add simplistic operator-sdk api to test integration Signed-off-by: Spolti * address x/net cve Signed-off-by: Spolti * Update kubernetes/groupversion_info.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: Spolti * Update .github/workflows/Go-SDK-Check-k8s-integration.yaml Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: Spolti * Update kubernetes/k8s_workflow_integration.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: Spolti * Update kubernetes/k8s_workflow_integration.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: Spolti * fix type names Signed-off-by: Spolti * add a example CR for reference only Signed-off-by: Spolti --------- Signed-off-by: Spolti Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> --- .../Go-SDK-Check-k8s-integration.yaml | 58 +++++++++++++++ .gitignore | 5 +- Makefile | 24 ++++++- go.mod | 20 ++++-- go.sum | 45 +++++++++--- kubernetes/groupversion_info.go | 39 ++++++++++ kubernetes/k8s_workflow_integration.go | 72 +++++++++++++++++++ kubernetes/workflow_cr_example.yaml | 61 ++++++++++++++++ 8 files changed, 307 insertions(+), 17 deletions(-) create mode 100644 .github/workflows/Go-SDK-Check-k8s-integration.yaml create mode 100644 kubernetes/groupversion_info.go create mode 100644 kubernetes/k8s_workflow_integration.go create mode 100644 kubernetes/workflow_cr_example.yaml diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml b/.github/workflows/Go-SDK-Check-k8s-integration.yaml new file mode 100644 index 0000000..2b8c5e5 --- /dev/null +++ b/.github/workflows/Go-SDK-Check-k8s-integration.yaml @@ -0,0 +1,58 @@ +# Copyright 2023 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Go SDK Check k8s integration +on: + pull_request: + paths-ignore: + - "**.md" + - "hack/**" + - "LICENSE" + - "Makefile" + branches: + - main +env: + GO_VERSION: 1.19 +jobs: + basic_checks: + name: Basic Checks + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v3 + - name: Setup Go ${{ env.GO_VERSION }} + uses: actions/setup-go@v3 + with: + go-version: ${{ env.GO_VERSION }} + id: go + - name: Cache dependencies + uses: actions/cache@v3 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: Cache tools + uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} + restore-keys: | + ${{ runner.os }}-go-tools- + - name: Check K8s Integration + run: | + export GOPATH=$(go env GOPATH) + make kube-integration \ No newline at end of file diff --git a/.gitignore b/.gitignore index 036f9f5..33227af 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ bin .idea *.out -.vscode \ No newline at end of file +.vscode + +# ignore config directory generated by the controller-gen tool +config diff --git a/Makefile b/Makefile index f713372..ff78471 100644 --- a/Makefile +++ b/Makefile @@ -19,5 +19,25 @@ test: deepcopy @go test ./... .PHONY: deepcopy -deepcopy: $(DEEPCOPY_GEN) ## Download deeepcopy-gen locally if necessary. - ./hack/deepcopy-gen.sh deepcopy \ No newline at end of file +deepcopy: $(DEEPCOPY_GEN) ## Download deepcopy-gen locally if necessary. + ./hack/deepcopy-gen.sh deepcopy + +.PHONY: kube-integration +kube-integration: controller-gen + $(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./..." output:crd:artifacts:config=config/crd/bases + + +#################################### +# install controller-gen tool +## Location to install dependencies to +LOCALBIN ?= $(shell pwd)/bin +$(LOCALBIN): + mkdir -p $(LOCALBIN) + +CONTROLLER_TOOLS_VERSION ?= v0.9.2 +CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen +.PHONY: controller-gen +controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. 
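+# Illustrative usage sketch (editorial note, not generated tooling): running
+#
+#   make kube-integration
+#
+# locally mirrors what the "Go SDK Check k8s integration" workflow above does in CI.
+# controller-gen is installed into ./bin on first use, and the generated CRDs land
+# under config/crd/bases, which the .gitignore entry above keeps out of the repository.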
+$(CONTROLLER_GEN): $(LOCALBIN) + test -s $(LOCALBIN)/controller-gen || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-tools/cmd/controller-gen@$(CONTROLLER_TOOLS_VERSION) + diff --git a/go.mod b/go.mod index de9bc32..ea25056 100644 --- a/go.mod +++ b/go.mod @@ -6,9 +6,10 @@ require ( github.com/go-playground/validator/v10 v10.11.1 github.com/pkg/errors v0.9.1 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 - github.com/stretchr/testify v1.7.0 + github.com/stretchr/testify v1.8.0 gopkg.in/yaml.v3 v3.0.1 - k8s.io/apimachinery v0.25.1 + k8s.io/apimachinery v0.26.2 + sigs.k8s.io/controller-runtime v0.14.4 sigs.k8s.io/yaml v1.3.0 ) @@ -18,15 +19,22 @@ require ( github.com/go-playground/locales v0.14.0 // indirect github.com/go-playground/universal-translator v0.18.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/google/gofuzz v1.1.0 // indirect + github.com/google/gofuzz v1.2.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect github.com/leodido/go-urn v1.2.1 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d // indirect - golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect - golang.org/x/text v0.3.7 // indirect + golang.org/x/net v0.7.0 // indirect + golang.org/x/sys v0.5.0 // indirect + golang.org/x/text v0.7.0 // indirect + gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - k8s.io/klog/v2 v2.70.1 // indirect + k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 // indirect + k8s.io/utils v0.0.0-20221128185143-99ec85e7a448 // indirect sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect ) replace golang.org/x/text => golang.org/x/text v0.3.8 diff --git a/go.sum b/go.sum index d1b738b..e2bb434 100644 --- a/go.sum +++ b/go.sum @@ -15,8 +15,12 @@ github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJ github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= -github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -29,6 +33,13 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.2.1 
h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/onsi/ginkgo/v2 v2.6.0 h1:9t9b9vRUbFq3C4qKFCGkVuq/fIHji802N1nrtkh1mNc= +github.com/onsi/gomega v1.24.1 h1:KORJXNNTzJXzu4ScJWssJfJMnJ+2QJqhoQSRwNlze9E= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -39,10 +50,15 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 h1:Dz0HrI1AtNSGCE8LXLLqoZU4iuOJXPWndenCsZfstA8= github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46/go.mod h1:is8FVkzSi7PYLWEXT5MgWhglFsyyiW8ffxAoJqfuFZo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= @@ -63,6 +79,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -75,8 +93,9 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= @@ -94,17 +113,27 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/apimachinery v0.25.1 h1:t0XrnmCEHVgJlR2arwO8Awp9ylluDic706WePaYCBTI= -k8s.io/apimachinery v0.25.1/go.mod h1:hqqA1X0bsgsxI6dXsJ4HnNTBOmJNxyPp8dw3u2fSHwA= -k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= -k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/api v0.26.1 h1:f+SWYiPd/GsiWwVRz+NbFyCgvv75Pk9NK6dlkZgpCRQ= +k8s.io/apimachinery v0.26.2 h1:da1u3D5wfR5u2RpLhE/ZtZS2P7QvDgLZTi9wrNZl/tQ= +k8s.io/apimachinery v0.26.2/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= +k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 h1:hWRbsoRWt44OEBnYUd4ceLy4ofBoh+p9vauWp/I5Gdg= +k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/utils v0.0.0-20221128185143-99ec85e7a448 h1:KTgPnR10d5zhztWptI952TNtt/4u5h3IzDXkdIMuo2Y= +k8s.io/utils v0.0.0-20221128185143-99ec85e7a448/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/controller-runtime v0.14.4 h1:Kd/Qgx5pd2XUL08eOV2vwIq3L9GhIbJ5Nxengbd4/0M= +sigs.k8s.io/controller-runtime v0.14.4/go.mod h1:WqIdsAY6JBsjfc/CqO0CORmNtoCtE4S6qbPc9s68h+0= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod 
h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= +sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/kubernetes/groupversion_info.go b/kubernetes/groupversion_info.go new file mode 100644 index 0000000..9b85567 --- /dev/null +++ b/kubernetes/groupversion_info.go @@ -0,0 +1,39 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package kubernetes contains API Schema definitions for the serverless v1alpha1 API group +// +kubebuilder:object:generate=true +// +groupName=sdk.serverless.workflow +package kubernetes + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects + GroupVersion = schema.GroupVersion{Group: "io.serverlessworkflow", Version: "v1alpha1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme +) + +// Resource takes an unqualified resource and returns a Group qualified GroupResource. +func Resource(resource string) schema.GroupResource { + return GroupVersion.WithResource(resource).GroupResource() +} diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/k8s_workflow_integration.go new file mode 100644 index 0000000..af8b484 --- /dev/null +++ b/kubernetes/k8s_workflow_integration.go @@ -0,0 +1,72 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package kubernetes + +import ( + "github.com/serverlessworkflow/sdk-go/v2/model" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime" +) + +// This package provides a very simple api for kubernetes operator to test the integration +// of the Serverless SDK-Go with operator-sdk controller-gen and deepcopy-gen tools. 
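+// (Editorial sketch, not code from this patch: an operator built on
+// controller-runtime would typically register these types through the helpers
+// in groupversion_info.go above, roughly
+//
+//	utilruntime.Must(kubernetes.AddToScheme(mgr.GetScheme()))
+//
+// where mgr is the controller-runtime manager and utilruntime aliases
+// k8s.io/apimachinery/pkg/util/runtime; that surrounding wiring is assumed,
+// not shown in this repository.)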
+// The purpose of this integration is to stop issues like below beforehand: +// github.com/serverlessworkflow/sdk-go/model/event.go:51:2: encountered struct field "" without JSON tag in type "Event" +// github.com/serverlessworkflow/sdk-go/model/states.go:66:12: unsupported AST kind *ast.InterfaceType + +// ServerlessWorkflowSpec defines a base API for integration test with operator-sdk +type ServerlessWorkflowSpec struct { + BaseWorkflow model.BaseWorkflow `json:"inline"` + Events []model.Event `json:"events,omitempty"` + Functions []model.Function `json:"functions,omitempty"` + Retries []model.Retry `json:"retries,omitempty"` + States []model.State `json:"states"` +} + +// ServerlessWorkflow ... +// +kubebuilder:object:root=true +// +kubebuilder:object:generate=true +// +kubebuilder:subresource:status +// +k8s:openapi-gen=true +type ServerlessWorkflow struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + Spec ServerlessWorkflowSpec `json:"spec,omitempty"` + Status string `json:"status,omitempty"` +} + +// ServerlessWorkflowList contains a list of SDKServerlessWorkflow +// +kubebuilder:object:root=true +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +type ServerlessWorkflowList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []ServerlessWorkflow `json:"items"` +} + +func (S ServerlessWorkflowList) DeepCopyObject() runtime.Object { + //TODO implement me + panic("implement me") +} + +func (S ServerlessWorkflow) DeepCopyObject() runtime.Object { + //TODO implement me + panic("implement me") +} + +func init() { + SchemeBuilder.Register(&ServerlessWorkflow{}, &ServerlessWorkflowList{}) +} diff --git a/kubernetes/workflow_cr_example.yaml b/kubernetes/workflow_cr_example.yaml new file mode 100644 index 0000000..5d79bfa --- /dev/null +++ b/kubernetes/workflow_cr_example.yaml @@ -0,0 +1,61 @@ +# Copyright 2023 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# This is an example on how the CR would look like when importing the sdk go types into your operator's spec +# the cr instantiation would be validated by this issue: https://github.com/serverlessworkflow/sdk-go/issues/152 + +apiVersion: io.serverlessworkflow/v08 +kind: ServerlessWorkflow +metadata: + name: custom.greeting +spec: + functions: + - name: greetFunction + type: custom + operation: sysout + states: + - name: ChooseOnLanguage + type: switch + dataConditions: + - condition: "${ .language == \"English\" }" + transition: GreetInEnglish + - condition: "${ .language == \"Spanish\" }" + transition: GreetInSpanish + - condition: "${ .language == \"Italian\" }" + transition: GreetInItalian + defaultCondition: GreetInEnglish + - name: GreetInEnglish + type: inject + data: + greeting: "Hello from JSON Workflow, " + transition: GreetPerson + - name: GreetInSpanish + type: inject + data: + greeting: "Saludos desde JSON Workflow, " + transition: GreetPerson + - name: GreetInItalian + type: inject + data: + greeting: "Saluti dal JSON Workflow, " + transition: GreetPerson + - name: GreetPerson + type: operation + actions: + - name: greetAction + functionRef: + refName: greetFunction + arguments: + message: ".greeting+.name" + end: true \ No newline at end of file From d76e8d0ee322b0f489a0f61c935189b0f30dd051 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Thu, 9 Mar 2023 12:05:08 -0300 Subject: [PATCH 056/110] Unmarshal state.end when boolean (#155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Unmarshal state.end when boolean Signed-off-by: AndrΓ© R. de Miranda * Change unit test end to boolean Signed-off-by: AndrΓ© R. de Miranda * Add function primitiveOrMapType and used in End.Unmarshal Signed-off-by: AndrΓ© R. de Miranda * Unmarshal Workflow.End direct without using a map Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- model/util.go | 10 ++++++++ model/util_test.go | 46 +++++++++++++++++++++++++++++++++++ model/workflow.go | 26 +++++++------------- model/workflow_test.go | 54 ++++++++++++++++++++++++++++++++++++++++++ parser/parser_test.go | 34 ++++++++++++++++++++------ 5 files changed, 146 insertions(+), 24 deletions(-) diff --git a/model/util.go b/model/util.go index 8796e9c..ebce2aa 100644 --- a/model/util.go +++ b/model/util.go @@ -85,6 +85,7 @@ func requiresNotNilOrEmpty(value interface{}) string { return value.(string) } +// TODO: check the places that use unmarshalString if the case changes for primitiveOrStruct. 
func unmarshalString(data []byte) (string, error) { var value string if err := json.Unmarshal(data, &value); err != nil { @@ -93,6 +94,15 @@ func unmarshalString(data []byte) (string, error) { return value, nil } +func primitiveOrStruct[T any, U any](data []byte) (valStruct *U, valPrimitive T, err error) { + if data[0] == '{' { + err = json.Unmarshal(data, &valStruct) + } else { + err = json.Unmarshal(data, &valPrimitive) + } + return +} + func unmarshalKey(key string, data map[string]json.RawMessage, output interface{}) error { if _, found := data[key]; found { if err := json.Unmarshal(data[key], output); err != nil { diff --git a/model/util_test.go b/model/util_test.go index ce43d7b..478c7e0 100644 --- a/model/util_test.go +++ b/model/util_test.go @@ -15,6 +15,7 @@ package model import ( + "encoding/json" "testing" "github.com/stretchr/testify/assert" @@ -33,3 +34,48 @@ func TestIncludePaths(t *testing.T) { SetIncludePaths([]string{"1"}) })) } + +func Test_primitiveOrMapType(t *testing.T) { + type dataMap map[string]json.RawMessage + data := []byte(`"value":true`) + _, _, err := primitiveOrStruct[bool, dataMap](data) + assert.Error(t, err) + + data = []byte(`{value":true}`) + _, _, err = primitiveOrStruct[bool, dataMap](data) + assert.Error(t, err) + + data = []byte(`value":true}`) + _, _, err = primitiveOrStruct[bool, dataMap](data) + assert.Error(t, err) + + data = []byte(`"true"`) + _, _, err = primitiveOrStruct[bool, dataMap](data) + assert.Error(t, err) + + data = []byte(`true`) + valMap, valBool, err := primitiveOrStruct[bool, dataMap](data) + assert.NoError(t, err) + assert.Nil(t, valMap) + assert.True(t, valBool) + + data = []byte(`"true"`) + valMap, valString, err := primitiveOrStruct[string, dataMap](data) + assert.NoError(t, err) + assert.Nil(t, valMap) + assert.Equal(t, `true`, valString) + + data = []byte(`{"value":true}`) + valMap, valBool, err = primitiveOrStruct[bool, dataMap](data) + assert.NoError(t, err) + assert.NotNil(t, valMap) + assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) + assert.False(t, valBool) + + data = []byte(`{"value": "true"}`) + valMap, valBool, err = primitiveOrStruct[bool, dataMap](data) + assert.NoError(t, err) + assert.NotNil(t, valMap) + assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) + assert.False(t, valBool) +} diff --git a/model/workflow.go b/model/workflow.go index d3ffd58..c182929 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -508,25 +508,17 @@ type End struct { // UnmarshalJSON ... 
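// After this change End accepts either a bare boolean or an object form; the
// table tests added in model/workflow_test.go below exercise both. Illustrative
// inputs and results:
//
//	true                 -> End{Terminate: true}
//	{"terminate": true}  -> End{Terminate: true}
//	"true"               -> error: cannot unmarshal string into Go value of type bool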
func (e *End) UnmarshalJSON(data []byte) error { - - endMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &endMap); err != nil { - e.Terminate = false - e.Compensate = false - return nil - } - - if err := unmarshalKey("compensate", endMap, &e.Compensate); err != nil { - return err - } - if err := unmarshalKey("terminate", endMap, &e.Terminate); err != nil { - return err - } - if err := unmarshalKey("produceEvents", endMap, &e.ProduceEvents); err != nil { + type endUnmarshal End + end, endBool, err := primitiveOrStruct[bool, endUnmarshal](data) + if err != nil { return err } - if err := unmarshalKey("continueAs", endMap, &e.ContinueAs); err != nil { - return err + + if end == nil { + e.Terminate = endBool + e.Compensate = false + } else { + *e = End(*end) } return nil diff --git a/model/workflow_test.go b/model/workflow_test.go index 388fe07..32ba486 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -148,3 +148,57 @@ func TestContinueAsUnmarshalJSON(t *testing.T) { }) } } + +func TestEndUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect End + err string + } + testCases := []testCase{ + { + desp: "bool success", + data: `true`, + expect: End{ + Terminate: true, + }, + err: ``, + }, + { + desp: "string fail", + data: `"true"`, + expect: End{}, + err: `json: cannot unmarshal string into Go value of type bool`, + }, + { + desp: `object success`, + data: `{"terminate": true}`, + expect: End{ + Terminate: true, + }, + err: ``, + }, + { + desp: `object key invalid`, + data: `{"terminate_parameter_invalid": true}`, + expect: End{}, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v End + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/parser/parser_test.go b/parser/parser_test.go index 547557a..d9c6f15 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -73,6 +73,8 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "greeting", w.ID) assert.IsType(t, &model.OperationState{}, w.States[0].OperationState) assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) + assert.NotNil(t, w.States[0].End) + assert.True(t, w.States[0].End.Terminate) }, }, { "./testdata/workflows/actiondata-defaultvalue.yaml", @@ -82,6 +84,8 @@ func TestFromFile(t *testing.T) { assert.NotNil(t, w.States[0].OperationState) assert.Equal(t, true, w.States[0].OperationState.Actions[0].ActionDataFilter.UseResults) assert.Equal(t, "greeting", w.States[0].OperationState.Actions[0].Name) + assert.NotNil(t, w.States[0].End) + assert.True(t, w.States[0].End.Terminate) }, }, { "./testdata/workflows/greetings.sw.yaml", @@ -93,6 +97,7 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, w.States[0].OperationState.Actions) assert.NotNil(t, w.States[0].OperationState.Actions[0].FunctionRef) assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) + assert.True(t, w.States[0].End.Terminate) }, }, { "./testdata/workflows/eventbaseddataandswitch.sw.json", @@ -105,11 +110,16 @@ func TestFromFile(t *testing.T) { assert.NotNil(t, w.States[1]) assert.NotNil(t, w.States[1].SwitchState) assert.Equal(t, "PT1H", w.States[1].SwitchState.Timeouts.EventTimeout) + assert.Nil(t, w.States[1].End) + assert.NotNil(t, w.States[2].End) + assert.True(t, w.States[2].End.Terminate) 
}, }, { "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { operationState := w.States[0].OperationState assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) + assert.NotNil(t, w.States[0].End) + assert.True(t, w.States[0].End.Terminate) }, }, { "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { @@ -121,6 +131,8 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) assert.Equal(t, true, eventState.Exclusive) + assert.NotNil(t, w.States[0].End) + assert.True(t, w.States[0].End.Terminate) }, }, { "./testdata/workflows/eventbasedgreetingexclusive.sw.json", func(t *testing.T, w *model.Workflow) { @@ -277,7 +289,9 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) assert.Equal(t, "checkInboxFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) assert.Equal(t, "SendTextForHighPriority", w.States[0].Transition.NextState) - assert.False(t, w.States[1].End.Terminate) + assert.Nil(t, w.States[0].End) + assert.NotNil(t, w.States[1].End) + assert.True(t, w.States[1].End.Terminate) }, }, { "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { @@ -553,7 +567,6 @@ func TestFromFile(t *testing.T) { file.name, func(t *testing.T) { workflow, err := FromFile(file.name) if assert.NoError(t, err, "Test File %s", file.name) { - assert.NotNil(t, workflow, "Test File %s", file.name) file.f(t, workflow) } @@ -623,17 +636,24 @@ states: "data": { "result": "Hello World!" }, - "end": true - } + "transition": "Next Hello State" + }, + { + "name": "Next Hello State", + "type": "inject", + "data": { + "result": "Next Hello World!" 
+ }, + "end": true + } ] } `)) assert.Nil(t, err) assert.NotNil(t, workflow.Auth) - // TODO correctly marshall end: true (fixed by https://github.com/serverlessworkflow/sdk-go/pull/147) b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{},\"data\":{\"result\":\"Hello World!\"}}]}", + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", string(b)) }) @@ -664,7 +684,7 @@ states: assert.NotNil(t, workflow.Auth) b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{},\"data\":{\"result\":\"Hello World!\"}}]}", + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", string(b)) }) From 6a4e16c0e2f0c74ac992dc7ef76d20603147039b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Mon, 13 Mar 2023 13:32:01 -0300 Subject: [PATCH 057/110] Workflow validations (#156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Workflow validations workflow parameters validations: * id and key: add required_without * start: required, create a custom validator to check if state exists on the workflow states Others: * Workflow.Start: if the start is not defined, use the first workflow state * State.UnmarshalJSON: removed return from switch not 
need * State.UnmarshalJSON: when state type is nil return a error message Signed-off-by: AndrΓ© R. de Miranda * Not check state exists if start.StateName is empty Signed-off-by: AndrΓ© R. de Miranda * Workflow state requires "State.End" or "State.Transition" Signed-off-by: AndrΓ© R. de Miranda * Remove require from Workflow.Start and remove assert Workflow.Start from parser/parser_test.go Signed-off-by: AndrΓ© R. de Miranda * Move validator to *_validator.go. Changes with validator "transition" and "end" validator: add to BaseState; fix failed unit tests; refactor SwitchState validator. Signed-off-by: AndrΓ© R. de Miranda * Add simple check if transitions exist Signed-off-by: AndrΓ© R. de Miranda * Remove the `NextState` check that is empty. Already been checked as required in the `Transition` struct field. Signed-off-by: AndrΓ© R. de Miranda * Move function validTransitionAndEnd to workflow_validator.go Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- model/callback_state_test.go | 3 + model/delay_state_test.go | 3 + model/event.go | 17 - model/event_test.go | 46 --- model/event_validator.go | 34 ++ model/event_validator_test.go | 66 ++++ model/foreach_state.go | 40 --- model/foreach_state_test.go | 148 -------- model/foreach_state_validator.go | 59 ++++ model/foreach_state_validator_test.go | 183 ++++++++++ model/parallel_state.go | 39 --- model/parallel_state_test.go | 134 ------- model/parallel_state_validator.go | 55 +++ model/parallel_state_validator_test.go | 170 +++++++++ model/retry.go | 23 -- model/retry_test.go | 106 ------ model/retry_validator.go | 39 +++ model/retry_validator_test.go | 120 +++++++ model/sleep_state_test.go | 3 + model/states.go | 16 +- model/states_validator.go | 33 ++ model/states_validator_test.go | 135 +++++++ model/switch_state.go | 57 --- model/switch_state_test.go | 303 ---------------- model/switch_state_validator.go | 59 ++++ model/switch_state_validator_test.go | 329 ++++++++++++++++++ model/workflow.go | 53 +-- model/workflow_test.go | 78 +++-- model/workflow_validator.go | 97 ++++++ model/workflow_validator_test.go | 237 +++++++++++++ parser/parser_test.go | 44 +-- .../workflows/greetings-v08-spec.sw.yaml | 6 +- .../workflows/greetings_sleep.sw.json | 3 +- 33 files changed, 1722 insertions(+), 1016 deletions(-) create mode 100644 model/event_validator.go create mode 100644 model/event_validator_test.go create mode 100644 model/foreach_state_validator.go create mode 100644 model/foreach_state_validator_test.go create mode 100644 model/parallel_state_validator.go create mode 100644 model/parallel_state_validator_test.go create mode 100644 model/retry_validator.go create mode 100644 model/retry_validator_test.go create mode 100644 model/states_validator.go create mode 100644 model/states_validator_test.go create mode 100644 model/switch_state_validator.go create mode 100644 model/switch_state_validator_test.go create mode 100644 model/workflow_validator.go create mode 100644 model/workflow_validator_test.go diff --git a/model/callback_state_test.go b/model/callback_state_test.go index 9566d86..9e3e856 100644 --- a/model/callback_state_test.go +++ b/model/callback_state_test.go @@ -35,6 +35,9 @@ func TestCallbackStateStructLevelValidation(t *testing.T) { BaseState: BaseState{ Name: "callbackTest", Type: StateTypeCallback, + End: &End{ + Terminate: true, + }, }, CallbackState: &CallbackState{ Action: Action{ diff --git a/model/delay_state_test.go b/model/delay_state_test.go index 5521e03..79f49e5 100644 --- 
a/model/delay_state_test.go +++ b/model/delay_state_test.go @@ -35,6 +35,9 @@ func TestDelayStateStructLevelValidation(t *testing.T) { BaseState: BaseState{ Name: "1", Type: "delay", + End: &End{ + Terminate: true, + }, }, DelayState: &DelayState{ TimeDelay: "PT5S", diff --git a/model/event.go b/model/event.go index 98d3f59..8aac9ae 100644 --- a/model/event.go +++ b/model/event.go @@ -16,11 +16,6 @@ package model import ( "encoding/json" - "reflect" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" - - validator "github.com/go-playground/validator/v10" ) // EventKind defines this event as either `consumed` or `produced` @@ -34,18 +29,6 @@ const ( EventKindProduced EventKind = "produced" ) -func init() { - val.GetValidator().RegisterStructValidation(EventStructLevelValidation, Event{}) -} - -// EventStructLevelValidation custom validator for event kind consumed -func EventStructLevelValidation(structLevel validator.StructLevel) { - event := structLevel.Current().Interface().(Event) - if event.Kind == EventKindConsumed && len(event.Type) == 0 { - structLevel.ReportError(reflect.ValueOf(event.Type), "Type", "type", "reqtypeconsumed", "") - } -} - // Event used to define events and their correlations type Event struct { Common `json:",inline"` diff --git a/model/event_test.go b/model/event_test.go index bb34e08..8f1665b 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -19,54 +19,8 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func TestEventRefStructLevelValidation(t *testing.T) { - type testCase struct { - name string - eventRef EventRef - err string - } - - testCases := []testCase{ - { - name: "valid resultEventTimeout", - eventRef: EventRef{ - TriggerEventRef: "example valid", - ResultEventRef: "example valid", - ResultEventTimeout: "PT1H", - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - name: "invalid resultEventTimeout", - eventRef: EventRef{ - TriggerEventRef: "example invalid", - ResultEventRef: "example invalid red", - ResultEventTimeout: "10hs", - Invoke: InvokeKindSync, - }, - err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'iso8601duration' tag`, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := val.GetValidator().Struct(tc.eventRef) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - }) - } -} - func TestEventRefUnmarshalJSON(t *testing.T) { type testCase struct { desp string diff --git a/model/event_validator.go b/model/event_validator.go new file mode 100644 index 0000000..8d134af --- /dev/null +++ b/model/event_validator.go @@ -0,0 +1,34 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation(eventStructLevelValidation, Event{}) +} + +// eventStructLevelValidation custom validator for event kind consumed +func eventStructLevelValidation(structLevel validator.StructLevel) { + event := structLevel.Current().Interface().(Event) + if event.Kind == EventKindConsumed && len(event.Type) == 0 { + structLevel.ReportError(reflect.ValueOf(event.Type), "Type", "type", "reqtypeconsumed", "") + } +} diff --git a/model/event_validator_test.go b/model/event_validator_test.go new file mode 100644 index 0000000..90caa9c --- /dev/null +++ b/model/event_validator_test.go @@ -0,0 +1,66 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" +) + +func TestEventRefStructLevelValidation(t *testing.T) { + type testCase struct { + name string + eventRef EventRef + err string + } + + testCases := []testCase{ + { + name: "valid resultEventTimeout", + eventRef: EventRef{ + TriggerEventRef: "example valid", + ResultEventRef: "example valid", + ResultEventTimeout: "PT1H", + Invoke: InvokeKindSync, + }, + err: ``, + }, + { + name: "invalid resultEventTimeout", + eventRef: EventRef{ + TriggerEventRef: "example invalid", + ResultEventRef: "example invalid red", + ResultEventTimeout: "10hs", + Invoke: InvokeKindSync, + }, + err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'iso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := val.GetValidator().Struct(tc.eventRef) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/model/foreach_state.go b/model/foreach_state.go index b3ef13e..099c989 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -15,22 +15,12 @@ package model import ( - "context" "encoding/json" "fmt" - "reflect" - "strconv" - validator "github.com/go-playground/validator/v10" "k8s.io/apimachinery/pkg/util/intstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func init() { - val.GetValidator().RegisterStructValidationCtx(ForEachStateStructLevelValidation, ForEachState{}) -} - // ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) type ForEachModeType string @@ -86,36 +76,6 @@ func (f *ForEachState) UnmarshalJSON(data []byte) error { return nil } -// ForEachStateStructLevelValidation custom validator for ForEachState -func ForEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - stateObj := structLevel.Current().Interface().(ForEachState) - - if stateObj.Mode != 
ForEachModeTypeParallel { - return - } - - if stateObj.BatchSize == nil { - return - } - - switch stateObj.BatchSize.Type { - case intstr.Int: - if stateObj.BatchSize.IntVal <= 0 { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } - case intstr.String: - v, err := strconv.Atoi(stateObj.BatchSize.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", err.Error()) - return - } - - if v <= 0 { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } - } -} - // ForEachStateTimeout defines timeout settings for foreach state type ForEachStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go index 3dcb3f8..3456935 100644 --- a/model/foreach_state_test.go +++ b/model/foreach_state_test.go @@ -19,9 +19,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestForEachStateUnmarshalJSON(t *testing.T) { @@ -71,148 +68,3 @@ func TestForEachStateUnmarshalJSON(t *testing.T) { }) } } - -func TestForEachStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - state State - err string - } - testCases := []testCase{ - { - desp: "normal test & sequential", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeSequential, - }, - }, - err: ``, - }, - { - desp: "normal test & parallel int", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - }, - }, - }, - err: ``, - }, - { - desp: "normal test & parallel string", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", - }, - }, - }, - err: ``, - }, - { - desp: "invalid parallel int", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - }, - }, - }, - err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, - }, - { - desp: "invalid parallel string", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", - }, - }, - }, - err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, - }, - { - desp: "invalid parallel string format", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "a", - }, - }, - }, - err: `Key: 
'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.state) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/foreach_state_validator.go b/model/foreach_state_validator.go new file mode 100644 index 0000000..6543ded --- /dev/null +++ b/model/foreach_state_validator.go @@ -0,0 +1,59 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "context" + "reflect" + "strconv" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "k8s.io/apimachinery/pkg/util/intstr" +) + +func init() { + val.GetValidator().RegisterStructValidationCtx(forEachStateStructLevelValidation, ForEachState{}) +} + +// ForEachStateStructLevelValidation custom validator for ForEachState +func forEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + stateObj := structLevel.Current().Interface().(ForEachState) + + if stateObj.Mode != ForEachModeTypeParallel { + return + } + + if stateObj.BatchSize == nil { + return + } + + switch stateObj.BatchSize.Type { + case intstr.Int: + if stateObj.BatchSize.IntVal <= 0 { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") + } + case intstr.String: + v, err := strconv.Atoi(stateObj.BatchSize.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", err.Error()) + return + } + + if v <= 0 { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") + } + } +} diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go new file mode 100644 index 0000000..df01a32 --- /dev/null +++ b/model/foreach_state_validator_test.go @@ -0,0 +1,183 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" +) + +func TestForEachStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + state State + err string + } + testCases := []testCase{ + { + desp: "normal test & sequential", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + End: &End{ + Terminate: true, + }, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeSequential, + }, + }, + err: ``, + }, + { + desp: "normal test & parallel int", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + End: &End{ + Terminate: true, + }, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 1, + }, + }, + }, + err: ``, + }, + { + desp: "normal test & parallel string", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + End: &End{ + Terminate: true, + }, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "1", + }, + }, + }, + err: ``, + }, + { + desp: "invalid parallel int", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + End: &End{ + Terminate: true, + }, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 0, + }, + }, + }, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + { + desp: "invalid parallel string", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + End: &End{ + Terminate: true, + }, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "0", + }, + }, + }, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + { + desp: "invalid parallel string format", + state: State{ + BaseState: BaseState{ + Name: "1", + Type: "2", + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Actions: []Action{ + {}, + }, + Mode: ForEachModeTypeParallel, + BatchSize: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "a", + }, + }, + }, + err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.state) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/parallel_state.go b/model/parallel_state.go index e512ffa..53bce0f 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -15,16 +15,10 @@ package model import ( - "context" "encoding/json" "fmt" - "reflect" - "strconv" - validator "github.com/go-playground/validator/v10" "k8s.io/apimachinery/pkg/util/intstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) // CompletionType define on how to complete branch execution. 
@@ -109,36 +103,3 @@ type ParallelStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` } - -// ParallelStateStructLevelValidation custom validator for ParallelState -func ParallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - parallelStateObj := structLevel.Current().Interface().(ParallelState) - - if parallelStateObj.CompletionType == CompletionTypeAllOf { - return - } - - switch parallelStateObj.NumCompleted.Type { - case intstr.Int: - if parallelStateObj.NumCompleted.IntVal <= 0 { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") - } - case intstr.String: - v, err := strconv.Atoi(parallelStateObj.NumCompleted.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", err.Error()) - return - } - - if v <= 0 { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") - } - } -} - -func init() { - val.GetValidator().RegisterStructValidationCtx( - ParallelStateStructLevelValidation, - ParallelState{}, - ) -} diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go index c824d3b..b95cc69 100644 --- a/model/parallel_state_test.go +++ b/model/parallel_state_test.go @@ -20,8 +20,6 @@ import ( "github.com/stretchr/testify/assert" "k8s.io/apimachinery/pkg/util/intstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestParallelStateUnmarshalJSON(t *testing.T) { @@ -67,135 +65,3 @@ func TestParallelStateUnmarshalJSON(t *testing.T) { }) } } - -func TestParallelStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - state *State - err string - } - testCases := []testCase{ - { - desp: "normal", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - }, - err: ``, - }, - { - desp: "invalid completeType", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAllOf + "1", - }, - }, - err: `Key: 'State.ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, - }, - { - desp: "invalid numCompleted `int`", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromInt(0), - }, - }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, - }, - { - desp: "invalid numCompleted string format", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("a"), - }, - }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, 
- }, - { - desp: "normal", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("0"), - }, - }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.state) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/parallel_state_validator.go b/model/parallel_state_validator.go new file mode 100644 index 0000000..5286988 --- /dev/null +++ b/model/parallel_state_validator.go @@ -0,0 +1,55 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "context" + "reflect" + "strconv" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "k8s.io/apimachinery/pkg/util/intstr" +) + +func init() { + val.GetValidator().RegisterStructValidationCtx(parallelStateStructLevelValidation, ParallelState{}) +} + +// ParallelStateStructLevelValidation custom validator for ParallelState +func parallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { + parallelStateObj := structLevel.Current().Interface().(ParallelState) + + if parallelStateObj.CompletionType == CompletionTypeAllOf { + return + } + + switch parallelStateObj.NumCompleted.Type { + case intstr.Int: + if parallelStateObj.NumCompleted.IntVal <= 0 { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") + } + case intstr.String: + v, err := strconv.Atoi(parallelStateObj.NumCompleted.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", err.Error()) + return + } + + if v <= 0 { + structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "") + } + } +} diff --git a/model/parallel_state_validator_test.go b/model/parallel_state_validator_test.go new file mode 100644 index 0000000..cc321ae --- /dev/null +++ b/model/parallel_state_validator_test.go @@ -0,0 +1,170 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" +) + +func TestParallelStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + state *State + err string + } + testCases := []testCase{ + { + desp: "normal", + state: &State{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + End: &End{ + Terminate: true, + }, + }, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAllOf, + NumCompleted: intstr.FromInt(1), + }, + }, + err: ``, + }, + { + desp: "invalid completeType", + state: &State{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + End: &End{ + Terminate: true, + }, + }, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAllOf + "1", + }, + }, + err: `Key: 'State.ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, + }, + { + desp: "invalid numCompleted `int`", + state: &State{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + End: &End{ + Terminate: true, + }, + }, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromInt(0), + }, + }, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + { + desp: "invalid numCompleted string format", + state: &State{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + End: &End{ + Terminate: true, + }, + }, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("a"), + }, + }, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + { + desp: "normal", + state: &State{ + BaseState: BaseState{ + Name: "1", + Type: "parallel", + End: &End{ + Terminate: true, + }, + }, + ParallelState: &ParallelState{ + Branches: []Branch{ + { + Name: "b1", + Actions: []Action{ + {}, + }, + }, + }, + CompletionType: CompletionTypeAtLeast, + NumCompleted: intstr.FromString("0"), + }, + }, + err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.state) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/retry.go b/model/retry.go index 2f2e57c..7430adb 100644 --- a/model/retry.go +++ b/model/retry.go @@ -15,22 +15,11 @@ package model import ( - "reflect" - - validator "github.com/go-playground/validator/v10" "k8s.io/apimachinery/pkg/util/intstr" "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func init() { - val.GetValidator().RegisterStructValidation( - RetryStructLevelValidation, - Retry{}, - ) -} - // Retry ... 
type Retry struct { // Unique retry strategy name @@ -49,15 +38,3 @@ type Retry struct { // TODO: make iso8601duration compatible this type Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` } - -// RetryStructLevelValidation custom validator for Retry Struct -func RetryStructLevelValidation(structLevel validator.StructLevel) { - retryObj := structLevel.Current().Interface().(Retry) - - if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { - err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") - } - } -} diff --git a/model/retry_test.go b/model/retry_test.go index 228345e..c960f3c 100644 --- a/model/retry_test.go +++ b/model/retry_test.go @@ -13,109 +13,3 @@ // limitations under the License. package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func TestRetryStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - retryObj Retry - err string - } - testCases := []testCase{ - { - desp: "normal", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: ``, - }, - { - desp: "normal with all optinal", - retryObj: Retry{ - Name: "1", - }, - err: ``, - }, - { - desp: "missing required name", - retryObj: Retry{ - Name: "", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: `Key: 'Retry.Name' Error:Field validation for 'Name' failed on the 'required' tag`, - }, - { - desp: "invalid delay duration", - retryObj: Retry{ - Name: "1", - Delay: "P5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'iso8601duration' tag`, - }, - { - desp: "invdalid max delay duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "P5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'iso8601duration' tag`, - }, - { - desp: "invalid increment duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "P5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'iso8601duration' tag`, - }, - { - desp: "invalid jitter duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("P5S"), - }, - err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'iso8601duration' tag`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.retryObj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/retry_validator.go b/model/retry_validator.go new file mode 100644 index 0000000..14886ce --- /dev/null +++ b/model/retry_validator.go @@ -0,0 +1,39 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "reflect" + + validator "github.com/go-playground/validator/v10" + "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation(retryStructLevelValidation, Retry{}) +} + +// RetryStructLevelValidation custom validator for Retry Struct +func retryStructLevelValidation(structLevel validator.StructLevel) { + retryObj := structLevel.Current().Interface().(Retry) + + if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { + err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) + if err != nil { + structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") + } + } +} diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go new file mode 100644 index 0000000..78f1e70 --- /dev/null +++ b/model/retry_validator_test.go @@ -0,0 +1,120 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" + + "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" +) + +func TestRetryStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + retryObj Retry + err string + } + testCases := []testCase{ + { + desp: "normal", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: ``, + }, + { + desp: "normal with all optinal", + retryObj: Retry{ + Name: "1", + }, + err: ``, + }, + { + desp: "missing required name", + retryObj: Retry{ + Name: "", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Name' Error:Field validation for 'Name' failed on the 'required' tag`, + }, + { + desp: "invalid delay duration", + retryObj: Retry{ + Name: "1", + Delay: "P5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'iso8601duration' tag`, + }, + { + desp: "invdalid max delay duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "P5S", + Increment: "PT5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'iso8601duration' tag`, + }, + { + desp: "invalid increment duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "P5S", + Jitter: floatstr.FromString("PT5S"), + }, + err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'iso8601duration' tag`, + }, + { + desp: "invalid jitter duration", + retryObj: Retry{ + Name: "1", + Delay: "PT5S", + MaxDelay: "PT5S", + Increment: "PT5S", + Jitter: floatstr.FromString("P5S"), + }, + err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'iso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.retryObj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go index e6580df..47b6a1e 100644 --- a/model/sleep_state_test.go +++ b/model/sleep_state_test.go @@ -35,6 +35,9 @@ func TestSleepStateStructLevelValidation(t *testing.T) { BaseState: BaseState{ Name: "1", Type: "sleep", + End: &End{ + Terminate: true, + }, }, SleepState: &SleepState{ Duration: "PT10S", diff --git a/model/states.go b/model/states.go index bc3c5df..9862651 100644 --- a/model/states.go +++ b/model/states.go @@ -53,7 +53,7 @@ type BaseState struct { // State type Type StateType `json:"type" validate:"required"` // States error handling and retries definitions - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` + OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` // Next transition of the workflow after the time delay Transition *Transition `json:"transition,omitempty"` // State data filter @@ -187,7 +187,6 @@ func (s *State) MarshalJSON() ([]byte, error) { } func (s *State) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &s.BaseState); err != nil { return err } @@ -204,7 +203,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.DelayState = state - return nil case string(StateTypeEvent): state := 
&EventState{} @@ -212,7 +210,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.EventState = state - return nil case string(StateTypeOperation): state := &OperationState{} @@ -220,7 +217,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.OperationState = state - return nil case string(StateTypeParallel): state := &ParallelState{} @@ -228,7 +224,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.ParallelState = state - return nil case string(StateTypeSwitch): state := &SwitchState{} @@ -236,7 +231,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.SwitchState = state - return nil case string(StateTypeForEach): state := &ForEachState{} @@ -244,7 +238,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.ForEachState = state - return nil case string(StateTypeInject): state := &InjectState{} @@ -252,7 +245,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.InjectState = state - return nil case string(StateTypeCallback): state := &CallbackState{} @@ -260,7 +252,6 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.CallbackState = state - return nil case string(StateTypeSleep): state := &SleepState{} @@ -268,9 +259,10 @@ func (s *State) UnmarshalJSON(data []byte) error { return err } s.SleepState = state - return nil - + case nil: + return fmt.Errorf("state parameter 'type' not defined") default: return fmt.Errorf("state type %v not supported", mapState["type"]) } + return nil } diff --git a/model/states_validator.go b/model/states_validator.go new file mode 100644 index 0000000..ee55846 --- /dev/null +++ b/model/states_validator.go @@ -0,0 +1,33 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation(baseStateStructLevelValidation, BaseState{}) +} + +func baseStateStructLevelValidation(structLevel validator.StructLevel) { + baseState := structLevel.Current().Interface().(BaseState) + if baseState.Type != StateTypeSwitch { + validTransitionAndEnd(structLevel, reflect.ValueOf(baseState), baseState.Transition, baseState.End) + } +} diff --git a/model/states_validator_test.go b/model/states_validator_test.go new file mode 100644 index 0000000..296f726 --- /dev/null +++ b/model/states_validator_test.go @@ -0,0 +1,135 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" +) + +var stateTransitionDefault = State{ + BaseState: BaseState{ + Name: "name state", + Type: StateTypeOperation, + Transition: &Transition{ + NextState: "next name state", + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{ + {}, + }, + }, +} + +var stateEndDefault = State{ + BaseState: BaseState{ + Name: "name state", + Type: StateTypeOperation, + End: &End{ + Terminate: true, + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{ + {}, + }, + }, +} + +var switchStateTransitionDefault = State{ + BaseState: BaseState{ + Name: "name state", + Type: StateTypeSwitch, + }, + SwitchState: &SwitchState{ + DataConditions: []DataCondition{ + { + Condition: "${ .applicant | .age >= 18 }", + Transition: &Transition{ + NextState: "nex state", + }, + }, + }, + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "nex state", + }, + }, + }, +} + +func TestStateStructLevelValidation(t *testing.T) { + type testCase struct { + name string + instance State + err string + } + + testCases := []testCase{ + { + name: "state transition success", + instance: stateTransitionDefault, + err: ``, + }, + { + name: "state end success", + instance: stateEndDefault, + err: ``, + }, + { + name: "switch state success", + instance: switchStateTransitionDefault, + err: ``, + }, + { + name: "state end and transition", + instance: func() State { + s := stateTransitionDefault + s.End = stateEndDefault.End + return s + }(), + err: `Key: 'State.BaseState.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + { + name: "basestate without end and transition", + instance: func() State { + s := stateTransitionDefault + s.Transition = nil + return s + }(), + err: `Key: 'State.BaseState.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := val.GetValidator().Struct(tc.instance) + + if tc.err != "" { + assert.Error(t, err) + if err != nil { + assert.Equal(t, tc.err, err.Error()) + } + return + } + assert.NoError(t, err) + }) + } +} diff --git a/model/switch_state.go b/model/switch_state.go index 118b18c..cc630bc 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -15,22 +15,9 @@ package model import ( - "context" "encoding/json" - "reflect" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" - - validator "github.com/go-playground/validator/v10" ) -func init() { - val.GetValidator().RegisterStructValidationCtx(SwitchStateStructLevelValidation, SwitchState{}) - val.GetValidator().RegisterStructValidationCtx(DefaultConditionStructLevelValidation, DefaultCondition{}) - val.GetValidator().RegisterStructValidationCtx(EventConditionStructLevelValidation, EventCondition{}) - val.GetValidator().RegisterStructValidationCtx(DataConditionStructLevelValidation, DataCondition{}) -} - // SwitchState is workflow's 
gateways: direct transitions onf a workflow based on certain conditions. type SwitchState struct { // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. @@ -58,34 +45,12 @@ func (s *SwitchState) MarshalJSON() ([]byte, error) { return custom, err } -// SwitchStateStructLevelValidation custom validator for SwitchState -func SwitchStateStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { - switchState := structLevel.Current().Interface().(SwitchState) - switch { - case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataConditions, eventConditions") - case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataConditions, eventConditions") - } -} - // DefaultCondition Can be either a transition or end definition type DefaultCondition struct { Transition *Transition `json:"transition,omitempty"` End *End `json:"end,omitempty"` } -// DefaultConditionStructLevelValidation custom validator for DefaultCondition -func DefaultConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { - defaultCondition := structLevel.Current().Interface().(DefaultCondition) - switch { - case defaultCondition.End == nil && defaultCondition.Transition == nil: - structLevel.ReportError(reflect.ValueOf(defaultCondition), "Transition", "transition", "required", "must have one of transition, end") - case defaultCondition.Transition != nil && defaultCondition.End != nil: - structLevel.ReportError(reflect.ValueOf(defaultCondition), "Transition", "transition", "exclusive", "must have one of transition, end") - } -} - // SwitchStateTimeout defines the specific timeout settings for switch state type SwitchStateTimeout struct { StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` @@ -111,17 +76,6 @@ type EventCondition struct { Transition *Transition `json:"transition" validate:"omitempty"` } -// EventConditionStructLevelValidation custom validator for EventCondition -func EventConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { - eventCondition := structLevel.Current().Interface().(EventCondition) - switch { - case eventCondition.End == nil && eventCondition.Transition == nil: - structLevel.ReportError(reflect.ValueOf(eventCondition), "Transition", "transition", "required", "must have one of transition, end") - case eventCondition.Transition != nil && eventCondition.End != nil: - structLevel.ReportError(reflect.ValueOf(eventCondition), "Transition", "transition", "exclusive", "must have one of transition, end") - } -} - // DataCondition specify a data-based condition statement which causes a transition to another workflow state // if evaluated to true. 
type DataCondition struct { @@ -136,14 +90,3 @@ type DataCondition struct { // Workflow transition if condition is evaluated to true Transition *Transition `json:"transition" validate:"omitempty"` } - -// DataConditionStructLevelValidation custom validator for DataCondition -func DataConditionStructLevelValidation(ctx context.Context, structLevel validator.StructLevel) { - dataCondition := structLevel.Current().Interface().(DataCondition) - switch { - case dataCondition.End == nil && dataCondition.Transition == nil: - structLevel.ReportError(reflect.ValueOf(dataCondition), "Transition", "transition", "required", "must have one of transition, end") - case dataCondition.Transition != nil && dataCondition.End != nil: - structLevel.ReportError(reflect.ValueOf(dataCondition), "Transition", "transition", "exclusive", "must have one of transition, end") - } -} diff --git a/model/switch_state_test.go b/model/switch_state_test.go index 3136e4a..c960f3c 100644 --- a/model/switch_state_test.go +++ b/model/switch_state_test.go @@ -13,306 +13,3 @@ // limitations under the License. package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func TestSwitchStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj State - err string - } - testCases := []testCase{ - { - desp: "normal & eventConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, - }, - err: ``, - }, - { - desp: "normal & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, - }, - err: ``, - }, - { - desp: "missing eventConditions & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - }, - }, - err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, - }, - { - desp: "exclusive eventConditions & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, - }, - err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestDefaultConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj DefaultCondition - err string - } - 
testCases := []testCase{ - { - desp: "normal & end", - obj: DefaultCondition{ - End: &End{}, - }, - err: ``, - }, - { - desp: "normal & transition", - obj: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - err: ``, - }, - { - desp: "missing end & transition", - obj: DefaultCondition{}, - err: `DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, - }, - { - desp: "exclusive end & transition", - obj: DefaultCondition{ - End: &End{}, - Transition: &Transition{ - NextState: "1", - }, - }, - err: `Key: 'DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestEventConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj EventCondition - err string - } - testCases := []testCase{ - { - desp: "normal & end", - obj: EventCondition{ - EventRef: "1", - End: &End{}, - }, - err: ``, - }, - { - desp: "normal & transition", - obj: EventCondition{ - EventRef: "1", - Transition: &Transition{ - NextState: "1", - }, - }, - err: ``, - }, - { - desp: "missing end & transition", - obj: EventCondition{ - EventRef: "1", - }, - err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, - }, - { - desp: "exclusive end & transition", - obj: EventCondition{ - EventRef: "1", - End: &End{}, - Transition: &Transition{ - NextState: "1", - }, - }, - err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestDataConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj DataCondition - err string - } - testCases := []testCase{ - { - desp: "normal & end", - obj: DataCondition{ - Condition: "1", - End: &End{}, - }, - err: ``, - }, - { - desp: "normal & transition", - obj: DataCondition{ - Condition: "1", - Transition: &Transition{ - NextState: "1", - }, - }, - err: ``, - }, - { - desp: "missing end & transition", - obj: DataCondition{ - Condition: "1", - }, - err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, - }, - { - desp: "exclusive end & transition", - obj: DataCondition{ - Condition: "1", - End: &End{}, - Transition: &Transition{ - NextState: "1", - }, - }, - err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/switch_state_validator.go b/model/switch_state_validator.go new file mode 100644 index 0000000..83f1379 --- /dev/null +++ b/model/switch_state_validator.go @@ -0,0 +1,59 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this 
file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation(switchStateStructLevelValidation, SwitchState{}) + val.GetValidator().RegisterStructValidation(defaultConditionStructLevelValidation, DefaultCondition{}) + val.GetValidator().RegisterStructValidation(eventConditionStructLevelValidation, EventCondition{}) + val.GetValidator().RegisterStructValidation(dataConditionStructLevelValidation, DataCondition{}) +} + +// SwitchStateStructLevelValidation custom validator for SwitchState +func switchStateStructLevelValidation(structLevel validator.StructLevel) { + switchState := structLevel.Current().Interface().(SwitchState) + + switch { + case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataConditions, eventConditions") + case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataConditions, eventConditions") + } +} + +// DefaultConditionStructLevelValidation custom validator for DefaultCondition +func defaultConditionStructLevelValidation(structLevel validator.StructLevel) { + defaultCondition := structLevel.Current().Interface().(DefaultCondition) + validTransitionAndEnd(structLevel, reflect.ValueOf(defaultCondition), defaultCondition.Transition, defaultCondition.End) +} + +// EventConditionStructLevelValidation custom validator for EventCondition +func eventConditionStructLevelValidation(structLevel validator.StructLevel) { + eventCondition := structLevel.Current().Interface().(EventCondition) + validTransitionAndEnd(structLevel, reflect.ValueOf(eventCondition), eventCondition.Transition, eventCondition.End) +} + +// DataConditionStructLevelValidation custom validator for DataCondition +func dataConditionStructLevelValidation(structLevel validator.StructLevel) { + dataCondition := structLevel.Current().Interface().(DataCondition) + validTransitionAndEnd(structLevel, reflect.ValueOf(dataCondition), dataCondition.Transition, dataCondition.End) +} diff --git a/model/switch_state_validator_test.go b/model/switch_state_validator_test.go new file mode 100644 index 0000000..7bddc46 --- /dev/null +++ b/model/switch_state_validator_test.go @@ -0,0 +1,329 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" +) + +func TestSwitchStateStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj State + err string + } + testCases := []testCase{ + { + desp: "normal & eventConditions", + obj: State{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + }, + err: ``, + }, + { + desp: "normal & dataConditions", + obj: State{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + }, + err: ``, + }, + { + desp: "missing eventConditions & dataConditions", + obj: State{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + }, + }, + err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, + }, + { + desp: "exclusive eventConditions & dataConditions", + obj: State{ + BaseState: BaseState{ + Name: "1", + Type: "switch", + }, + SwitchState: &SwitchState{ + DefaultCondition: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + EventConditions: []EventCondition{ + { + EventRef: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + DataConditions: []DataCondition{ + { + Condition: "1", + Transition: &Transition{ + NextState: "2", + }, + }, + }, + }, + }, + err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Equal(t, tc.err, err.Error()) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestDefaultConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj DefaultCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: DefaultCondition{ + End: &End{ + Terminate: true, + }, + }, + err: ``, + }, + { + desp: "normal & transition", + obj: DefaultCondition{ + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: DefaultCondition{}, + err: `DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: DefaultCondition{ + End: &End{ + Terminate: true, + }, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases[2:] { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + 
} +} + +func TestEventConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj EventCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: EventCondition{ + EventRef: "1", + End: &End{ + Terminate: true, + }, + }, + err: ``, + }, + { + desp: "normal & transition", + obj: EventCondition{ + EventRef: "1", + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: EventCondition{ + EventRef: "1", + }, + err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: EventCondition{ + EventRef: "1", + End: &End{ + Terminate: true, + }, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + +func TestDataConditionStructLevelValidation(t *testing.T) { + type testCase struct { + desp string + obj DataCondition + err string + } + testCases := []testCase{ + { + desp: "normal & end", + obj: DataCondition{ + Condition: "1", + End: &End{ + Terminate: true, + }, + }, + err: ``, + }, + { + desp: "normal & transition", + obj: DataCondition{ + Condition: "1", + Transition: &Transition{ + NextState: "1", + }, + }, + err: ``, + }, + { + desp: "missing end & transition", + obj: DataCondition{ + Condition: "1", + }, + err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + }, + { + desp: "exclusive end & transition", + obj: DataCondition{ + Condition: "1", + End: &End{ + Terminate: true, + }, + Transition: &Transition{ + NextState: "1", + }, + }, + err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := val.GetValidator().Struct(tc.obj) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index c182929..c61e3ea 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -18,11 +18,6 @@ import ( "bytes" "encoding/json" "fmt" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) // InvokeKind defines how the target is invoked. 
@@ -55,28 +50,13 @@ const ( UnlimitedTimeout = "unlimited" ) -func init() { - val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) - val.GetValidator().RegisterStructValidation(BaseWorkflowStructLevelValidation, BaseWorkflow{}) -} - -func continueAsStructLevelValidation(structLevel validator.StructLevel) { - continueAs := structLevel.Current().Interface().(ContinueAs) - if len(continueAs.WorkflowExecTimeout.Duration) > 0 { - if err := val.ValidateISO8601TimeDuration(continueAs.WorkflowExecTimeout.Duration); err != nil { - structLevel.ReportError(reflect.ValueOf(continueAs.WorkflowExecTimeout.Duration), - "workflowExecTimeout", "duration", "iso8601duration", "") - } - } -} - // BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces // to make it easy for custom unmarshalers implementations to unmarshal the common data structure. type BaseWorkflow struct { // Workflow unique identifier - ID string `json:"id" validate:"omitempty,min=1"` + ID string `json:"id,omitempty" validate:"required_without=Key"` // Key Domain-specific workflow identifier - Key string `json:"key,omitempty" validate:"omitempty,min=1"` + Key string `json:"key,omitempty" validate:"required_without=ID"` // Workflow name Name string `json:"name,omitempty"` // Workflow description @@ -112,22 +92,6 @@ type BaseWorkflow struct { Auth AuthArray `json:"auth,omitempty" validate:"omitempty"` } -// BaseWorkflowStructLevelValidation custom validator for unique name of the auth methods -func BaseWorkflowStructLevelValidation(structLevel validator.StructLevel) { - // NOTE: we cannot add the custom validation of auth to AuthArray - // because `RegisterStructValidation` only works with struct type - wf := structLevel.Current().Interface().(BaseWorkflow) - dict := map[string]bool{} - - for _, a := range wf.Auth { - if !dict[a.Name] { - dict[a.Name] = true - } else { - structLevel.ReportError(reflect.ValueOf(a.Name), "Name", "name", "reqnameunique", "") - } - } -} - type AuthArray []Auth func (r *AuthArray) UnmarshalJSON(data []byte) error { @@ -186,8 +150,10 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { } var rawStates []json.RawMessage - if err := json.Unmarshal(workflowMap["states"], &rawStates); err != nil { - return err + if _, ok := workflowMap["states"]; ok { + if err := json.Unmarshal(workflowMap["states"], &rawStates); err != nil { + return err + } } w.States = make([]State, len(rawStates)) @@ -197,6 +163,13 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { } } + // if the start is not defined, use the first state + if w.BaseWorkflow.Start == nil && len(w.States) > 0 { + w.BaseWorkflow.Start = &Start{ + StateName: w.States[0].Name, + } + } + if _, ok := workflowMap["events"]; ok { if err := json.Unmarshal(workflowMap["events"], &w.Events); err != nil { var s string diff --git a/model/workflow_test.go b/model/workflow_test.go index 32ba486..c9ad3e9 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -19,56 +19,80 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func TestContinueAsStructLevelValidation(t *testing.T) { +func TestWorkflowStartUnmarshalJSON(t *testing.T) { type testCase struct { - name string - continueAs ContinueAs - err string + desp string + data string + expect Workflow + err string } - testCases := []testCase{ { - name: "valid ContinueAs", - continueAs: ContinueAs{ - WorkflowID: "another-test", - Version: "2", - Data: FromString("${ 
del(.customerCount) }"), - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "PT1H", - Interrupt: false, - RunBefore: "test", + desp: "start string", + data: `{"start": "start state name"}`, + expect: Workflow{ + BaseWorkflow: BaseWorkflow{ + ExpressionLang: "jq", + Start: &Start{ + StateName: "start state name", + }, }, + States: []State{}, }, err: ``, }, { - name: "invalid WorkflowExecTimeout", - continueAs: ContinueAs{ - WorkflowID: "test", - Version: "1", - Data: FromString("${ del(.customerCount) }"), - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "invalid", + desp: "start empty and use the first state", + data: `{"states": [{"name": "start state name", "type": "operation"}]}`, + expect: Workflow{ + BaseWorkflow: BaseWorkflow{ + ExpressionLang: "jq", + Start: &Start{ + StateName: "start state name", + }, + }, + States: []State{ + { + BaseState: BaseState{ + Name: "start state name", + Type: StateTypeOperation, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + }, + }, }, }, - err: `Key: 'ContinueAs.workflowExecTimeout' Error:Field validation for 'workflowExecTimeout' failed on the 'iso8601duration' tag`, + err: ``, + }, + { + desp: "start empty, and states empty", + data: `{"states": []}`, + expect: Workflow{ + BaseWorkflow: BaseWorkflow{ + ExpressionLang: "jq", + }, + States: []State{}, + }, + err: ``, }, } - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := val.GetValidator().Struct(tc.continueAs) + for _, tc := range testCases[1:] { + t.Run(tc.desp, func(t *testing.T) { + var v Workflow + err := json.Unmarshal([]byte(tc.data), &v) if tc.err != "" { assert.Error(t, err) assert.Regexp(t, tc.err, err) return } + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) }) } } diff --git a/model/workflow_validator.go b/model/workflow_validator.go new file mode 100644 index 0000000..68f8096 --- /dev/null +++ b/model/workflow_validator.go @@ -0,0 +1,97 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "reflect" + + validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) + val.GetValidator().RegisterStructValidation(workflowStructLevelValidation, Workflow{}) +} + +func continueAsStructLevelValidation(structLevel validator.StructLevel) { + continueAs := structLevel.Current().Interface().(ContinueAs) + if len(continueAs.WorkflowExecTimeout.Duration) > 0 { + if err := val.ValidateISO8601TimeDuration(continueAs.WorkflowExecTimeout.Duration); err != nil { + structLevel.ReportError(reflect.ValueOf(continueAs.WorkflowExecTimeout.Duration), + "workflowExecTimeout", "duration", "iso8601duration", "") + } + } +} + +// WorkflowStructLevelValidation custom validator +func workflowStructLevelValidation(structLevel validator.StructLevel) { + // unique name of the auth methods + // NOTE: we cannot add the custom validation of auth to AuthArray + // because `RegisterStructValidation` only works with struct type + wf := structLevel.Current().Interface().(Workflow) + dict := map[string]bool{} + + for _, a := range wf.BaseWorkflow.Auth { + if !dict[a.Name] { + dict[a.Name] = true + } else { + structLevel.ReportError(reflect.ValueOf(a.Name), "[]Auth.Name", "name", "reqnameunique", "") + } + } + + startAndStatesTransitionValidator(structLevel, wf.BaseWorkflow.Start, wf.States) +} + +func startAndStatesTransitionValidator(structLevel validator.StructLevel, start *Start, states []State) { + statesMap := make(map[string]State, len(states)) + for _, state := range states { + statesMap[state.Name] = state + } + + if start != nil { + // if not exists the start transtion stop the states validations + if _, ok := statesMap[start.StateName]; !ok { + structLevel.ReportError(reflect.ValueOf(start), "Start", "start", "startnotexist", "") + return + } + } + + if len(states) == 1 { + return + } + + // Naive check if transitions exist + for _, state := range statesMap { + if state.Transition != nil { + if _, ok := statesMap[state.Transition.NextState]; !ok { + structLevel.ReportError(reflect.ValueOf(state), "Transition", "transition", "transitionnotexists", state.Transition.NextState) + } + } + } + + // TODO: create states graph to complex check +} + +func validTransitionAndEnd(structLevel validator.StructLevel, field interface{}, transition *Transition, end *End) { + hasTransition := transition != nil + isEnd := end != nil && (end.Terminate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end + + if !hasTransition && !isEnd { + structLevel.ReportError(field, "Transition", "transition", "required", "must have one of transition, end") + } else if hasTransition && isEnd { + structLevel.ReportError(field, "Transition", "transition", "exclusive", "must have one of transition, end") + } +} diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go new file mode 100644 index 0000000..451d87f --- /dev/null +++ b/model/workflow_validator_test.go @@ -0,0 +1,237 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +var workflowStructDefault = Workflow{ + BaseWorkflow: BaseWorkflow{ + ID: "id", + SpecVersion: "0.8", + Auth: AuthArray{ + { + Name: "auth name", + }, + }, + Start: &Start{ + StateName: "name state", + }, + }, + States: []State{ + { + BaseState: BaseState{ + Name: "name state", + Type: StateTypeOperation, + Transition: &Transition{ + NextState: "next name state", + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{ + {}, + }, + }, + }, + { + BaseState: BaseState{ + Name: "next name state", + Type: StateTypeOperation, + End: &End{ + Terminate: true, + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{ + {}, + }, + }, + }, + }, +} + +var listStateTransition1 = []State{ + { + BaseState: BaseState{ + Name: "name state", + Type: StateTypeOperation, + Transition: &Transition{ + NextState: "next name state", + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{{}}, + }, + }, + { + BaseState: BaseState{ + Name: "next name state", + Type: StateTypeOperation, + Transition: &Transition{ + NextState: "next name state 2", + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{{}}, + }, + }, + { + BaseState: BaseState{ + Name: "next name state 2", + Type: StateTypeOperation, + End: &End{ + Terminate: true, + }, + }, + OperationState: &OperationState{ + ActionMode: "sequential", + Actions: []Action{{}}, + }, + }, +} + +func TestWorkflowStructLevelValidation(t *testing.T) { + type testCase[T any] struct { + name string + instance T + err string + } + testCases := []testCase[any]{ + { + name: "workflow success", + instance: workflowStructDefault, + }, + { + name: "workflow auth.name repeat", + instance: func() Workflow { + w := workflowStructDefault + w.Auth = append(w.Auth, w.Auth[0]) + return w + }(), + err: `Key: 'Workflow.[]Auth.Name' Error:Field validation for '[]Auth.Name' failed on the 'reqnameunique' tag`, + }, + { + name: "workflow id exclude key", + instance: func() Workflow { + w := workflowStructDefault + w.ID = "id" + w.Key = "" + return w + }(), + err: ``, + }, + { + name: "workflow key exclude id", + instance: func() Workflow { + w := workflowStructDefault + w.ID = "" + w.Key = "key" + return w + }(), + err: ``, + }, + { + name: "workflow id and key", + instance: func() Workflow { + w := workflowStructDefault + w.ID = "id" + w.Key = "key" + return w + }(), + err: ``, + }, + { + name: "workflow without id and key", + instance: func() Workflow { + w := workflowStructDefault + w.ID = "" + w.Key = "" + return w + }(), + err: `Key: 'Workflow.BaseWorkflow.ID' Error:Field validation for 'ID' failed on the 'required_without' tag +Key: 'Workflow.BaseWorkflow.Key' Error:Field validation for 'Key' failed on the 'required_without' tag`, + }, + { + name: "workflow start", + instance: func() Workflow { + w := workflowStructDefault + w.Start = &Start{ + StateName: "start state not 
found", + } + return w + }(), + err: `Key: 'Workflow.Start' Error:Field validation for 'Start' failed on the 'startnotexist' tag`, + }, + { + name: "workflow states transitions", + instance: func() Workflow { + w := workflowStructDefault + w.States = listStateTransition1 + return w + }(), + err: ``, + }, + { + name: "valid ContinueAs", + instance: ContinueAs{ + WorkflowID: "another-test", + Version: "2", + Data: FromString("${ del(.customerCount) }"), + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "PT1H", + Interrupt: false, + RunBefore: "test", + }, + }, + err: ``, + }, + { + name: "invalid WorkflowExecTimeout", + instance: ContinueAs{ + WorkflowID: "test", + Version: "1", + Data: FromString("${ del(.customerCount) }"), + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "invalid", + }, + }, + err: `Key: 'ContinueAs.workflowExecTimeout' Error:Field validation for 'workflowExecTimeout' failed on the 'iso8601duration' tag`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := val.GetValidator().Struct(tc.instance) + + if tc.err != "" { + assert.Error(t, err) + if err != nil { + assert.Equal(t, tc.err, err.Error()) + } + return + } + assert.NoError(t, err) + }) + } +} diff --git a/parser/parser_test.go b/parser/parser_test.go index d9c6f15..a11106f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -16,7 +16,6 @@ package parser import ( "encoding/json" - "fmt" "os" "path/filepath" "strings" @@ -39,9 +38,10 @@ func TestBasicValidation(t *testing.T) { for _, file := range files { if !file.IsDir() { - workflow, err := FromFile(filepath.Join(rootPath, file.Name())) + path := filepath.Join(rootPath, file.Name()) + workflow, err := FromFile(path) - if assert.NoError(t, err, "Test File %s", file.Name()) { + if assert.NoError(t, err, "Test File %s", path) { assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) } @@ -379,8 +379,6 @@ func TestFromFile(t *testing.T) { // Workflow "name" no longer a required property assert.Empty(t, w.Name) - // Workflow "start" no longer a required property - assert.Empty(t, w.Start) // Functions: assert.NotEmpty(t, w.Functions[0]) @@ -557,7 +555,6 @@ func TestFromFile(t *testing.T) { assert.NotNil(t, w.States[9].SleepState.Timeouts) assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, &model.Transition{NextState: "GetJobStatus"}, w.States[9].Transition) assert.Equal(t, true, w.States[9].End.Terminate) }, }, @@ -610,7 +607,7 @@ states: "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", - "start": "CheckApplication", + "start": "Hello State", "specVersion": "0.8", "auth": [ { @@ -653,7 +650,7 @@ states: assert.NotNil(t, workflow.Auth) b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello 
State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", string(b)) }) @@ -665,7 +662,7 @@ states: "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", - "start": "CheckApplication", + "start": "Hello State", "specVersion": "0.8", "auth": "./testdata/workflows/urifiles/auth.json", "states": [ @@ -684,7 +681,7 @@ states: assert.NotNil(t, workflow.Auth) b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"CheckApplication\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", + assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", string(b)) }) @@ -696,7 +693,7 @@ states: "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", - "start": "CheckApplication", + "start": "Hello State", "specVersion": "0.7", "auth": 123, "states": [ @@ -726,7 +723,7 @@ version: '1.0.0' specVersion: '0.8' name: WorkflowStatesTest description: Inject Hello World -start: Hello State +start: GreetDelay metadata: metadata1: metadata1 metadata2: metadata2 @@ -743,7 +740,7 @@ states: type: delay timeDelay: PT5S transition: - nextState: Hello State + nextState: StoreCarAuctionBid - name: StoreCarAuctionBid type: event exclusive: true @@ -775,6 +772,7 @@ states: stateExecTimeout: total: PT1S single: PT2S + transition: ParallelExec - name: ParallelExec type: parallel completionType: atLeast @@ -794,6 +792,7 @@ states: total: PT1S single: PT2S numCompleted: 13 + transition: 
CheckVisaStatusSwitchEventBased - name: CheckVisaStatusSwitchEventBased type: switch eventConditions: @@ -840,6 +839,7 @@ states: stateExecTimeout: total: PT11S single: PT22S + transition: HelloInject - name: HelloInject type: inject data: @@ -848,10 +848,10 @@ states: stateExecTimeout: total: PT11M single: PT22M + transition: WaitForCompletionSleep - name: WaitForCompletionSleep type: sleep duration: PT5S - transition: GetJobStatus timeouts: stateExecTimeout: total: PT100S @@ -887,6 +887,7 @@ states: stateExecTimeout: total: PT115M single: PT22M + transition: HandleApprovedVisa - name: HandleApprovedVisa type: operation actions: @@ -910,7 +911,6 @@ states: terminate: true `)) assert.Nil(t, err) - fmt.Println(err) assert.NotNil(t, workflow) b, err := json.Marshal(workflow) @@ -921,31 +921,31 @@ states: assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) // Callback state - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"},\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) // Operation State assert.True(t, strings.Contains(string(b), "{\"name\":\"HandleApprovedVisa\",\"type\":\"operation\",\"end\":{\"terminate\":true},\"actionMode\":\"sequential\",\"actions\":[{\"name\":\"subFlowRefName\",\"subFlowRef\":{\"workflowId\":\"handleApprovedVisaWorkflowID\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"eventRefName\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .customer }\",\"time\":50},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT123M\",\"total\":\"PT33M\"},\"actionExecTimeout\":\"PT777S\"}}")) // Delay State - assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"Hello State\"},\"timeDelay\":\"PT5S\"}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"StoreCarAuctionBid\"},\"timeDelay\":\"PT5S\"}")) // Event State 
- assert.True(t, strings.Contains(string(b), "{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"transition\":{\"nextState\":\"ParallelExec\"},\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) // Parallel State - assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) // Switch State assert.True(t, strings.Contains(string(b), 
"{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"CheckCreditCallback\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"dataConditions\":null,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) // Foreach State - assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage }\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"transition\":{\"nextState\":\"HelloInject\"},\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage }\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) // Inject State - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"transition\":{\"nextState\":\"WaitForCompletionSleep\"},\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) // Sleep State - assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"transition\":{\"nextState\":\"GetJobStatus\"},\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) workflow = nil err = json.Unmarshal(b, &workflow) diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 3b0bcf3..71800b0 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -65,6 +65,7 @@ states: 
stateExecTimeout: total: PT1S single: PT2S + transition: ParallelExec - name: ParallelExec type: parallel completionType: atLeast @@ -84,6 +85,7 @@ states: total: PT1S single: PT2S numCompleted: 13 + transition: CheckVisaStatusSwitchEventBased - name: CheckVisaStatusSwitchEventBased type: switch eventConditions: @@ -163,6 +165,7 @@ states: stateExecTimeout: total: PT11S single: PT22S + transition: HelloInject - name: HelloInject type: inject data: @@ -171,6 +174,7 @@ states: stateExecTimeout: total: PT11M single: PT22M + transition: CheckCreditCallback - name: CheckCreditCallback type: callback action: @@ -197,10 +201,10 @@ states: stateExecTimeout: total: PT115M single: PT22M + transition: WaitForCompletionSleep - name: WaitForCompletionSleep type: sleep duration: PT5S - transition: GetJobStatus timeouts: stateExecTimeout: total: PT100S diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json index 5330bc5..9a434d4 100644 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ b/parser/testdata/workflows/greetings_sleep.sw.json @@ -20,7 +20,8 @@ "timeouts": { "stateExecTimeout": "PT10S" }, - "duration": "PT40S" + "duration": "PT40S", + "transition": "Greet" }, { "name": "Greet", From d6ebe51ce842368bac5cc81163c12553f0f3c6dc Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 14 Mar 2023 09:53:32 -0300 Subject: [PATCH 058/110] Add kubebuilder annotations and fix model descriptions (#160) * temo Signed-off-by: Spolti * Include kubebuilder validation annotations Signed-off-by: Spolti * review suggestions Signed-off-by: Spolti * review suggestions Signed-off-by: Spolti * review additions Signed-off-by: Spolti * Update model/function.go Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Signed-off-by: Spolti --------- Signed-off-by: Spolti Co-authored-by: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> --- model/action.go | 60 +++++++--- model/action_data_filter.go | 20 ++-- model/auth.go | 62 +++++++--- model/callback_state.go | 25 ++-- model/common.go | 1 + model/delay_state.go | 1 + model/event.go | 46 ++++--- model/event_data_filter.go | 13 +- model/event_state.go | 40 +++++-- model/foreach_state.go | 32 +++-- model/function.go | 18 ++- model/inject_state.go | 6 +- model/operation_state.go | 14 ++- model/parallel_state.go | 32 +++-- model/retry.go | 2 + model/sleep_state.go | 4 + model/state_exec_timeout.go | 2 + model/states.go | 74 ++++++++---- model/switch_state.go | 55 ++++++--- model/workflow.go | 213 +++++++++++++++++++++++---------- model/workflow_ref.go | 15 ++- model/zz_generated.deepcopy.go | 1 + 22 files changed, 517 insertions(+), 219 deletions(-) diff --git a/model/action.go b/model/action.go index f33bcaf..ddf026b 100644 --- a/model/action.go +++ b/model/action.go @@ -22,27 +22,43 @@ import ( // Action specify invocations of services or other workflows during workflow execution. type Action struct { - // ID defines Unique action identifier + // Defines Unique action identifier. + // +optional ID string `json:"id,omitempty"` - // Name defines Unique action definition name + // Defines Unique action name. + // +optional Name string `json:"name,omitempty"` - // FunctionRef references a reusable function definition + // References a reusable function definition. 
+ // +optional FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // EventRef references a 'trigger' and 'result' reusable event definitions + // References a 'trigger' and 'result' reusable event definitions. + // +optional EventRef *EventRef `json:"eventRef,omitempty"` - // References a sub-workflow to be executed + // References a workflow to be invoked. + // +optional SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` - // Sleep Defines time period workflow execution should sleep before / after function execution + // Defines time period workflow execution should sleep before / after function execution. + // +optional Sleep *Sleep `json:"sleep,omitempty"` - // RetryRef References a defined workflow retry definition. If not defined the default retry policy is assumed + // References a defined workflow retry definition. If not defined uses the default runtime retry definition. + // +optional RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. Used only when `autoRetries` is set to `true` + // List of unique references to defined workflow errors for which the action should not be retried. + // Used only when `autoRetries` is set to `true` + // +optional NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. Used only when `autoRetries` is set to `false` + // List of unique references to defined workflow errors for which the action should be retried. + // Used only when `autoRetries` is set to `false` + // +optional RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Action data filter + // Filter the state data to select only the data that can be used within function definition arguments + // using its fromStateData property. Filter the action results to select only the result data that should + // be added/merged back into the state data using its results property. Select the part of state data which + // the action data results should be added/merged to using the toStateData property. + // +optional ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false + // Expression, if defined, must evaluate to true for this action to be performed. If false, action is disregarded. + // +optional Condition string `json:"condition,omitempty"` } @@ -65,16 +81,20 @@ func (a *Action) UnmarshalJSON(data []byte) error { // FunctionRef defines the reference to a reusable function definition type FunctionRef struct { - // Name of the referenced function + // Name of the referenced function. + // +kubebuilder:validation:Required RefName string `json:"refName" validate:"required"` - // Function arguments + // Arguments (inputs) to be passed to the referenced function + // +optional // TODO: validate it as required if function type is graphql Arguments map[string]Object `json:"arguments,omitempty"` - // String containing a valid GraphQL selection set + // Used if function type is graphql. String containing a valid GraphQL selection set. // TODO: validate it as required if function type is graphql + // +optional SelectionSet string `json:"selectionSet,omitempty"` - // Invoke specifies if the subflow should be invoked sync or async. - // Defaults to sync. + // Specifies if the function should be invoked sync or async. Default is sync. 
+ // +kubebuilder:validation:Enum=async;sync + // +kubebuilder:default=sync Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` } @@ -113,8 +133,12 @@ func (f *FunctionRef) UnmarshalJSON(data []byte) error { // Sleep defines time periods workflow execution should sleep before & after function execution type Sleep struct { - // Before defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. + // Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + // Does not apply if 'eventRef' is defined. + // +optional Before string `json:"before,omitempty" validate:"omitempty,iso8601duration"` - // After defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. + // Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + // Does not apply if 'eventRef' is defined. + // +optional After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/action_data_filter.go b/model/action_data_filter.go index 4dcf6ac..ffd478f 100644 --- a/model/action_data_filter.go +++ b/model/action_data_filter.go @@ -21,17 +21,23 @@ import ( ) // ActionDataFilter used to filter action data results. +// +optional +// +optional type ActionDataFilter struct { - // Workflow expression that selects state data that the state action can use + // Workflow expression that filters state data that can be used by the action. + // +optional + // +optional FromStateData string `json:"fromStateData,omitempty"` - - // UseResults represent where action data results is added/merged to state data. If it's false, results & toStateData should be ignored. - // Defaults to true. + // If set to false, action data results are not added/merged to state data. In this case 'results' + // and 'toStateData' should be ignored. Default is true. + // +optional UseResults bool `json:"useResults,omitempty"` - - // Workflow expression that filters the actions' data results + // Workflow expression that filters the actions data results. + // +optional Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element + // Workflow expression that selects a state data element to which the action results should be + // added/merged into. If not specified denotes the top-level state data element. + // +optional ToStateData string `json:"toStateData,omitempty"` } diff --git a/model/auth.go b/model/auth.go index e885eae..41565d9 100644 --- a/model/auth.go +++ b/model/auth.go @@ -20,7 +20,7 @@ import ( "strings" ) -// AuthType ... +// AuthType can be "basic", "bearer", or "oauth2". Default is "basic" type AuthType string const ( @@ -44,13 +44,21 @@ const ( GrantTypeTokenExchange GrantType = "tokenExchange" ) -// Auth ... +// Auth definitions can be used to define authentication information that should be applied to resources +// defined in the operation property of function definitions. It is not used as authentication information +// for the function invocation, but just to access the resource containing the function invocation information. type Auth struct { - // Name Unique auth definition name + // Unique auth definition name. 
+ // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` - // Scheme Defines the auth type - Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` - // Properties ... + // Auth scheme, can be "basic", "bearer", or "oauth2". Default is "basic" + // +kubebuilder:validation:Enum=basic;bearer;oauth2 + // +kubebuilder:default=basic + // +kubebuilder:validation:Required + Scheme AuthType `json:"scheme" validate:"min=1"` + // Auth scheme properties. Can be one of "Basic properties definition", "Bearer properties definition", + // or "OAuth2 properties definition" + // +kubebuilder:validation:Required Properties AuthProperties `json:"properties" validate:"required"` } @@ -145,10 +153,13 @@ type AuthProperties struct { type BasicAuthProperties struct { Common `json:",inline"` // Secret Expression referencing a workflow secret that contains all needed auth info + // +optional Secret string `json:"secret,omitempty"` // Username String or a workflow expression. Contains the username + // +kubebuilder:validation:Required Username string `json:"username" validate:"required"` // Password String or a workflow expression. Contains the user password + // +kubebuilder:validation:Required Password string `json:"password" validate:"required"` } @@ -177,8 +188,10 @@ func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { type BearerAuthProperties struct { Common `json:",inline"` // Secret Expression referencing a workflow secret that contains all needed auth info + // +optional Secret string `json:"secret,omitempty"` // Token String or a workflow expression. Contains the token + // +kubebuilder:validation:Required Token string `json:"token" validate:"required"` } @@ -203,29 +216,42 @@ func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { // OAuth2AuthProperties OAuth2 information type OAuth2AuthProperties struct { Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info + // Expression referencing a workflow secret that contains all needed auth info. + // +optional Secret string `json:"secret,omitempty"` - // Authority String or a workflow expression. Contains the authority information + // String or a workflow expression. Contains the authority information. + // +optional Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` - // GrantType Defines the grant type + // Defines the grant type. Can be "password", "clientCredentials", or "tokenExchange" + // +kubebuilder:validation:Enum=password;clientCredentials;tokenExchange + // +kubebuilder:validation:Required GrantType GrantType `json:"grantType" validate:"required"` - // ClientID String or a workflow expression. Contains the client identifier + // String or a workflow expression. Contains the client identifier. + // +kubebuilder:validation:Required ClientID string `json:"clientId" validate:"required"` - // ClientSecret Workflow secret or a workflow expression. Contains the client secret + // Workflow secret or a workflow expression. Contains the client secret. + // +optional ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` - // Scopes Array containing strings or workflow expressions. Contains the OAuth2 scopes + // Array containing strings or workflow expressions. Contains the OAuth2 scopes. + // +optional Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` - // Username String or a workflow expression. Contains the username. 
Used only if grantType is 'resourceOwner' + // String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner'. + // +optional Username string `json:"username,omitempty" validate:"omitempty,min=1"` - // Password String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner' + // String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner'. + // +optional Password string `json:"password,omitempty" validate:"omitempty,min=1"` - // Audiences Array containing strings or workflow expressions. Contains the OAuth2 audiences + // Array containing strings or workflow expressions. Contains the OAuth2 audiences. + // +optional Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` - // SubjectToken String or a workflow expression. Contains the subject token + // String or a workflow expression. Contains the subject token. + // +optional SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` - // RequestedSubject String or a workflow expression. Contains the requested subject + // String or a workflow expression. Contains the requested subject. + // +optional RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` - // RequestedIssuer String or a workflow expression. Contains the requested issuer + // String or a workflow expression. Contains the requested issuer. + // +optional RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` } diff --git a/model/callback_state.go b/model/callback_state.go index 5a05f8a..f35ec38 100644 --- a/model/callback_state.go +++ b/model/callback_state.go @@ -18,16 +18,19 @@ import ( "encoding/json" ) -// CallbackState executes a function and waits for callback event that indicates -// completion of the task. +// CallbackState executes a function and waits for callback event that indicates completion of the task. type CallbackState struct { - // Defines the action to be executed + // Defines the action to be executed. + // +kubebuilder:validation:Required Action Action `json:"action" validate:"required"` - // References a unique callback event name in the defined workflow events + // References a unique callback event name in the defined workflow events. + // +kubebuilder:validation:Required EventRef string `json:"eventRef" validate:"required"` // Time period to wait for incoming events (ISO 8601 format) + // +optional Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - // Event data filter + // Event data filter definition. 
+ // +optional EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` } @@ -45,7 +48,13 @@ func (c *CallbackState) MarshalJSON() ([]byte, error) { // CallbackStateTimeout defines timeout settings for callback state type CallbackStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // Default single actions definition execution timeout (ISO 8601 duration format) + // +optional + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default timeout for consuming defined events (ISO 8601 duration format) + // +optional + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/common.go b/model/common.go index 28b2045..6a9be3b 100644 --- a/model/common.go +++ b/model/common.go @@ -17,6 +17,7 @@ package model // Common schema for Serverless Workflow specification type Common struct { // Metadata information + // +optional Metadata Metadata `json:"metadata,omitempty"` } diff --git a/model/delay_state.go b/model/delay_state.go index 942216a..3227e74 100644 --- a/model/delay_state.go +++ b/model/delay_state.go @@ -19,6 +19,7 @@ import "encoding/json" // DelayState Causes the workflow execution to delay for a specified duration type DelayState struct { // Amount of time (ISO 8601 format) to delay + // +kubebuilder:validation:Required TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` } diff --git a/model/event.go b/model/event.go index 8aac9ae..f366eec 100644 --- a/model/event.go +++ b/model/event.go @@ -32,19 +32,25 @@ const ( // Event used to define events and their correlations type Event struct { Common `json:",inline"` - // Unique event name + // Unique event name. + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` - // CloudEvent source + // CloudEvent source. + // +optional Source string `json:"source,omitempty"` - // CloudEvent type + // CloudEvent type. + // +kubebuilder:validation:Required Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. - // Defaults to `consumed` + // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Defaults to `consumed`. + // +kubebuilder:validation:Enum=consumed;produced + // +kubebuilder:default=consumed Kind EventKind `json:"kind,omitempty"` - // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload and context attributes should be accessible" - // Defaults to true + // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload + // and context attributes should be accessible. Defaults to true. + // +optional DataOnly bool `json:"dataOnly,omitempty"` - // CloudEvent correlation definitions + // Define event correlation rules for this event. Only used for consumed events. + // +optional Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` } @@ -68,26 +74,36 @@ func (e *Event) UnmarshalJSON(data []byte) error { // Correlation define event correlation rules for an event. 
Only used for `consumed` events type Correlation struct { // CloudEvent Extension Context Attribute name + // +kubebuilder:validation:Required ContextAttributeName string `json:"contextAttributeName" validate:"required"` // CloudEvent Extension Context Attribute value + // +optional ContextAttributeValue string `json:"contextAttributeValue,omitempty"` } // EventRef defining invocation of a function via event type EventRef struct { - // Reference to the unique name of a 'produced' event definition + // Reference to the unique name of a 'produced' event definition. + // +kubebuilder:validation:Required TriggerEventRef string `json:"triggerEventRef" validate:"required"` // Reference to the unique name of a 'consumed' event definition + // +kubebuilder:validation:Required ResultEventRef string `json:"resultEventRef" validate:"required"` - // ResultEventTimeout defines maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the actionExecutionTimeout + // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it will be set to the + // actionExecutionTimeout + // +optional ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by 'triggerEventRef'. - // If object type, a custom object to become the data (payload) of the event referenced by 'triggerEventRef'. + // If string type, an expression which selects parts of the states data output to become the data (payload) + // of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + // of the event referenced by triggerEventRef. + // +optional Data *Object `json:"data,omitempty"` - // Add additional extension context attributes to the produced event + // Add additional extension context attributes to the produced event. + // +optional ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` - // Invoke specifies if the subflow should be invoked sync or async. - // Defaults to sync. + // Specifies if the function should be invoked sync or async. Default is sync. + // +kubebuilder:validation:Enum=async;sync + // +kubebuilder:default=sync Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` } diff --git a/model/event_data_filter.go b/model/event_data_filter.go index 2f5b093..b0b3e59 100644 --- a/model/event_data_filter.go +++ b/model/event_data_filter.go @@ -22,13 +22,16 @@ import ( ) // EventDataFilter used to filter consumed event payloads. type EventDataFilter struct { - // UseData represent where event payload is added/merged to state data. If it's false, data & toStateData - // should be ignored. Defaults to true. + // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' + // should be ignored. Default is true. + // +optional UseData bool `json:"useData,omitempty"` - // Workflow expression that filters of the event data (payload) + // Workflow expression that filters the event data (payload). + // +optional Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the event payload should be added/merged into. - // If not specified, denotes, the top-level state data element + // Workflow expression that selects a state data element to which the event payload should be added/merged into.
+ // If not specified denotes the top-level state data element + // +optional ToStateData string `json:"toStateData,omitempty"` } diff --git a/model/event_state.go b/model/event_state.go index aeb3e59..d5068d0 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -19,17 +19,21 @@ import ( "fmt" ) -// EventState used to wait for events from event sources, then consumes them and invoke one or more actions to run in sequence or parallel +// EventState await one or more events and perform actions when they are received. If defined as the +// workflow starting state, the event state definition controls when the workflow instances should be created. type EventState struct { // TODO: EventState doesn't have usedForCompensation field. - // If true consuming one of the defined events causes its associated actions to be performed. - // If false all the defined events must be consumed in order for actions to be performed - // Defaults to true + // If true consuming one of the defined events causes its associated actions to be performed. If false all + // the defined events must be consumed in order for actions to be performed. Defaults to true. + // +kubebuilder:default=true + // +optional Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed + // Define the events to be consumed and optional actions to be performed. + // +kubebuilder:validation:MinItems=1 OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts + // State specific timeouts. + // +optional Timeouts *EventStateTimeout `json:"timeouts,omitempty"` } @@ -63,14 +67,18 @@ func (e *EventState) UnmarshalJSON(data []byte) error { // OnEvents define which actions are be performed for the one or more events. type OnEvents struct { - // References one or more unique event names in the defined workflow events + // References one or more unique event names in the defined workflow events. + // +kubebuilder:validation:MinItems=1 EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Specifies how actions are to be performed (in sequence or parallel) - // Defaults to sequential + // Should actions be performed sequentially or in parallel. Default is sequential. 
+ // +kubebuilder:validation:Enum=sequential;parallel + // +kubebuilder:default=sequential ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` // Actions to be performed if expression matches + // +optional Actions []Action `json:"actions,omitempty" validate:"omitempty,dive"` - // Event data filter + // eventDataFilter defines the callback event data filter definition + // +optional EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` } @@ -94,7 +102,13 @@ func (o *OnEvents) UnmarshalJSON(data []byte) error { // EventStateTimeout defines timeout settings for event state type EventStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // Default single actions definition execution timeout (ISO 8601 duration format) + // +optional + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default timeout for consuming defined events (ISO 8601 duration format) + // +optional + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/foreach_state.go b/model/foreach_state.go index 099c989..a2ce19b 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -33,19 +33,29 @@ const ( // ForEachState used to execute actions for each element of a data set. type ForEachState struct { - // Workflow expression selecting an array element of the states data + // Workflow expression selecting an array element of the states' data. + // +kubebuilder:validation:Required InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration + // Workflow expression specifying an array element of the states data to add the results of each iteration. + // +optional OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array + // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, + // this param should contain a unique element of the inputCollection array. + // +optional IterationParam string `json:"iterationParam,omitempty"` - // Specifies how upper bound on how many iterations may run in parallel + // Specifies how many iterations may run in parallel at the same time. Used if mode property is set to + // parallel (default). If not specified, its value should be the size of the inputCollection. + // +optional BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection + // Actions to be executed for each of the elements of inputCollection. + // +kubebuilder:validation:MinItems=1 Actions []Action `json:"actions,omitempty" validate:"required,min=1,dive"` - // State specific timeout + // State specific timeout. 
+ // +optional Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Mode Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel + // Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel. + // +kubebuilder:validation:Enum=sequential;parallel + // +kubebuilder:default=parallel Mode ForEachModeType `json:"mode,omitempty"` } @@ -78,6 +88,10 @@ func (f *ForEachState) UnmarshalJSON(data []byte) error { // ForEachStateTimeout defines timeout settings for foreach state type ForEachStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // Default single actions definition execution timeout (ISO 8601 duration format) + // +optional + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/function.go b/model/function.go index 9f69c1a..385034b 100644 --- a/model/function.go +++ b/model/function.go @@ -44,12 +44,20 @@ type FunctionType string type Function struct { Common `json:",inline"` // Unique function name + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` - // If type is `rest`, #. If type is `rpc`, ##. - // If type is `expression`, defines the workflow expression. If the type is `custom`, #. - Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `odata` or `custom`. Default is `rest` + // If type is `rest`, #. + // If type is `rpc`, ##. + // If type is `expression`, defines the workflow expression. If the type is `custom`, + // #. + // +kubebuilder:validation:Required Operation string `json:"operation" validate:"required,oneof=rest rpc expression"` + // Defines the function type. Is either `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `odata` or `custom`. + // Default is `rest`. + // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;asyncapi;odata;custom + // +kubebuilder:default=rest Type FunctionType `json:"type,omitempty"` - // References an auth definition name to be used to access to resource defined in the operation parameter + // References an auth definition name to be used to access to resource defined in the operation parameter. + // +optional AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` } diff --git a/model/inject_state.go b/model/inject_state.go index 681ac63..a195423 100644 --- a/model/inject_state.go +++ b/model/inject_state.go @@ -20,9 +20,11 @@ import ( // InjectState used to inject static data into state data input.
type InjectState struct { - // JSON object which can be set as states data input and can be manipulated via filters + // JSON object which can be set as state's data input and can be manipulated via filter + // +kubebuilder:validation:MinProperties=1 Data map[string]Object `json:"data" validate:"required,min=1"` // State specific timeouts + // +optional Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` } @@ -40,5 +42,7 @@ func (i *InjectState) MarshalJSON() ([]byte, error) { // InjectStateTimeout defines timeout settings for inject state type InjectStateTimeout struct { + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } diff --git a/model/operation_state.go b/model/operation_state.go index 388adfe..050676f 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -20,11 +20,15 @@ import ( // OperationState defines a set of actions to be performed in sequence or in parallel. type OperationState struct { - // Specifies whether actions are performed in sequence or in parallel, defaults to sequential + // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. + // +kubebuilder:validation:Enum=sequential;parallel + // +kubebuilder:default=sequential ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` // Actions to be performed + // +kubebuilder:validation:MinItems=1 Actions []Action `json:"actions" validate:"required,min=1,dive"` // State specific timeouts + // +optional Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` } @@ -59,6 +63,10 @@ func (o *OperationState) UnmarshalJSON(data []byte) error { // OperationStateTimeout defines the specific timeout settings for operation state type OperationStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Defines workflow state execution timeout. + // +optional + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // Default single actions definition execution timeout (ISO 8601 duration format) + // +optional + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/parallel_state.go b/model/parallel_state.go index 53bce0f..55d95f6 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -34,15 +34,20 @@ const ( // ParallelState Consists of a number of states that are executed in parallel type ParallelState struct { - // Branch Definitions + // List of branches for this parallel state. + // +kubebuilder:validation:MinItems=1 Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. - // Defaults to `allOf` + // Option types on how to complete branch execution. Defaults to `allOf`. + // +kubebuilder:validation:Enum=allOf;atLeast + // +kubebuilder:default=allOf CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneof=allOf atLeast"` - // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." + // Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete + // in order for the state to transition/end. 
+ // +optional // TODO: change this field to unmarshal result as int NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` // State specific timeouts + // +optional Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` } @@ -83,23 +88,32 @@ func (ps *ParallelState) UnmarshalJSON(b []byte) error { // Branch Definition type Branch struct { // Branch name + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` // Actions to be executed in this branch + // +kubebuilder:validation:MinItems=1 Actions []Action `json:"actions" validate:"required,min=1,dive"` - // Timeouts State specific timeouts + // Branch specific timeout settings + // +optional Timeouts *BranchTimeouts `json:"timeouts,omitempty"` } // BranchTimeouts defines the specific timeout settings for branch type BranchTimeouts struct { - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) + // Single actions definition execution timeout duration (ISO 8601 duration format) + // +optional ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) + // Single branch execution timeout duration (ISO 8601 duration format) + // +optional BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` } // ParallelStateTimeout defines the specific timeout settings for parallel state type ParallelStateTimeout struct { - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // Default single branch execution timeout (ISO 8601 duration format) + // +optional + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` } diff --git a/model/retry.go b/model/retry.go index 7430adb..cda444c 100644 --- a/model/retry.go +++ b/model/retry.go @@ -23,6 +23,7 @@ import ( // Retry ... type Retry struct { // Unique retry strategy name + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` // Time delay between retry attempts (ISO 8601 duration format) Delay string `json:"delay,omitempty" validate:"omitempty,iso8601duration"` @@ -33,6 +34,7 @@ type Retry struct { // Numeric value, if specified the delay between retries is multiplied by this value. Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` // Maximum number of retry attempts. + // +kubebuilder:validation:Required MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) // TODO: make iso8601duration compatible this type diff --git a/model/sleep_state.go b/model/sleep_state.go index 7eddd41..5d144c5 100644 --- a/model/sleep_state.go +++ b/model/sleep_state.go @@ -21,8 +21,10 @@ import ( // SleepState suspends workflow execution for a given time duration. 
type SleepState struct { // Duration (ISO 8601 duration format) to sleep + // +kubebuilder:validation:Required Duration string `json:"duration" validate:"required,iso8601duration"` // Timeouts State specific timeouts + // +optional Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` } @@ -40,5 +42,7 @@ func (s *SleepState) MarshalJSON() ([]byte, error) { // SleepStateTimeout defines timeout settings for sleep state type SleepStateTimeout struct { + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` } diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index 75a5c87..5de5f8c 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -23,8 +23,10 @@ import ( // StateExecTimeout defines workflow state execution timeout type StateExecTimeout struct { // Single state execution timeout, not including retries (ISO 8601 duration format) + // +optional Single string `json:"single,omitempty" validate:"omitempty,iso8601duration"` // Total state execution timeout, including retries (ISO 8601 duration format) + // +kubebuilder:validation:Required Total string `json:"total" validate:"required,iso8601duration"` } diff --git a/model/states.go b/model/states.go index 9862651..d6b4495 100644 --- a/model/states.go +++ b/model/states.go @@ -46,24 +46,36 @@ const ( // BaseState ... type BaseState struct { - // Unique State id + // Unique State id. + // +optional ID string `json:"id,omitempty"` - // State name + // State name. + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` - // State type + // stateType can be any of delay, callback, event, foreach, inject, operation, parallel, sleep, switch + // +kubebuilder:validation:Enum:=delay;callback;event;foreach;inject;operation;parallel;sleep;switch + // +kubebuilder:validation:Required Type StateType `json:"type" validate:"required"` - // States error handling and retries definitions - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` - // Next transition of the workflow after the time delay + // States error handling and retries definitions. + // +optional + OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` + // Next transition of the workflow after the time delay. + // +optional Transition *Transition `json:"transition,omitempty"` - // State data filter + // State data filter. + // +optional StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` - // Unique Name of a workflow state which is responsible for compensation of this state + // Unique Name of a workflow state which is responsible for compensation of this state. + // +optional CompensatedBy string `json:"compensatedBy,omitempty"` - // If true, this state is used to compensate another state. Default is false + // If true, this state is used to compensate another state. Default is false. + // +optional UsedForCompensation bool `json:"usedForCompensation,omitempty"` - // State end definition - End *End `json:"end,omitempty"` + // State end definition. + // +optional + End *End `json:"end,omitempty"` + // Metadata information. 
+ // +optional Metadata *Metadata `json:"metadata,omitempty"` } @@ -115,21 +127,39 @@ func (b *BaseState) UnmarshalJSON(data []byte) error { if err := unmarshalKey("metadata", baseState, &b.Metadata); err != nil { return err } - return nil } type State struct { - BaseState `json:",omitempty"` - *DelayState `json:",omitempty"` - *EventState `json:",omitempty"` - *OperationState `json:",omitempty"` - *ParallelState `json:",omitempty"` - *SwitchState `json:",omitempty"` - *ForEachState `json:",omitempty"` - *InjectState `json:",omitempty"` - *CallbackState `json:",omitempty"` - *SleepState `json:",omitempty"` + BaseState `json:",inline"` + // delayState Causes the workflow execution to delay for a specified duration. + // +optional + *DelayState `json:"delayState,omitempty"` + // event states await one or more events and perform actions when they are received. If defined as the + // workflow starting state, the event state definition controls when the workflow instances should be created. + // +optional + *EventState `json:"eventState,omitempty"` + // operationState defines a set of actions to be performed in sequence or in parallel. + // +optional + *OperationState `json:"operationState,omitempty"` + // parallelState Consists of a number of states that are executed in parallel. + // +optional + *ParallelState `json:"parallelState,omitempty"` + // switchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. + // +optional + *SwitchState `json:"switchState,omitempty"` + // forEachState used to execute actions for each element of a data set. + // +optional + *ForEachState `json:"forEachState,omitempty"` + // injectState used to inject static data into state data input. + // +optional + *InjectState `json:"injectState,omitempty"` + // callbackState executes a function and waits for callback event that indicates completion of the task. + // +optional + *CallbackState `json:"callbackState,omitempty"` + // sleepState suspends workflow execution for a given time duration. + // +optional + *SleepState `json:"sleepState,omitempty"` } func (s *State) MarshalJSON() ([]byte, error) { diff --git a/model/switch_state.go b/model/switch_state.go index cc630bc..1e87110 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -22,14 +22,17 @@ import ( type SwitchState struct { // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. - // Default transition of the workflow if there is no matching data conditions. Can include a transition or end definition - // Required - DefaultCondition DefaultCondition `json:"defaultCondition"` - // Defines conditions evaluated against events - EventConditions []EventCondition `json:"eventConditions" validate:"omitempty,min=1,dive"` + // Default transition of the workflow if there is no matching data conditions. Can include a transition or + // end definition. + DefaultCondition DefaultCondition `json:"defaultCondition" validate:"required_without=EventConditions"` + // Defines conditions evaluated against events. 
+ // +optional + EventConditions []EventCondition `json:"eventConditions" validate:"required_without=DefaultCondition"` // Defines conditions evaluated against data + // +optional DataConditions []DataCondition `json:"dataConditions" validate:"omitempty,min=1,dive"` // SwitchState specific timeouts + // +optional Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` } @@ -47,29 +50,41 @@ func (s *SwitchState) MarshalJSON() ([]byte, error) { // DefaultCondition Can be either a transition or end definition type DefaultCondition struct { + // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). + // Each state can define a transition definition that is used to determine which state to transition to next. + // +optional Transition *Transition `json:"transition,omitempty"` - End *End `json:"end,omitempty"` + // If this state an end state + // +optional + End *End `json:"end,omitempty"` } // SwitchStateTimeout defines the specific timeout settings for switch state type SwitchStateTimeout struct { + // Default workflow state execution timeout (ISO 8601 duration format) + // +optional StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - - // EventTimeout specify the expire value to transitions to defaultCondition - // when event-based conditions do not arrive. + // Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. // NOTE: this is only available for EventConditions + // +optional EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` } // EventCondition specify events which the switch state must wait for. type EventCondition struct { - // Event condition name + // Event condition name. + // +optional Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events + // References a unique event name in the defined workflow events. + // +kubebuilder:validation:Required EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition + // Event data filter definition. + // +optional EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` - Metadata Metadata `json:"metadata,omitempty"` + // Metadata information. + // +optional + Metadata Metadata `json:"metadata,omitempty"` + // TODO End or Transition needs to be exclusive tag, one or another should be set. // Explicit transition to end End *End `json:"end" validate:"omitempty"` // Workflow transition if condition is evaluated to true @@ -79,12 +94,16 @@ type EventCondition struct { // DataCondition specify a data-based condition statement which causes a transition to another workflow state // if evaluated to true. type DataCondition struct { - // Data condition name + // Data condition name. + // +optional Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false - Condition string `json:"condition" validate:"required"` - Metadata Metadata `json:"metadata,omitempty"` - + // Workflow expression evaluated against state data. Must evaluate to true or false. + // +kubebuilder:validation:Required + Condition string `json:"condition" validate:"required"` + // Metadata information. + // +optional + Metadata Metadata `json:"metadata,omitempty"` + // TODO End or Transition needs to be exclusive tag, one or another should be set. 
// Explicit transition to end End *End `json:"end" validate:"omitempty"` // Workflow transition if condition is evaluated to true diff --git a/model/workflow.go b/model/workflow.go index c61e3ea..0c0fa34 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -43,52 +43,83 @@ const ( ) const ( - // DefaultExpressionLang ... - DefaultExpressionLang = "jq" - // UnlimitedTimeout description for unlimited timeouts UnlimitedTimeout = "unlimited" ) +type ExpressionLangType string + +const ( + //JqExpressionLang ... + JqExpressionLang ExpressionLangType = "jq" + + // JsonPathExpressionLang ... + JsonPathExpressionLang ExpressionLangType = "jsonpath" +) + // BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces // to make it easy for custom unmarshalers implementations to unmarshal the common data structure. type BaseWorkflow struct { // Workflow unique identifier + // +optional ID string `json:"id,omitempty" validate:"required_without=Key"` // Key Domain-specific workflow identifier + // +optional Key string `json:"key,omitempty" validate:"required_without=ID"` // Workflow name Name string `json:"name,omitempty"` - // Workflow description + // Workflow description. + // +optional Description string `json:"description,omitempty"` - // Workflow version + // Workflow version. + // +optional Version string `json:"version" validate:"omitempty,min=1"` - Start *Start `json:"start,omitempty"` - // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important qualities + // Workflow start definition. + // +optional + Start *Start `json:"start,omitempty"` + // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important + // qualities. + // +optional Annotations []string `json:"annotations,omitempty"` // DataInputSchema URI of the JSON Schema used to validate the workflow data input + // +optional DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` // Serverless Workflow schema version - SpecVersion string `json:"specVersion,omitempty" validate:"required"` - // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. + // +kubebuilder:validation:Required + SpecVersion string `json:"specVersion" validate:"required"` + // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, + // inside your Workflow Expressions. + // +optional Secrets Secrets `json:"secrets,omitempty"` - // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. + // Constants Workflow constants are used to define static, and immutable, data which is available to + // Workflow Expressions. + // +optional Constants *Constants `json:"constants,omitempty"` - // Identifies the expression language used for workflow expressions. Default is 'jq' - ExpressionLang string `json:"expressionLang,omitempty" validate:"omitempty,min=1"` - // Timeouts definition for Workflow, State, Action, Branch, and Event consumption. + // Identifies the expression language used for workflow expressions. Default is 'jq'. + // +kubebuilder:validation:Enum=jq;jsonpath + // +kubebuilder:default=jq + // +optional + ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"omitempty,min=1,oneof=jq jsonpath"` + // Defines the workflow default timeout settings. 
+ // +optional Timeouts *Timeouts `json:"timeouts,omitempty"` - // Errors declarations for this Workflow definition + // Defines checked errors that can be explicitly handled during workflow execution. + // +optional Errors []Error `json:"errors,omitempty"` - // If 'true', workflow instances is not terminated when there are no active execution paths. Instance can be terminated via 'terminate end definition' or reaching defined 'execTimeout' + // If "true", workflow instances is not terminated when there are no active execution paths. + // Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" + // +optional KeepActive bool `json:"keepActive,omitempty"` - // Metadata custom information shared with the runtime + // Metadata custom information shared with the runtime. + // +optional Metadata Metadata `json:"metadata,omitempty"` // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false + // +optional AutoRetries bool `json:"autoRetries,omitempty"` - // Auth definitions can be used to define authentication information that should be applied to resources defined in the operation - // property of function definitions. It is not used as authentication information for the function invocation, - // but just to access the resource containing the function invocation information. + // Auth definitions can be used to define authentication information that should be applied to resources defined + // in the operation property of function definitions. It is not used as authentication information for the + // function invocation, but just to access the resource containing the function invocation information. + // +optional Auth AuthArray `json:"auth,omitempty" validate:"omitempty"` } @@ -132,10 +163,14 @@ func (r *AuthArray) unmarshalMany(data []byte) error { // Workflow base definition type Workflow struct { BaseWorkflow - States []State `json:"states" validate:"required,min=1,dive"` - Events []Event `json:"events,omitempty"` + // +kubebuilder:validation:MinItems=1 + States []State `json:"states" validate:"required,min=1,dive"` + // +optional + Events []Event `json:"events,omitempty"` + // +optional Functions []Function `json:"functions,omitempty"` - Retries []Retry `json:"retries,omitempty" validate:"dive"` + // +optional + Retries []Retry `json:"retries,omitempty" validate:"dive"` } // UnmarshalJSON implementation for json Unmarshal function for the Workflow type @@ -241,21 +276,27 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { func (w *Workflow) setDefaults() { if len(w.ExpressionLang) == 0 { - w.ExpressionLang = DefaultExpressionLang + w.ExpressionLang = JqExpressionLang } } // Timeouts ... type Timeouts struct { - // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' + // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should + // be 'unlimited'. + // +optional WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` - // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format) + // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format). 
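A rough in-package test sketch of the default applied by `setDefaults` above; the test name and placement are made up for illustration:

```go
package model

import "testing"

// Sketch only: an empty expressionLang should fall back to jq, mirroring setDefaults above.
func TestSetDefaultsExpressionLang(t *testing.T) {
	w := Workflow{}
	w.setDefaults()
	if w.ExpressionLang != JqExpressionLang {
		t.Fatalf("expected %q, got %q", JqExpressionLang, w.ExpressionLang)
	}
}
```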
+ // +optional StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) + // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format). + // +optional ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) + // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format). + // +optional BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` - // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format) + // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format). + // +optional EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` } @@ -292,14 +333,18 @@ func (t *Timeouts) UnmarshalJSON(data []byte) error { return nil } -// WorkflowExecTimeout ... +// WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration +// format. If not defined, the workflow execution should be given "unlimited" amount of time to complete. type WorkflowExecTimeout struct { - // Duration Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' - Duration string `json:"duration,omitempty" validate:"omitempty,min=1"` - // If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is - // abrupted terminated. + // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. + // +kubebuilder:default=unlimited + Duration string `json:"duration" validate:"required,min=1"` + // If false, workflow instance is allowed to finish current execution. If true, current workflow execution + // is stopped immediately. Default is false. + // +optional Interrupt bool `json:"interrupt,omitempty"` - // Name of a workflow state to be executed before workflow instance is terminated + // Name of a workflow state to be executed before workflow instance is terminated. + // +optional RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` } @@ -330,18 +375,27 @@ func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { // Error declaration for workflow definitions type Error struct { - // Name Domain-specific error name + // Name Domain-specific error name. + // +kubebuilder:validation:Required Name string `json:"name" validate:"required"` - // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. Should not be defined if error is set to '*' + // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. + // Should not be defined if error is set to '*'. + // +optional Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // OnError description + // OnError description. 
+ // +optional Description string `json:"description,omitempty"` } // Start definition type Start struct { - StateName string `json:"stateName" validate:"required"` - Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` + // Name of the starting workflow state + // +kubebuilder:validation:Required + StateName string `json:"stateName" validate:"required"` + // Define the recurring time intervals or cron expressions at which workflow instances should be automatically + // started. + // +optional + Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` } // UnmarshalJSON ... @@ -366,10 +420,17 @@ func (s *Start) UnmarshalJSON(data []byte) error { // Schedule ... type Schedule struct { - // Time interval (must be repeating interval) described with ISO 8601 format. Declares when workflow instances will be automatically created. + // TODO Interval is required if Cron is not set and vice-versa, make a exclusive validation + // A recurring time interval expressed in the derivative of ISO 8601 format specified below. Declares that + // workflow instances should be automatically created at the start of each time interval in the series. + // +optional Interval string `json:"interval,omitempty"` - Cron *Cron `json:"cron,omitempty"` - // Timezone name used to evaluate the interval & cron-expression. (default: UTC) + // Cron expression defining when workflow instances should be automatically created. + // optional + Cron *Cron `json:"cron,omitempty"` + // Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time + // w/ timezone then proper timezone conversion will be applied. (default: UTC). + // +optional Timezone string `json:"timezone,omitempty"` } @@ -399,10 +460,12 @@ func (s *Schedule) UnmarshalJSON(data []byte) error { // Cron ... type Cron struct { - // Repeating interval (cron expression) describing when the workflow instance should be created + // Cron expression describing when the workflow instance should be created (automatically). + // +kubebuilder:validation:Required Expression string `json:"expression" validate:"required"` - // Specific date and time (ISO 8601 format) when the cron expression invocation is no longer valid - ValidUntil string `json:"validUntil,omitempty"` + // Specific date and time (ISO 8601 format) when the cron expression is no longer valid. + // +optional + ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601duration"` } // UnmarshalJSON custom unmarshal function for Cron @@ -422,13 +485,18 @@ func (c *Cron) UnmarshalJSON(data []byte) error { return nil } -// Transition ... +// Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). +// Each state can define a transition definition that is used to determine which state to transition to next. type Transition struct { - // Name of state to transition to + // Name of the state to transition to next. + // +kubebuilder:validation:Required NextState string `json:"nextState" validate:"required,min=1"` - // Array of events to be produced before the transition happens + // Array of producedEvent definitions. Events to be produced before the transition takes place. + // +optional ProduceEvents []ProduceEvent `json:"produceEvents,omitempty" validate:"omitempty,dive"` - // If set to true, triggers workflow compensation when before this transition is taken. Default is false + // If set to true, triggers workflow compensation before this transition is taken. Default is false. 
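A small hypothetical sketch exercising the start, schedule and cron definitions above; the state name and cron expression are placeholders, and the import path is assumed to be the v2 module:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Hypothetical start definition: begin at GreetState every 15 minutes, evaluated in UTC.
	data := []byte(`{
		"stateName": "GreetState",
		"schedule": {
			"cron": {"expression": "0 0/15 * * * ?"},
			"timezone": "UTC"
		}
	}`)

	var start model.Start
	if err := json.Unmarshal(data, &start); err != nil {
		panic(err)
	}
	fmt.Println(start.StateName, start.Schedule.Cron.Expression)
}
```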
+ // +kubebuilder:default=false + // +optional Compensate bool `json:"compensate,omitempty"` } @@ -470,12 +538,18 @@ type OnError struct { // End definition type End struct { - // If true, completes all execution flows in the given workflow instance + // If true, completes all execution flows in the given workflow instance. + // +optional Terminate bool `json:"terminate,omitempty"` - // Defines events that should be produced + // Array of producedEvent definitions. Defines events that should be produced. + // +optional ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` - // If set to true, triggers workflow compensation. Default is false - Compensate bool `json:"compensate,omitempty"` + // If set to true, triggers workflow compensation before workflow execution completes. Default is false. + // +optional + Compensate bool `json:"compensate,omitempty"` + // Defines that current workflow execution should stop, and execution should continue as a new workflow + // instance of the provided id + // +optional ContinueAs *ContinueAs `json:"continueAs,omitempty"` } @@ -500,13 +574,18 @@ func (e *End) UnmarshalJSON(data []byte) error { // ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) type ContinueAs struct { // Unique id of the workflow to continue execution as. + // +kubebuilder:validation:Required WorkflowID string `json:"workflowId" validate:"required"` // Version of the workflow to continue execution as. + // +optional Version string `json:"version,omitempty"` // If string type, an expression which selects parts of the states data output to become the workflow data input of // continued execution. If object type, a custom object to become the workflow data input of the continued execution + // +optional Data Object `json:"data,omitempty"` - // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. Overwrites any specific settings set by that workflow + // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. + // Overwrites any specific settings set by that workflow + // +optional WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` } @@ -537,15 +616,19 @@ func (c *ContinueAs) UnmarshalJSON(data []byte) error { return fmt.Errorf("continueAs value '%s' is not supported, it must be an object or string", string(data)) } -// ProduceEvent ... +// ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a +// workflow transitions. The eventRef property must match the name of one of the defined produced events in the +// events definition. type ProduceEvent struct { - // References a name of a defined event + // Reference to a defined unique event name in the events definition + // +kubebuilder:validation:Required EventRef string `json:"eventRef" validate:"required"` - // TODO: add object or string data type // If String, expression which selects parts of the states data output to become the data of the produced event. // If object a custom object to become the data of produced event. - Data string `json:"data,omitempty"` - // Add additional event extension context attributes + // +optional + Data Object `json:"data,omitempty"` + // Add additional event extension context attributes. 
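A hypothetical sketch combining the `End` and `ContinueAs` definitions above; the workflow id and version are placeholders:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Hypothetical end definition that hands execution off to another workflow instance.
	data := []byte(`{
		"terminate": true,
		"continueAs": {"workflowId": "handleApprovedVisaWorkflow", "version": "1.0"}
	}`)

	var end model.End
	if err := json.Unmarshal(data, &end); err != nil {
		panic(err)
	}
	fmt.Println(end.Terminate, end.ContinueAs.WorkflowID)
}
```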
+ // +optional ContextAttributes map[string]string `json:"contextAttributes,omitempty"` } @@ -557,10 +640,12 @@ type StateDataFilter struct { Output string `json:"output,omitempty"` } -// DataInputSchema ... +// DataInputSchema Used to validate the workflow data input against a defined JSON Schema type DataInputSchema struct { - Schema string `json:"schema" validate:"required"` - FailOnValidationErrors *bool `json:"failOnValidationErrors" validate:"required"` + // +kubebuilder:validation:Required + Schema string `json:"schema" validate:"required"` + // +kubebuilder:validation:Required + FailOnValidationErrors *bool `json:"failOnValidationErrors" validate:"required"` } // UnmarshalJSON ... @@ -584,7 +669,8 @@ func (d *DataInputSchema) UnmarshalJSON(data []byte) error { return nil } -// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. +// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your +// Workflow Expressions. type Secrets []string // UnmarshalJSON ... @@ -606,6 +692,7 @@ func (s *Secrets) UnmarshalJSON(data []byte) error { // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. type Constants struct { // Data represents the generic structure of the constants value + // +optional Data map[string]json.RawMessage `json:",omitempty"` } diff --git a/model/workflow_ref.go b/model/workflow_ref.go index b77da15..04cc6c6 100644 --- a/model/workflow_ref.go +++ b/model/workflow_ref.go @@ -23,16 +23,21 @@ import ( // WorkflowRef holds a reference for a workflow definition type WorkflowRef struct { // Sub-workflow unique id + // +kubebuilder:validation:Required WorkflowID string `json:"workflowId" validate:"required"` // Sub-workflow version + // +optional Version string `json:"version,omitempty"` - - // Invoke specifies if the subflow should be invoked sync or async. + // Specifies if the subflow should be invoked sync or async. // Defaults to sync. + // +kubebuilder:validation:Enum=async;sync + // +kubebuilder:default=sync + // +optional Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` - - // OnParantComplete specifies how subflow execution should behave when parent workflow completes if invoke is 'async'。 - // Defaults to terminate. + // onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + // is 'async'. Defaults to terminate. + // +kubebuilder:validation:Enum=terminate;continue + // +kubebuilder:default=terminate OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` } diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 93ce084..41da8f5 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -1170,6 +1170,7 @@ func (in *ParallelStateTimeout) DeepCopy() *ParallelStateTimeout { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ProduceEvent) DeepCopyInto(out *ProduceEvent) { *out = *in + in.Data.DeepCopyInto(&out.Data) if in.ContextAttributes != nil { in, out := &in.ContextAttributes, &out.ContextAttributes *out = make(map[string]string, len(*in)) From d4781e3863d33d26a322d9373b6c22ef75a49fab Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Mon, 20 Mar 2023 09:40:19 -0300 Subject: [PATCH 059/110] Allow the DefaultCondition on events to be string or object (#163) fixes https://github.com/serverlessworkflow/sdk-go/issues/157 Signed-off-by: Spolti --- kubernetes/k8s_workflow_integration.go | 15 ++++++- model/function.go | 4 +- model/object.go | 8 ++-- model/retry.go | 1 + model/switch_state.go | 43 +++++++++++++------ model/workflow.go | 26 +++++------ parser/parser_test.go | 29 +++++++++++-- .../workflows/greetings-v08-spec.sw.yaml | 10 +++++ 8 files changed, 97 insertions(+), 39 deletions(-) diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/k8s_workflow_integration.go index af8b484..1733611 100644 --- a/kubernetes/k8s_workflow_integration.go +++ b/kubernetes/k8s_workflow_integration.go @@ -26,13 +26,24 @@ import ( // github.com/serverlessworkflow/sdk-go/model/event.go:51:2: encountered struct field "" without JSON tag in type "Event" // github.com/serverlessworkflow/sdk-go/model/states.go:66:12: unsupported AST kind *ast.InterfaceType +// States should be objects that will be in the same array even if it belongs to +// different types. An issue similar to the below will happen when trying to deploy your custom CR: +// strict decoding error: unknown field "spec.states[0].dataConditions" +// To make the CRD is compliant to the specs there are two options, +// a flat struct with all states fields at the same level, +// or use the // +kubebuilder:pruning:PreserveUnknownFields +// kubebuilder validator and delegate the validation to the sdk-go validator using the admission webhook. +// TODO add a webhook example + // ServerlessWorkflowSpec defines a base API for integration test with operator-sdk type ServerlessWorkflowSpec struct { - BaseWorkflow model.BaseWorkflow `json:"inline"` + BaseWorkflow model.BaseWorkflow `json:",inline"` Events []model.Event `json:"events,omitempty"` Functions []model.Function `json:"functions,omitempty"` Retries []model.Retry `json:"retries,omitempty"` - States []model.State `json:"states"` + // +kubebuilder:validation:MinItems=1 + // +kubebuilder:pruning:PreserveUnknownFields + States []model.State `json:"states"` } // ServerlessWorkflow ... diff --git a/model/function.go b/model/function.go index 385034b..c48dfeb 100644 --- a/model/function.go +++ b/model/function.go @@ -52,9 +52,9 @@ type Function struct { // #. // +kubebuilder:validation:Required Operation string `json:"operation" validate:"required,oneof=rest rpc expression"` - // Defines the function type. Is either `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `asyncapi` or `asyncapi`. + // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `asyncapi` or `asyncapi`. // Default is `rest`. - // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;asyncapi;asyncapi;asyncapi + // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;asyncapi;asyncapi;asyncapi;custom // +kubebuilder:default=rest Type FunctionType `json:"type,omitempty"` // References an auth definition name to be used to access to resource defined in the operation parameter. 
diff --git a/model/object.go b/model/object.go index 074b3dd..614b396 100644 --- a/model/object.go +++ b/model/object.go @@ -33,10 +33,10 @@ import ( // // +kubebuilder:validation:Type=object type Object struct { - Type Type `json:",inline"` - IntVal int32 `json:",inline"` - StrVal string `json:",inline"` - RawValue json.RawMessage `json:",inline"` + Type Type `json:"type,inline"` + IntVal int32 `json:"intVal,inline"` + StrVal string `json:"strVal,inline"` + RawValue json.RawMessage `json:"rawValue,inline"` } type Type int64 diff --git a/model/retry.go b/model/retry.go index cda444c..6ce8277 100644 --- a/model/retry.go +++ b/model/retry.go @@ -32,6 +32,7 @@ type Retry struct { // Static value by which the delay increases during each attempt (ISO 8601 time format) Increment string `json:"increment,omitempty" validate:"omitempty,iso8601duration"` // Numeric value, if specified the delay between retries is multiplied by this value. + // +optional Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` // Maximum number of retry attempts. // +kubebuilder:validation:Required diff --git a/model/switch_state.go b/model/switch_state.go index 1e87110..dc6a971 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -36,6 +36,36 @@ type SwitchState struct { Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` } +// DefaultCondition Can be either a transition or end definition +type DefaultCondition struct { + // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). + // Each state can define a transition definition that is used to determine which state to transition to next. + // +optional + Transition *Transition `json:"transition,omitempty"` + // If this state an end state + // +optional + End *End `json:"end,omitempty"` +} + +// UnmarshalJSON ... +func (e *DefaultCondition) UnmarshalJSON(data []byte) error { + type defCondUnmarshal DefaultCondition + + obj, str, err := primitiveOrStruct[string, defCondUnmarshal](data) + if err != nil { + return err + } + + if obj == nil { + transition := &Transition{NextState: str} + e.Transition = transition + } else { + *e = DefaultCondition(*obj) + } + + return nil +} + func (s *SwitchState) MarshalJSON() ([]byte, error) { type Alias SwitchState custom, err := json.Marshal(&struct { @@ -48,17 +78,6 @@ func (s *SwitchState) MarshalJSON() ([]byte, error) { return custom, err } -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - // Each state can define a transition definition that is used to determine which state to transition to next. 
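A hedged sketch of the behaviour this unmarshaler adds: a bare state name now decodes into a transition, while the object form keeps working. The state names come from the test data in this patch; the snippet itself is illustrative and assumes the v2 module path:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// String form: a bare state name becomes a transition to that state.
	var fromString model.DefaultCondition
	if err := json.Unmarshal([]byte(`"SendTextForHighPriority"`), &fromString); err != nil {
		panic(err)
	}
	fmt.Println(fromString.Transition.NextState) // SendTextForHighPriority

	// Object form keeps working exactly as before.
	var fromObject model.DefaultCondition
	if err := json.Unmarshal([]byte(`{"transition": {"nextState": "CheckCreditCallback"}}`), &fromObject); err != nil {
		panic(err)
	}
	fmt.Println(fromObject.Transition.NextState) // CheckCreditCallback
}
```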
- // +optional - Transition *Transition `json:"transition,omitempty"` - // If this state an end state - // +optional - End *End `json:"end,omitempty"` -} - // SwitchStateTimeout defines the specific timeout settings for switch state type SwitchStateTimeout struct { // Default workflow state execution timeout (ISO 8601 duration format) @@ -107,5 +126,5 @@ type DataCondition struct { // Explicit transition to end End *End `json:"end" validate:"omitempty"` // Workflow transition if condition is evaluated to true - Transition *Transition `json:"transition" validate:"omitempty"` + Transition *Transition `json:"transition,omitempty" validate:"omitempty"` } diff --git a/model/workflow.go b/model/workflow.go index 0c0fa34..354d357 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -86,6 +86,7 @@ type BaseWorkflow struct { DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` // Serverless Workflow schema version // +kubebuilder:validation:Required + // +kubebuilder:default="0.8" SpecVersion string `json:"specVersion" validate:"required"` // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, // inside your Workflow Expressions. @@ -501,26 +502,19 @@ type Transition struct { } // UnmarshalJSON ... -func (t *Transition) UnmarshalJSON(data []byte) error { - transitionMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &transitionMap); err != nil { - t.NextState, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } +func (e *Transition) UnmarshalJSON(data []byte) error { + type defTransitionUnmarshal Transition - if err := unmarshalKey("compensate", transitionMap, &t.Compensate); err != nil { - return err - } - if err := unmarshalKey("produceEvents", transitionMap, &t.ProduceEvents); err != nil { - return err - } - if err := unmarshalKey("nextState", transitionMap, &t.NextState); err != nil { + obj, str, err := primitiveOrStruct[string, defTransitionUnmarshal](data) + if err != nil { return err } + if obj == nil { + e.NextState = str + } else { + *e = Transition(*obj) + } return nil } diff --git a/parser/parser_test.go b/parser/parser_test.go index a11106f..27d430d 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -16,6 +16,7 @@ package parser import ( "encoding/json" + "fmt" "os" "path/filepath" "strings" @@ -556,6 +557,14 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) assert.Equal(t, true, w.States[9].End.Terminate) + + // switch state with DefaultCondition as string + assert.NotEmpty(t, w.States[10].SwitchState) + assert.Equal(t, "HelloStateWithDefaultConditionString", w.States[10].Name) + assert.Equal(t, "${ true }", w.States[10].SwitchState.DataConditions[0].Condition) + assert.Equal(t, "HandleApprovedVisa", w.States[10].SwitchState.DataConditions[0].Transition.NextState) + assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) + assert.Equal(t, true, w.States[10].End.Terminate) }, }, } @@ -815,7 +824,17 @@ states: single: PT20S defaultCondition: transition: - nextState: CheckCreditCallback + nextState: HelloStateWithDefaultConditionString +- name: HelloStateWithDefaultConditionString + type: switch + dataConditions: + - condition: ${ true } + transition: + nextState: HandleApprovedVisa + - condition: ${ false } + transition: + nextState: HandleRejectedVisa + 
defaultCondition: SendTextForHighPriority - name: SendTextForHighPriority type: foreach inputCollection: "${ .messages }" @@ -911,6 +930,7 @@ states: terminate: true `)) assert.Nil(t, err) + fmt.Println(err) assert.NotNil(t, workflow) b, err := json.Marshal(workflow) @@ -936,7 +956,10 @@ states: assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) // Switch State - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"CheckCreditCallback\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"dataConditions\":null,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"dataConditions\":null,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) + + // Switch State with string DefaultCondition + assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"eventConditions\":null,\"dataConditions\":[{\"condition\":\"${ true }\",\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) // Foreach State assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"transition\":{\"nextState\":\"HelloInject\"},\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage 
}\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) @@ -973,7 +996,7 @@ states: nextState: HandleRejectedVisa defaultCondition: transition: - nextState: HandleNoVisaDecision + nextState: HandleApprovedVisa - name: HandleApprovedVisa type: operation actions: diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 71800b0..13b0d75 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -211,3 +211,13 @@ states: single: PT200S end: terminate: true + - name: HelloStateWithDefaultConditionString + type: switch + dataConditions: + - condition: ${ true } + transition: HandleApprovedVisa + - condition: ${ false } + transition: + nextState: HandleRejectedVisa + defaultCondition: SendTextForHighPriority + end: true \ No newline at end of file From 337db7af9b9562f6b0f80ad0f1ab345ae3883c2f Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Tue, 11 Apr 2023 11:58:29 -0300 Subject: [PATCH 060/110] Relax OpenAPI validation for some objects that can be Unmarshaled as two different types (#167) --- model/states.go | 4 ++++ model/switch_state.go | 12 ++++++++++++ model/workflow.go | 22 ++++++++++++++++++---- 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/model/states.go b/model/states.go index d6b4495..67bcfa9 100644 --- a/model/states.go +++ b/model/states.go @@ -60,6 +60,8 @@ type BaseState struct { // +optional OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` // Next transition of the workflow after the time delay. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Transition *Transition `json:"transition,omitempty"` // State data filter. @@ -72,6 +74,8 @@ type BaseState struct { // +optional UsedForCompensation bool `json:"usedForCompensation,omitempty"` // State end definition. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional End *End `json:"end,omitempty"` // Metadata information. diff --git a/model/switch_state.go b/model/switch_state.go index dc6a971..58cc8b5 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -40,9 +40,13 @@ type SwitchState struct { type DefaultCondition struct { // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). // Each state can define a transition definition that is used to determine which state to transition to next. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Transition *Transition `json:"transition,omitempty"` // If this state an end state + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional End *End `json:"end,omitempty"` } @@ -101,12 +105,20 @@ type EventCondition struct { // +optional EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` // Metadata information. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Metadata Metadata `json:"metadata,omitempty"` // TODO End or Transition needs to be exclusive tag, one or another should be set. 
// Explicit transition to end + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields + // +optional End *End `json:"end" validate:"omitempty"` // Workflow transition if condition is evaluated to true + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields + // +optional Transition *Transition `json:"transition" validate:"omitempty"` } diff --git a/model/workflow.go b/model/workflow.go index 354d357..e6724eb 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -26,8 +26,8 @@ type InvokeKind string const ( // InvokeKindSync meaning that worfklow execution should wait until the target completes. InvokeKindSync InvokeKind = "sync" - - // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its completion. + // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its + // completion. InvokeKindAsync InvokeKind = "async" ) @@ -75,6 +75,8 @@ type BaseWorkflow struct { // +optional Version string `json:"version" validate:"omitempty,min=1"` // Workflow start definition. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Start *Start `json:"start,omitempty"` // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important @@ -112,6 +114,8 @@ type BaseWorkflow struct { // +optional KeepActive bool `json:"keepActive,omitempty"` // Metadata custom information shared with the runtime. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Metadata Metadata `json:"metadata,omitempty"` // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false @@ -120,6 +124,8 @@ type BaseWorkflow struct { // Auth definitions can be used to define authentication information that should be applied to resources defined // in the operation property of function definitions. It is not used as authentication information for the // function invocation, but just to access the resource containing the function invocation information. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // +optional Auth AuthArray `json:"auth,omitempty" validate:"omitempty"` } @@ -524,9 +530,17 @@ type OnError struct { ErrorRef string `json:"errorRef,omitempty"` // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used ErrorRefs []string `json:"errorRefs,omitempty"` - // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if retries were unsuccessful. + // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if + // retries were unsuccessful. + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields + // +optional Transition *Transition `json:"transition,omitempty"` - // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if retries were unsuccessful. + // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if + // retries were unsuccessful. 
+ // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields + // +optional End *End `json:"end,omitempty"` } From ee78bbaad2d252c12daea5da61151d491eaa3c9b Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 13 Apr 2023 09:10:00 -0300 Subject: [PATCH 061/110] add json tag in the BaseWorkflow (#169) --- kubernetes/k8s_workflow_integration.go | 8 +------- model/workflow.go | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/k8s_workflow_integration.go index 1733611..0f929c0 100644 --- a/kubernetes/k8s_workflow_integration.go +++ b/kubernetes/k8s_workflow_integration.go @@ -37,13 +37,7 @@ import ( // ServerlessWorkflowSpec defines a base API for integration test with operator-sdk type ServerlessWorkflowSpec struct { - BaseWorkflow model.BaseWorkflow `json:",inline"` - Events []model.Event `json:"events,omitempty"` - Functions []model.Function `json:"functions,omitempty"` - Retries []model.Retry `json:"retries,omitempty"` - // +kubebuilder:validation:MinItems=1 - // +kubebuilder:pruning:PreserveUnknownFields - States []model.State `json:"states"` + model.Workflow `json:",inline"` } // ServerlessWorkflow ... diff --git a/model/workflow.go b/model/workflow.go index e6724eb..bc3a517 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -169,7 +169,7 @@ func (r *AuthArray) unmarshalMany(data []byte) error { // Workflow base definition type Workflow struct { - BaseWorkflow + BaseWorkflow `json:",inline"` // +kubebuilder:validation:MinItems=1 States []State `json:"states" validate:"required,min=1,dive"` // +optional From b74604fe14ce964dc6591e1b24cce5f72551cbd8 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 27 Apr 2023 16:27:11 -0300 Subject: [PATCH 062/110] [States] unknown field defaultCondition (#170) Signed-off-by: Spolti --- model/workflow.go | 1 + 1 file changed, 1 insertion(+) diff --git a/model/workflow.go b/model/workflow.go index bc3a517..282da8c 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -171,6 +171,7 @@ func (r *AuthArray) unmarshalMany(data []byte) error { type Workflow struct { BaseWorkflow `json:",inline"` // +kubebuilder:validation:MinItems=1 + // +kubebuilder:pruning:PreserveUnknownFields States []State `json:"states" validate:"required,min=1,dive"` // +optional Events []Event `json:"events,omitempty"` From 2ee5933d08e2c36d9c48b545895ee9580f43f064 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Wed, 3 May 2023 17:53:56 -0300 Subject: [PATCH 063/110] Refactor in favor to use `primitiveOrStruct`. (#159) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * change unmarshalString to primitiveOrStruct Signed-off-by: AndrΓ© R. de Miranda * Add unit test for DefaultCondition UnmarshalJSON Signed-off-by: AndrΓ© R. de Miranda * Improve switch case Signed-off-by: AndrΓ© R. de Miranda * Change unmarshall array/object/file and change error messages Signed-off-by: AndrΓ© R. de Miranda * Check response.Write error Signed-off-by: AndrΓ© R. de Miranda * Move default values to the function ApplyDefault() Signed-off-by: AndrΓ© R. de Miranda * Change the go version to 1.20 on workflows Signed-off-by: AndrΓ© R. de Miranda * uri scheme empty the default is file:// Signed-off-by: AndrΓ© R. de Miranda * Change the golang version to 1.20.2. Fix lint Signed-off-by: AndrΓ© R. de Miranda * Rebase from main branch Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions. 
Add a new interface for the types used in const, that aux the validation and error handling. Signed-off-by: AndrΓ© R. de Miranda * Refactor unmarshalObjectOrFile and unmarshalArrayOrFile Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions. Rename interface Kinds to Kind Signed-off-by: AndrΓ© R. de Miranda * Accept external resource as object or array Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions. Support golang 1.19 and external resource Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions Signed-off-by: AndrΓ© R. de Miranda * Revision suggestions. Add more tests Signed-off-by: AndrΓ© R. de Miranda * Remove tag #nosec in model/util.go Co-authored-by: Filippe Spolti Signed-off-by: AndrΓ© R. de Miranda * Rename AuthArray to Auths, and ErrorArray to Errors. Improve loadExternalResource. Clean state json. Add state type validation oneofkind Signed-off-by: AndrΓ© R. de Miranda * Return json tags in state Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda Co-authored-by: Filippe Spolti --- .../Go-SDK-Check-k8s-integration.yaml | 2 +- .github/workflows/Go-SDK-PR-Check.yaml | 2 +- model/action.go | 67 +-- model/action_data_filter.go | 31 +- model/action_data_filter_test.go | 2 +- model/action_test.go | 62 +++ model/auth.go | 143 +----- model/auth_test.go | 24 +- model/event.go | 42 +- model/event_data_filter.go | 30 +- model/event_data_filter_test.go | 2 +- model/event_state.go | 37 +- model/event_test.go | 6 +- model/foreach_state.go | 32 +- model/foreach_state_test.go | 2 +- model/foreach_state_validator_test.go | 8 +- model/object.go | 2 +- model/operation_state.go | 18 +- model/parallel_state.go | 26 +- model/state_exec_timeout.go | 35 +- model/state_exec_timeout_test.go | 6 +- model/states.go | 94 ++-- model/switch_state.go | 18 +- model/switch_state_test.go | 95 ++++ model/util.go | 274 ++++++++--- model/util_test.go | 290 +++++++++-- model/workflow.go | 454 +++++------------- model/workflow_ref.go | 45 +- model/workflow_ref_test.go | 6 +- model/workflow_test.go | 423 +++++++++++++++- model/workflow_validator.go | 2 +- model/workflow_validator_test.go | 2 +- model/zz_generated.deepcopy.go | 17 +- parser/parser_test.go | 4 +- .../testdata/applicationrequestfunctions.json | 14 +- .../testdata/applicationrequestretries.json | 12 +- parser/testdata/eventdefs.yml | 1 - parser/testdata/functiondefs.json | 28 +- parser/testdata/secrets.json | 1 + .../workflows/applicationrequest-issue69.json | 2 +- .../workflows/applicationrequest.rp.json | 4 +- .../workflows/eventbasedgreeting.sw.p.json | 2 +- .../greetings-constants-file.sw.yaml | 2 +- .../workflows/greetings-secret-file.sw.yaml | 2 +- .../workflows/paymentconfirmation.json | 4 +- .../roomreadings.timeouts.file.sw.json | 2 +- parser/testdata/workflows/urifiles/auth.json | 28 +- validator/validator.go | 23 + validator/validator_test.go | 58 +++ 49 files changed, 1519 insertions(+), 967 deletions(-) diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml b/.github/workflows/Go-SDK-Check-k8s-integration.yaml index 2b8c5e5..a2286d1 100644 --- a/.github/workflows/Go-SDK-Check-k8s-integration.yaml +++ b/.github/workflows/Go-SDK-Check-k8s-integration.yaml @@ -32,7 +32,7 @@ jobs: - name: Checkout Code uses: actions/checkout@v3 - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: ${{ env.GO_VERSION }} id: go diff --git a/.github/workflows/Go-SDK-PR-Check.yaml 
b/.github/workflows/Go-SDK-PR-Check.yaml index 6d204af..8cfd8b1 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -32,7 +32,7 @@ jobs: - name: Checkout Code uses: actions/checkout@v3 - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: ${{ env.GO_VERSION }} id: go diff --git a/model/action.go b/model/action.go index ddf026b..5037ed1 100644 --- a/model/action.go +++ b/model/action.go @@ -14,12 +14,6 @@ package model -import ( - "bytes" - "encoding/json" - "fmt" -) - // Action specify invocations of services or other workflows during workflow execution. type Action struct { // Defines Unique action identifier. @@ -62,21 +56,17 @@ type Action struct { Condition string `json:"condition,omitempty"` } -type actionForUnmarshal Action +type actionUnmarshal Action // UnmarshalJSON implements json.Unmarshaler func (a *Action) UnmarshalJSON(data []byte) error { + a.ApplyDefault() + return unmarshalObject("action", data, (*actionUnmarshal)(a)) +} - v := actionForUnmarshal{ - ActionDataFilter: ActionDataFilter{UseResults: true}, - } - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("action value '%s' is not supported, it must be an object or string", string(data)) - } - *a = Action(v) - - return nil +// ApplyDefault set the default values for Action +func (a *Action) ApplyDefault() { + a.ActionDataFilter.ApplyDefault() } // FunctionRef defines the reference to a reusable function definition @@ -95,40 +85,20 @@ type FunctionRef struct { // Specifies if the function should be invoked sync or async. Default is sync. // +kubebuilder:validation:Enum=async;sync // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` } -type functionRefForUnmarshal FunctionRef +type functionRefUnmarshal FunctionRef // UnmarshalJSON implements json.Unmarshaler func (f *FunctionRef) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - var err error - switch data[0] { - case '"': - f.RefName, err = unmarshalString(data) - if err != nil { - return err - } - f.Invoke = InvokeKindSync - return nil - case '{': - v := functionRefForUnmarshal{ - Invoke: InvokeKindSync, - } - err = json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("functionRef value '%s' is not supported, it must be an object or string", string(data)) - } - *f = FunctionRef(v) - return nil - } + f.ApplyDefault() + return unmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) +} - return fmt.Errorf("functionRef value '%s' is not supported, it must be an object or string", string(data)) +// ApplyDefault set the default values for Function Ref +func (f *FunctionRef) ApplyDefault() { + f.Invoke = InvokeKindSync } // Sleep defines time periods workflow execution should sleep before & after function execution @@ -142,3 +112,10 @@ type Sleep struct { // +optional After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` } + +type sleepUnmarshal Sleep + +// UnmarshalJSON implements json.Unmarshaler +func (s *Sleep) UnmarshalJSON(data []byte) error { + return unmarshalObject("sleep", data, (*sleepUnmarshal)(s)) +} diff --git a/model/action_data_filter.go b/model/action_data_filter.go index ffd478f..16f1615 100644 --- a/model/action_data_filter.go +++ 
b/model/action_data_filter.go @@ -14,12 +14,6 @@ package model -import ( - "bytes" - "encoding/json" - "fmt" -) - // ActionDataFilter used to filter action data results. // +optional // +optional @@ -41,24 +35,15 @@ type ActionDataFilter struct { ToStateData string `json:"toStateData,omitempty"` } -type actionDataFilterForUnmarshal ActionDataFilter +type actionDataFilterUnmarshal ActionDataFilter // UnmarshalJSON implements json.Unmarshaler -func (f *ActionDataFilter) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - v := actionDataFilterForUnmarshal{ - UseResults: true, - } - err := json.Unmarshal(data, &v) - if err != nil { - // TODO: replace the error message with correct type's name - return err - } +func (a *ActionDataFilter) UnmarshalJSON(data []byte) error { + a.ApplyDefault() + return unmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) +} - *f = ActionDataFilter(v) - return nil +// ApplyDefault set the default values for Action Data Filter +func (a *ActionDataFilter) ApplyDefault() { + a.UseResults = true } diff --git a/model/action_data_filter_test.go b/model/action_data_filter_test.go index 54811fa..cae511a 100644 --- a/model/action_data_filter_test.go +++ b/model/action_data_filter_test.go @@ -61,7 +61,7 @@ func TestActionDataFilterUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"fromStateData": 1, "results": "2", "toStateData": "3"}`, expect: ActionDataFilter{}, - err: `json: cannot unmarshal number into Go struct field actionDataFilterForUnmarshal.fromStateData of type string`, + err: `actionDataFilter.fromStateData must be string`, }, } diff --git a/model/action_test.go b/model/action_test.go index 2301e12..5d0c7fb 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -15,6 +15,7 @@ package model import ( + "encoding/json" "testing" "github.com/stretchr/testify/assert" @@ -92,3 +93,64 @@ func TestSleepValidate(t *testing.T) { }) } } + +func TestFunctionRefUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect FunctionRef + err string + } + + testCases := []testCase{ + { + desp: "invalid object refName", + data: `{"refName": 1}`, + expect: FunctionRef{}, + err: "functionRef.refName must be string", + }, + { + desp: "object with refName", + data: `{"refName": "function name"}`, + expect: FunctionRef{ + RefName: "function name", + Invoke: InvokeKindSync, + }, + err: ``, + }, + { + desp: "object with refName and Invoke", + data: `{"refName": "function name", "invoke": "async"}`, + expect: FunctionRef{ + RefName: "function name", + Invoke: InvokeKindAsync, + }, + err: ``, + }, + { + desp: "refName string", + data: `"function name"`, + expect: FunctionRef{ + RefName: "function name", + Invoke: InvokeKindSync, + }, + err: ``, + }, + } + + for _, tc := range testCases[:1] { + t.Run(tc.desp, func(t *testing.T) { + var v FunctionRef + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Equal(t, tc.err, err.Error()) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/auth.go b/model/auth.go index 41565d9..9646633 100644 --- a/model/auth.go +++ b/model/auth.go @@ -62,59 +62,35 @@ type Auth struct { Properties AuthProperties `json:"properties" validate:"required"` } +type authUnmarshal Auth + // UnmarshalJSON Auth definition func (a *Auth) UnmarshalJSON(data []byte) error { - auth := make(map[string]json.RawMessage) - 
if err := json.Unmarshal(data, &auth); err != nil { - // it's a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - // call us recursively - if err := json.Unmarshal(file, &a); err != nil { - return err - } - return nil - } - if err := unmarshalKey("scheme", auth, &a.Scheme); err != nil { - return err - } - if err := unmarshalKey("name", auth, &a.Name); err != nil { + authTmp := struct { + authUnmarshal + PropertiesRaw json.RawMessage `json:"properties"` + }{} + + err := unmarshalObjectOrFile("auth", data, &authTmp) + if err != nil { return err } + *a = Auth(authTmp.authUnmarshal) if len(a.Scheme) == 0 { a.Scheme = AuthTypeBasic } switch a.Scheme { case AuthTypeBasic: - authProperties := &BasicAuthProperties{} - - if err := unmarshalKey("properties", auth, authProperties); err != nil { - return err - } - a.Properties.Basic = authProperties - - return nil - + a.Properties.Basic = &BasicAuthProperties{} + return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) case AuthTypeBearer: - authProperties := &BearerAuthProperties{} - if err := unmarshalKey("properties", auth, authProperties); err != nil { - return err - } - a.Properties.Bearer = authProperties - return nil - + a.Properties.Bearer = &BearerAuthProperties{} + return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) case AuthTypeOAuth2: - authProperties := &OAuth2AuthProperties{} - if err := unmarshalKey("properties", auth, authProperties); err != nil { - return err - } - a.Properties.OAuth2 = authProperties - return nil - + a.Properties.OAuth2 = &OAuth2AuthProperties{} + return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) default: return fmt.Errorf("failed to parse auth properties") } @@ -163,27 +139,6 @@ type BasicAuthProperties struct { Password string `json:"password" validate:"required"` } -// UnmarshalJSON ... -func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - return err - } - if err := unmarshalKey("username", properties, &b.Username); err != nil { - return err - } - if err := unmarshalKey("password", properties, &b.Password); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - if err := unmarshalKey("secret", properties, &b.Secret); err != nil { - return err - } - return nil -} - // BearerAuthProperties Bearer auth information type BearerAuthProperties struct { Common `json:",inline"` @@ -195,24 +150,6 @@ type BearerAuthProperties struct { Token string `json:"token" validate:"required"` } -// UnmarshalJSON ... -func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - return err - } - if err := unmarshalKey("token", properties, &b.Token); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - if err := unmarshalKey("secret", properties, &b.Secret); err != nil { - return err - } - return nil -} - // OAuth2AuthProperties OAuth2 information type OAuth2AuthProperties struct { Common `json:",inline"` @@ -256,51 +193,3 @@ type OAuth2AuthProperties struct { } // TODO: use reflection to unmarshal the keys and think on a generic approach to handle them - -// UnmarshalJSON ... 
-func (b *OAuth2AuthProperties) UnmarshalJSON(data []byte) error { - properties := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &properties); err != nil { - return err - } - if err := unmarshalKey("authority", properties, &b.Authority); err != nil { - return err - } - if err := unmarshalKey("grantType", properties, &b.GrantType); err != nil { - return err - } - if err := unmarshalKey("clientId", properties, &b.ClientID); err != nil { - return err - } - if err := unmarshalKey("clientSecret", properties, &b.ClientSecret); err != nil { - return err - } - if err := unmarshalKey("scopes", properties, &b.Scopes); err != nil { - return err - } - if err := unmarshalKey("username", properties, &b.Username); err != nil { - return err - } - if err := unmarshalKey("password", properties, &b.Password); err != nil { - return err - } - if err := unmarshalKey("audiences", properties, &b.Audiences); err != nil { - return err - } - if err := unmarshalKey("subjectToken", properties, &b.SubjectToken); err != nil { - return err - } - if err := unmarshalKey("requestedSubject", properties, &b.RequestedSubject); err != nil { - return err - } - if err := unmarshalKey("requestedIssuer", properties, &b.RequestedIssuer); err != nil { - return err - } - if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { - return err - } - if err := unmarshalKey("secret", properties, &b.Secret); err != nil { - return err - } - return nil -} diff --git a/model/auth_test.go b/model/auth_test.go index 6010dca..60602a2 100644 --- a/model/auth_test.go +++ b/model/auth_test.go @@ -24,19 +24,19 @@ import ( func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { t.Run("BearerAuthProperties", func(t *testing.T) { a1JSON := `{ - "name": "a1", - "scheme": "bearer", - "properties": { - "token": "token1" - } - }` + "name": "a1", + "scheme": "bearer", + "properties": { + "token": "token1" + } + }` a2JSON := `{ - "name": "a2", - "scheme": "bearer", - "properties": { - "token": "token2" - } - }` + "name": "a2", + "scheme": "bearer", + "properties": { + "token": "token2" + } + }` var a1 Auth err := json.Unmarshal([]byte(a1JSON), &a1) diff --git a/model/event.go b/model/event.go index f366eec..08545c5 100644 --- a/model/event.go +++ b/model/event.go @@ -14,10 +14,6 @@ package model -import ( - "encoding/json" -) - // EventKind defines this event as either `consumed` or `produced` type EventKind string @@ -54,21 +50,18 @@ type Event struct { Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` } -type eventForUnmarshal Event +type eventUnmarshal Event // UnmarshalJSON unmarshal Event object from json bytes func (e *Event) UnmarshalJSON(data []byte) error { - v := eventForUnmarshal{ - DataOnly: true, - Kind: EventKindConsumed, - } - err := json.Unmarshal(data, &v) - if err != nil { - return err - } + e.ApplyDefault() + return unmarshalObject("event", data, (*eventUnmarshal)(e)) +} - *e = Event(v) - return nil +// ApplyDefault set the default values for Event +func (e *Event) ApplyDefault() { + e.DataOnly = true + e.Kind = EventKindConsumed } // Correlation define event correlation rules for an event. Only used for `consumed` events @@ -104,21 +97,18 @@ type EventRef struct { // Specifies if the function should be invoked sync or async. Default is sync. 
// +kubebuilder:validation:Enum=async;sync // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` } -type eventRefForUnmarshal EventRef +type eventRefUnmarshal EventRef // UnmarshalJSON implements json.Unmarshaler func (e *EventRef) UnmarshalJSON(data []byte) error { - v := eventRefForUnmarshal{ - Invoke: InvokeKindSync, - } - err := json.Unmarshal(data, &v) - if err != nil { - return err - } + e.ApplyDefault() + return unmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) +} - *e = EventRef(v) - return nil +// ApplyDefault set the default values for Event Ref +func (e *EventRef) ApplyDefault() { + e.Invoke = InvokeKindSync } diff --git a/model/event_data_filter.go b/model/event_data_filter.go index b0b3e59..a69c7d3 100644 --- a/model/event_data_filter.go +++ b/model/event_data_filter.go @@ -14,12 +14,6 @@ package model -import ( - "bytes" - "encoding/json" - "fmt" -) - // EventDataFilter used to filter consumed event payloads. type EventDataFilter struct { // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' @@ -35,23 +29,15 @@ type EventDataFilter struct { ToStateData string `json:"toStateData,omitempty"` } -type eventDataFilterForUnmarshal EventDataFilter +type eventDataFilterUnmarshal EventDataFilter +// UnmarshalJSON implements json.Unmarshaler func (f *EventDataFilter) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - v := eventDataFilterForUnmarshal{ - UseData: true, - } - err := json.Unmarshal(data, &v) - if err != nil { - // TODO: replace the error message with correct type's name - return err - } + f.ApplyDefault() + return unmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) +} - *f = EventDataFilter(v) - return nil +// ApplyDefault set the default values for Event Data Filter +func (f *EventDataFilter) ApplyDefault() { + f.UseData = true } diff --git a/model/event_data_filter_test.go b/model/event_data_filter_test.go index 1d267ec..e4bf979 100644 --- a/model/event_data_filter_test.go +++ b/model/event_data_filter_test.go @@ -59,7 +59,7 @@ func TestEventDataFilterUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"data": 1, "toStateData": "2"}`, expect: EventDataFilter{}, - err: `json: cannot unmarshal number into Go struct field eventDataFilterForUnmarshal.data of type string`, + err: `eventDataFilter.data must be string`, }, } diff --git a/model/event_state.go b/model/event_state.go index d5068d0..1d6235a 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -16,7 +16,6 @@ package model import ( "encoding/json" - "fmt" ) // EventState await one or more events and perform actions when they are received. 
If defined as the @@ -49,20 +48,17 @@ func (e *EventState) MarshalJSON() ([]byte, error) { return custom, err } -type eventStateForUnmarshal EventState +type eventStateUnmarshal EventState // UnmarshalJSON unmarshal EventState object from json bytes func (e *EventState) UnmarshalJSON(data []byte) error { - v := eventStateForUnmarshal{ - Exclusive: true, - } - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("eventState value '%s' is not supported, it must be an object or string", string(data)) - } + e.ApplyDefault() + return unmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) +} - *e = EventState(v) - return nil +// ApplyDefault set the default values for Event State +func (e *EventState) ApplyDefault() { + e.Exclusive = true } // OnEvents define which actions are be performed for the one or more events. @@ -82,22 +78,17 @@ type OnEvents struct { EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` } -type onEventsForUnmarshal OnEvents +type onEventsUnmarshal OnEvents // UnmarshalJSON unmarshal OnEvents object from json bytes func (o *OnEvents) UnmarshalJSON(data []byte) error { - v := onEventsForUnmarshal{ - ActionMode: ActionModeSequential, - } - - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("onEvents value '%s' is not supported, it must be an object or string", string(data)) - } - - *o = OnEvents(v) + o.ApplyDefault() + return unmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) +} - return nil +// ApplyDefault set the default values for On Events +func (o *OnEvents) ApplyDefault() { + o.ActionMode = ActionModeSequential } // EventStateTimeout defines timeout settings for event state diff --git a/model/event_test.go b/model/event_test.go index 8f1665b..f557c61 100644 --- a/model/event_test.go +++ b/model/event_test.go @@ -49,7 +49,7 @@ func TestEventRefUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"invoke": 1}`, expect: EventRef{}, - err: `json: cannot unmarshal number into Go struct field eventRefForUnmarshal.invoke of type model.InvokeKind`, + err: `eventRef.invoke must be sync or async`, }, } for _, tc := range testCases { @@ -59,7 +59,7 @@ func TestEventRefUnmarshalJSON(t *testing.T) { if tc.err != "" { assert.Error(t, err) - assert.Regexp(t, tc.err, err) + assert.Equal(t, tc.err, err.Error()) return } @@ -99,7 +99,7 @@ func TestEventUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"dataOnly": "false", "kind": "produced"}`, expect: Event{}, - err: `json: cannot unmarshal string into Go struct field eventForUnmarshal.dataOnly of type bool`, + err: `event.dataOnly must be bool`, }, } for _, tc := range testCases { diff --git a/model/foreach_state.go b/model/foreach_state.go index a2ce19b..ad25b89 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -16,7 +16,6 @@ package model import ( "encoding/json" - "fmt" "k8s.io/apimachinery/pkg/util/intstr" ) @@ -24,6 +23,17 @@ import ( // ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) type ForEachModeType string +func (f ForEachModeType) KindValues() []string { + return []string{ + string(ForEachModeTypeSequential), + string(ForEachModeTypeParallel), + } +} + +func (f ForEachModeType) String() string { + return string(f) +} + const ( // ForEachModeTypeSequential specifies iterations should be done sequentially. 
ForEachModeTypeSequential ForEachModeType = "sequential" @@ -56,7 +66,7 @@ type ForEachState struct { // Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel. // +kubebuilder:validation:Enum=sequential;parallel // +kubebuilder:default=parallel - Mode ForEachModeType `json:"mode,omitempty"` + Mode ForEachModeType `json:"mode,omitempty" validate:"required,oneofkind"` } func (f *ForEachState) MarshalJSON() ([]byte, error) { @@ -71,19 +81,17 @@ func (f *ForEachState) MarshalJSON() ([]byte, error) { return custom, err } -type forEachStateForUnmarshal ForEachState +type forEachStateUnmarshal ForEachState +// UnmarshalJSON implements json.Unmarshaler func (f *ForEachState) UnmarshalJSON(data []byte) error { - v := forEachStateForUnmarshal{ - Mode: ForEachModeTypeParallel, - } - err := json.Unmarshal(data, &v) - if err != nil { - return fmt.Errorf("forEachState value '%s' is not supported, it must be an object or string", string(data)) - } + f.ApplyDefault() + return unmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) +} - *f = ForEachState(v) - return nil +// ApplyDefault set the default values for ForEach State +func (f *ForEachState) ApplyDefault() { + f.Mode = ForEachModeTypeParallel } // ForEachStateTimeout defines timeout settings for foreach state diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go index 3456935..a10f7a9 100644 --- a/model/foreach_state_test.go +++ b/model/foreach_state_test.go @@ -49,7 +49,7 @@ func TestForEachStateUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"mode": 1}`, expect: nil, - err: `forEachState value '{"mode": 1}' is not supported, it must be an object or string`, + err: `forEachState.mode must be sequential or parallel`, }, } for _, tc := range testCases { diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go index df01a32..1f6d5e7 100644 --- a/model/foreach_state_validator_test.go +++ b/model/foreach_state_validator_test.go @@ -34,7 +34,7 @@ func TestForEachStateStructLevelValidation(t *testing.T) { state: State{ BaseState: BaseState{ Name: "1", - Type: "2", + Type: StateTypeForEach, End: &End{ Terminate: true, }, @@ -54,7 +54,7 @@ func TestForEachStateStructLevelValidation(t *testing.T) { state: State{ BaseState: BaseState{ Name: "1", - Type: "2", + Type: StateTypeForEach, End: &End{ Terminate: true, }, @@ -78,7 +78,7 @@ func TestForEachStateStructLevelValidation(t *testing.T) { state: State{ BaseState: BaseState{ Name: "1", - Type: "2", + Type: StateTypeForEach, End: &End{ Terminate: true, }, @@ -102,7 +102,7 @@ func TestForEachStateStructLevelValidation(t *testing.T) { state: State{ BaseState: BaseState{ Name: "1", - Type: "2", + Type: StateTypeForEach, End: &End{ Terminate: true, }, diff --git a/model/object.go b/model/object.go index 614b396..a0e9fa0 100644 --- a/model/object.go +++ b/model/object.go @@ -68,7 +68,7 @@ func FromRaw(val interface{}) Object { return Object{Type: Raw, RawValue: custom} } -// UnmarshalJSON ... 
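`ForEachModeType` (and, further down, `StateType` and `InvokeKind`) now expose their legal values through the `Kind` contract this patch introduces. The sketch below is standalone and uses names of my own (`mode`, `oneOfKind`); the plain membership check only stands in for the `oneofkind` validator tag wired up elsewhere in the patch.

```go
package main

import "fmt"

// Kind matches the interface the patch adds in model/util.go: enum-like
// string types expose their legal values for validation and error messages.
type Kind interface {
	KindValues() []string
	String() string
}

// mode is an illustrative enum in the style of ForEachModeType.
type mode string

const (
	modeSequential mode = "sequential"
	modeParallel   mode = "parallel"
)

func (m mode) KindValues() []string {
	return []string{string(modeSequential), string(modeParallel)}
}

func (m mode) String() string { return string(m) }

// oneOfKind is a simple stand-in for the oneofkind tag: the value must be one
// of the declared kind values.
func oneOfKind(k Kind) error {
	for _, v := range k.KindValues() {
		if k.String() == v {
			return nil
		}
	}
	return fmt.Errorf("%q is not one of %v", k.String(), k.KindValues())
}

func main() {
	fmt.Println(oneOfKind(modeParallel))   // <nil>
	fmt.Println(oneOfKind(mode("serial"))) // "serial" is not one of [sequential parallel]
}
```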
+// UnmarshalJSON implements json.Unmarshaler func (obj *Object) UnmarshalJSON(data []byte) error { if data[0] == '"' { obj.Type = String diff --git a/model/operation_state.go b/model/operation_state.go index 050676f..ebe97e0 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -44,21 +44,17 @@ func (a *OperationState) MarshalJSON() ([]byte, error) { return custom, err } -type operationStateForUnmarshal OperationState +type operationStateUnmarshal OperationState // UnmarshalJSON unmarshal OperationState object from json bytes func (o *OperationState) UnmarshalJSON(data []byte) error { + o.ApplyDefault() + return unmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) +} - v := operationStateForUnmarshal{ - ActionMode: ActionModeSequential, - } - err := json.Unmarshal(data, &v) - if err != nil { - return err - } - - *o = OperationState(v) - return nil +// ApplyDefault set the default values for Operation State +func (o *OperationState) ApplyDefault() { + o.ActionMode = ActionModeSequential } // OperationStateTimeout defines the specific timeout settings for operation state diff --git a/model/parallel_state.go b/model/parallel_state.go index 55d95f6..f46fa0a 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -16,7 +16,6 @@ package model import ( "encoding/json" - "fmt" "k8s.io/apimachinery/pkg/util/intstr" ) @@ -63,26 +62,17 @@ func (p *ParallelState) MarshalJSON() ([]byte, error) { return custom, err } -type parallelStateForUnmarshal ParallelState +type parallelStateUnmarshal ParallelState // UnmarshalJSON unmarshal ParallelState object from json bytes -func (ps *ParallelState) UnmarshalJSON(b []byte) error { - if len(b) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("no bytes to unmarshal") - } - - v := ¶llelStateForUnmarshal{ - CompletionType: CompletionTypeAllOf, - } - err := json.Unmarshal(b, v) - if err != nil { - return err - } - - *ps = ParallelState(*v) +func (ps *ParallelState) UnmarshalJSON(data []byte) error { + ps.ApplyDefault() + return unmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps)) +} - return nil +// ApplyDefault set the default values for Parallel State +func (ps *ParallelState) ApplyDefault() { + ps.CompletionType = CompletionTypeAllOf } // Branch Definition diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index 5de5f8c..c487629 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -14,12 +14,6 @@ package model -import ( - "bytes" - "encoding/json" - "fmt" -) - // StateExecTimeout defines workflow state execution timeout type StateExecTimeout struct { // Single state execution timeout, not including retries (ISO 8601 duration format) @@ -30,34 +24,9 @@ type StateExecTimeout struct { Total string `json:"total" validate:"required,iso8601duration"` } -// just define another type to unmarshal object, so the UnmarshalJSON will not be called recursively -type stateExecTimeoutForUnmarshal StateExecTimeout +type stateExecTimeoutUnmarshal StateExecTimeout // UnmarshalJSON unmarshal StateExecTimeout object from json bytes func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - // We must trim the leading space, because we use first byte to detect data's type - data = bytes.TrimSpace(data) - if len(data) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("no bytes to unmarshal") - } - - var err error - switch data[0] { - case '"': - s.Total, err = unmarshalString(data) - return err - case '{': - var v stateExecTimeoutForUnmarshal 
- err = json.Unmarshal(data, &v) - if err != nil { - return err - } - - *s = StateExecTimeout(v) - - return nil - } - - return fmt.Errorf("stateExecTimeout value '%s' is not supported, it must be an object or string", string(data)) + return unmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) } diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go index 0c972ce..4f8ff08 100644 --- a/model/state_exec_timeout_test.go +++ b/model/state_exec_timeout_test.go @@ -71,7 +71,7 @@ func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { data: `PT10S`, expect: &StateExecTimeout{}, - err: `stateExecTimeout value 'PT10S' is not supported, it must be an object or string`, + err: `stateExecTimeout has a syntax error "invalid character 'P' looking for beginning of value"`, }, { desp: "invalid total type", @@ -81,7 +81,7 @@ func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { }`, expect: &StateExecTimeout{}, - err: `json: cannot unmarshal number into Go struct field stateExecTimeoutForUnmarshal.total of type string`, + err: `stateExecTimeout.total must be string`, }, { desp: "invalid single type", @@ -94,7 +94,7 @@ func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { Single: "", Total: "PT10S", }, - err: `json: cannot unmarshal number into Go struct field stateExecTimeoutForUnmarshal.single of type string`, + err: `stateExecTimeout.single must be string`, }, } for _, tc := range testCases { diff --git a/model/states.go b/model/states.go index 67bcfa9..f005c24 100644 --- a/model/states.go +++ b/model/states.go @@ -23,6 +23,24 @@ import ( // StateType ... type StateType string +func (s StateType) KindValues() []string { + return []string{ + string(StateTypeDelay), + string(StateTypeEvent), + string(StateTypeOperation), + string(StateTypeParallel), + string(StateTypeSwitch), + string(StateTypeForEach), + string(StateTypeInject), + string(StateTypeCallback), + string(StateTypeSleep), + } +} + +func (s StateType) String() string { + return string(s) +} + const ( // StateTypeDelay ... StateTypeDelay StateType = "delay" @@ -55,7 +73,7 @@ type BaseState struct { // stateType can be any of delay, callback, event, foreach, inject, operation, parallel, sleep, switch // +kubebuilder:validation:Enum:=delay;callback;event;foreach;inject;operation;parallel;sleep;switch // +kubebuilder:validation:Required - Type StateType `json:"type" validate:"required"` + Type StateType `json:"type" validate:"required,oneofkind"` // States error handling and retries definitions. 
// +optional OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` @@ -96,44 +114,6 @@ func (b *BaseState) MarshalJSON() ([]byte, error) { return cus, err } -func (b *BaseState) UnmarshalJSON(data []byte) error { - baseState := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &baseState); err != nil { - return err - } - if err := unmarshalKey("id", baseState, &b.ID); err != nil { - return err - } - if err := unmarshalKey("name", baseState, &b.Name); err != nil { - return err - } - if err := unmarshalKey("type", baseState, &b.Type); err != nil { - return err - } - if err := unmarshalKey("onErrors", baseState, &b.OnErrors); err != nil { - return err - } - if err := unmarshalKey("transition", baseState, &b.Transition); err != nil { - return err - } - if err := unmarshalKey("stateDataFilter", baseState, &b.StateDataFilter); err != nil { - return err - } - if err := unmarshalKey("compensatedBy", baseState, &b.CompensatedBy); err != nil { - return err - } - if err := unmarshalKey("usedForCompensation", baseState, &b.UsedForCompensation); err != nil { - return err - } - if err := unmarshalKey("end", baseState, &b.End); err != nil { - return err - } - if err := unmarshalKey("metadata", baseState, &b.Metadata); err != nil { - return err - } - return nil -} - type State struct { BaseState `json:",inline"` // delayState Causes the workflow execution to delay for a specified duration. @@ -220,83 +200,79 @@ func (s *State) MarshalJSON() ([]byte, error) { return []byte(result), errs } -func (s *State) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &s.BaseState); err != nil { - return err - } +type unmarshalState State - mapState := map[string]interface{}{} - if err := json.Unmarshal(data, &mapState); err != nil { +// UnmarshalJSON implements json.Unmarshaler +func (s *State) UnmarshalJSON(data []byte) error { + if err := unmarshalObject("state", data, (*unmarshalState)(s)); err != nil { return err } - switch mapState["type"] { - case string(StateTypeDelay): + switch s.Type { + case StateTypeDelay: state := &DelayState{} if err := json.Unmarshal(data, state); err != nil { return err } s.DelayState = state - case string(StateTypeEvent): + case StateTypeEvent: state := &EventState{} if err := json.Unmarshal(data, state); err != nil { return err } s.EventState = state - case string(StateTypeOperation): + case StateTypeOperation: state := &OperationState{} - if err := json.Unmarshal(data, state); err != nil { + if err := unmarshalObject("states", data, state); err != nil { return err } s.OperationState = state - case string(StateTypeParallel): + case StateTypeParallel: state := &ParallelState{} if err := json.Unmarshal(data, state); err != nil { return err } s.ParallelState = state - case string(StateTypeSwitch): + case StateTypeSwitch: state := &SwitchState{} if err := json.Unmarshal(data, state); err != nil { return err } s.SwitchState = state - case string(StateTypeForEach): + case StateTypeForEach: state := &ForEachState{} if err := json.Unmarshal(data, state); err != nil { return err } s.ForEachState = state - case string(StateTypeInject): + case StateTypeInject: state := &InjectState{} if err := json.Unmarshal(data, state); err != nil { return err } s.InjectState = state - case string(StateTypeCallback): + case StateTypeCallback: state := &CallbackState{} if err := json.Unmarshal(data, state); err != nil { return err } s.CallbackState = state - case string(StateTypeSleep): + case StateTypeSleep: state := &SleepState{} if err := 
json.Unmarshal(data, state); err != nil { return err } s.SleepState = state - case nil: - return fmt.Errorf("state parameter 'type' not defined") default: - return fmt.Errorf("state type %v not supported", mapState["type"]) + return fmt.Errorf("states type %q not supported", s.Type.String()) } return nil } diff --git a/model/switch_state.go b/model/switch_state.go index 58cc8b5..9d05d7e 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -51,23 +51,21 @@ type DefaultCondition struct { End *End `json:"end,omitempty"` } -// UnmarshalJSON ... -func (e *DefaultCondition) UnmarshalJSON(data []byte) error { - type defCondUnmarshal DefaultCondition +type defaultConditionUnmarshal DefaultCondition - obj, str, err := primitiveOrStruct[string, defCondUnmarshal](data) +// UnmarshalJSON implements json.Unmarshaler +func (e *DefaultCondition) UnmarshalJSON(data []byte) error { + var nextState string + err := unmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) if err != nil { return err } - if obj == nil { - transition := &Transition{NextState: str} - e.Transition = transition - } else { - *e = DefaultCondition(*obj) + if nextState != "" { + e.Transition = &Transition{NextState: nextState} } - return nil + return err } func (s *SwitchState) MarshalJSON() ([]byte, error) { diff --git a/model/switch_state_test.go b/model/switch_state_test.go index c960f3c..e2f5c51 100644 --- a/model/switch_state_test.go +++ b/model/switch_state_test.go @@ -13,3 +13,98 @@ // limitations under the License. package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDefaultConditionUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect DefaultCondition + err string + } + + testCases := []testCase{ + { + desp: "json nextState success", + data: `{"transition": {"nextState": "next state"}}`, + expect: DefaultCondition{ + Transition: &Transition{ + NextState: "next state", + }, + }, + err: ``, + }, + { + desp: "invalid json nextState", + data: `{"transition": {"nextState": "next state}}`, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json nextState type", + data: `{"transition": {"nextState": true}}`, + err: `transition.nextState must be string`, + }, + { + desp: "transition json success", + data: `{"transition": "next state"}`, + expect: DefaultCondition{ + Transition: &Transition{ + NextState: "next state", + }, + }, + err: ``, + }, + { + desp: "invalid json transition", + data: `{"transition": "next state}`, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid json transition type", + data: `{"transition": true}`, + err: `transition must be string or object`, + }, + { + desp: "string success", + data: `"next state"`, + expect: DefaultCondition{ + Transition: &Transition{ + NextState: "next state", + }, + }, + err: ``, + }, + { + desp: "invalid string syntax", + data: `"next state`, + err: `unexpected end of JSON input`, + }, + { + desp: "invalid type", + data: `123`, + err: `defaultCondition must be string or object`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v DefaultCondition + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/util.go b/model/util.go index ebce2aa..1048602 100644 --- a/model/util.go +++ b/model/util.go @@ -17,114 
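The reworked `State.UnmarshalJSON` decodes the shared fields once and then switches on the type discriminator to fill exactly one embedded state. A reduced sketch of that discriminated-union decode is below; the `base` and `delay` types and the single `case` are made up purely to show the shape of the dispatch.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// base carries the discriminator, mirroring BaseState.Type.
type base struct {
	Name string `json:"name"`
	Type string `json:"type"`
}

type delay struct {
	TimeDelay string `json:"timeDelay"`
}

type state struct {
	base
	Delay *delay
}

// UnmarshalJSON decodes the shared fields first, then uses the type
// discriminator to decode the matching payload, as State.UnmarshalJSON does.
func (s *state) UnmarshalJSON(data []byte) error {
	if err := json.Unmarshal(data, &s.base); err != nil {
		return err
	}
	switch s.Type {
	case "delay":
		s.Delay = &delay{}
		return json.Unmarshal(data, s.Delay)
	default:
		return fmt.Errorf("states type %q not supported", s.Type)
	}
}

func main() {
	var s state
	err := json.Unmarshal([]byte(`{"name": "wait", "type": "delay", "timeDelay": "PT5S"}`), &s)
	fmt.Println(s.Name, s.Delay.TimeDelay, err) // wait PT5S <nil>
}
```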
+17,270 @@ package model import ( "bytes" "encoding/json" + "errors" "fmt" "net/http" "os" "path/filepath" - - "sigs.k8s.io/yaml" - + "reflect" "strings" "sync/atomic" + "time" + + "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" ) // +k8s:deepcopy-gen=false -const prefix = "file:/" +type Kind interface { + KindValues() []string + String() string +} -// TRUE used by bool fields that needs a boolean pointer -var TRUE = true +// TODO: Remove global variable +var httpClient = http.Client{Timeout: time.Duration(1) * time.Second} -// FALSE used by bool fields that needs a boolean pointer -var FALSE = false +type UnmarshalError struct { + err error + parameterName string + primitiveType reflect.Kind + objectType reflect.Kind +} -func getBytesFromFile(s string) (b []byte, err error) { - // #nosec - if resp, err := http.Get(s); err == nil { - defer resp.Body.Close() - buf := new(bytes.Buffer) - if _, err = buf.ReadFrom(resp.Body); err != nil { - return nil, err +func (e *UnmarshalError) Error() string { + if e.err == nil { + panic("unmarshalError fail") + } + + var syntaxErr *json.SyntaxError + var unmarshalTypeErr *json.UnmarshalTypeError + if errors.As(e.err, &syntaxErr) { + return fmt.Sprintf("%s has a syntax error %q", e.parameterName, syntaxErr.Error()) + + } else if errors.As(e.err, &unmarshalTypeErr) { + return e.unmarshalMessageError(unmarshalTypeErr) + } + + return e.err.Error() +} + +func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) string { + if err.Struct == "" && err.Field == "" { + primitiveTypeName := e.primitiveType.String() + var objectTypeName string + if e.objectType != reflect.Invalid { + switch e.objectType { + case reflect.Struct: + objectTypeName = "object" + case reflect.Map: + objectTypeName = "object" + case reflect.Slice: + objectTypeName = "array" + default: + objectTypeName = e.objectType.String() + } } - return buf.Bytes(), nil - } - s = strings.TrimPrefix(s, prefix) - - if !filepath.IsAbs(s) { - // The import file is an non-absolute path, we join it with include path - // TODO: if the file didn't find in any include path, we should report an error - for _, p := range IncludePaths() { - sn := filepath.Join(p, s) - if _, err := os.Stat(sn); err == nil { - s = sn - break + return fmt.Sprintf("%s must be %s or %s", e.parameterName, primitiveTypeName, objectTypeName) + + } else if err.Struct != "" && err.Field != "" { + var primitiveTypeName string + val := reflect.New(err.Type) + if valKinds, ok := val.Elem().Interface().(validator.Kind); ok { + values := valKinds.KindValues() + if len(values) <= 2 { + primitiveTypeName = strings.Join(values, " or ") + } else { + primitiveTypeName = fmt.Sprintf("%s, %s", strings.Join(values[:len(values)-2], ", "), strings.Join(values[len(values)-2:], " or ")) } + } else { + primitiveTypeName = err.Type.Name() } + + return fmt.Sprintf("%s.%s must be %s", e.parameterName, err.Field, primitiveTypeName) } - if b, err = os.ReadFile(filepath.Clean(s)); err != nil { - return nil, err + return err.Error() +} + +func loadExternalResource(url string) (b []byte, err error) { + index := strings.Index(url, "://") + if index == -1 { + b, err = getBytesFromFile(url) + } else { + scheme := url[:index] + switch scheme { + case "http", "https": + b, err = getBytesFromHttp(url) + case "file": + b, err = getBytesFromFile(url[index+3:]) + default: + return nil, fmt.Errorf("unsupported scheme: %q", scheme) + } + } + if err != nil { + return } // TODO: optimize this // NOTE: In specification, we can declare 
independent definitions with another file format, so // we must convert independently yaml source to json format data before unmarshal. - if strings.HasSuffix(s, ".yaml") || strings.HasSuffix(s, ".yml") { + if !json.Valid(b) { b, err = yaml.YAMLToJSON(b) if err != nil { return nil, err } + return b, nil } + return b, nil } -func requiresNotNilOrEmpty(value interface{}) string { - if value == nil { - return "" +func getBytesFromFile(path string) ([]byte, error) { + // if path is relative, search in include paths + if !filepath.IsAbs(path) { + paths := IncludePaths() + pathFound := false + for i := 0; i < len(paths) && !pathFound; i++ { + sn := filepath.Join(paths[i], path) + _, err := os.Stat(sn) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + return nil, err + } + } else { + path = sn + pathFound = true + } + } + if !pathFound { + return nil, fmt.Errorf("file not found: %q", path) + } } - return value.(string) + + return os.ReadFile(filepath.Clean(path)) } -// TODO: check the places that use unmarshalString if the case changes for primitiveOrStruct. -func unmarshalString(data []byte) (string, error) { - var value string - if err := json.Unmarshal(data, &value); err != nil { - return "", err +func getBytesFromHttp(url string) ([]byte, error) { + req, err := http.NewRequest(http.MethodGet, url, nil) + if err != nil { + return nil, err } - return value, nil + + resp, err := httpClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + buf := new(bytes.Buffer) + if _, err = buf.ReadFrom(resp.Body); err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +func unmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { + var valString string + err := unmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) + if err != nil || valString == "" { + return err + } + + // Assumes that the value inside `data` is a path to a known location. + // Returns the content of the file or a not nil error reference. 
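`loadExternalResource` dispatches on the URI scheme and normalizes YAML to JSON before the caller decodes it. The condensed sketch below uses only the standard library plus `sigs.k8s.io/yaml`; the include-path search, the one-second HTTP client timeout, and most error handling of the real function are omitted, and the file name in `main` is hypothetical.

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"

	"sigs.k8s.io/yaml"
)

// fetch sketches the scheme dispatch: http(s) URLs go through an HTTP client,
// file:// URIs and bare paths are read from disk, and YAML payloads are
// converted to JSON so that a single json.Unmarshal path can handle both.
func fetch(url string) ([]byte, error) {
	var b []byte
	var err error
	switch {
	case strings.HasPrefix(url, "http://"), strings.HasPrefix(url, "https://"):
		var resp *http.Response
		resp, err = http.Get(url)
		if err == nil {
			defer resp.Body.Close()
			b, err = io.ReadAll(resp.Body)
		}
	case strings.HasPrefix(url, "file://"):
		b, err = os.ReadFile(strings.TrimPrefix(url, "file://"))
	default:
		b, err = os.ReadFile(url)
	}
	if err != nil {
		return nil, err
	}
	if !json.Valid(b) { // assume YAML and convert, as the patch does
		return yaml.YAMLToJSON(b)
	}
	return b, nil
}

func main() {
	b, err := fetch("testdata/eventdefs.yml") // hypothetical local file
	fmt.Println(string(b), err)
}
```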
+ data, err = loadExternalResource(valString) + if err != nil { + return err + } + + data = bytes.TrimSpace(data) + if data[0] != '{' && data[0] != '[' { + return errors.New("invalid external resource definition") + } + + if data[0] == '[' && parameterName != "auth" && parameterName != "secrets" { + return errors.New("invalid external resource definition") + } + + data = bytes.TrimSpace(data) + if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" { + extractData := map[string]json.RawMessage{} + err = json.Unmarshal(data, &extractData) + if err != nil { + return &UnmarshalError{ + err: err, + parameterName: parameterName, + primitiveType: reflect.TypeOf(*valObject).Kind(), + } + } + + var ok bool + if data, ok = extractData[parameterName]; !ok { + return fmt.Errorf("external resource parameter not found: %q", parameterName) + } + } + + return unmarshalObject(parameterName, data, valObject) } -func primitiveOrStruct[T any, U any](data []byte) (valStruct *U, valPrimitive T, err error) { - if data[0] == '{' { - err = json.Unmarshal(data, &valStruct) +func unmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { + data = bytes.TrimSpace(data) + if len(data) == 0 { + // TODO: Normalize error messages + return fmt.Errorf("%s no bytes to unmarshal", parameterName) + } + + isObject := data[0] == '{' || data[0] == '[' + var err error + if isObject { + err = unmarshalObject(parameterName, data, valStruct) } else { - err = json.Unmarshal(data, &valPrimitive) + err = unmarshalPrimitive(parameterName, data, valPrimitive) + } + + var unmarshalError *UnmarshalError + if errors.As(err, &unmarshalError) { + unmarshalError.objectType = reflect.TypeOf(*valStruct).Kind() + unmarshalError.primitiveType = reflect.TypeOf(*valPrimitive).Kind() } - return + + return err } -func unmarshalKey(key string, data map[string]json.RawMessage, output interface{}) error { - if _, found := data[key]; found { - if err := json.Unmarshal(data[key], output); err != nil { - return fmt.Errorf("failed to unmarshall key '%s' with data'%s'", key, data[key]) +func unmarshalPrimitive[T string | bool](parameterName string, data []byte, value *T) error { + if value == nil { + return nil + } + + err := json.Unmarshal(data, value) + if err != nil { + return &UnmarshalError{ + err: err, + parameterName: parameterName, + primitiveType: reflect.TypeOf(*value).Kind(), } } + return nil } -// unmarshalFile same as calling unmarshalString following by getBytesFromFile. -// Assumes that the value inside `data` is a path to a known location. -// Returns the content of the file or a not nil error reference. 
-func unmarshalFile(data []byte) (b []byte, err error) { - filePath, err := unmarshalString(data) - if err != nil { - return nil, err +func unmarshalObject[U any](parameterName string, data []byte, value *U) error { + if value == nil { + return nil } - file, err := getBytesFromFile(filePath) + + err := json.Unmarshal(data, value) if err != nil { - return nil, err + return &UnmarshalError{ + err: err, + parameterName: parameterName, + objectType: reflect.TypeOf(*value).Kind(), + } } - return file, nil + + return nil } var defaultIncludePaths atomic.Value diff --git a/model/util_test.go b/model/util_test.go index 478c7e0..b81b315 100644 --- a/model/util_test.go +++ b/model/util_test.go @@ -16,8 +16,13 @@ package model import ( "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "path/filepath" "testing" + "github.com/serverlessworkflow/sdk-go/v2/test" "github.com/stretchr/testify/assert" ) @@ -26,6 +31,7 @@ func TestIncludePaths(t *testing.T) { assert.True(t, len(IncludePaths()) > 0) // update include paths + initialPaths := IncludePaths() paths := []string{"/root", "/path"} SetIncludePaths(paths) assert.Equal(t, IncludePaths(), paths) @@ -33,49 +39,265 @@ func TestIncludePaths(t *testing.T) { assert.PanicsWithError(t, "1 must be an absolute file path", assert.PanicTestFunc(func() { SetIncludePaths([]string{"1"}) })) + + SetIncludePaths(initialPaths) +} + +func Test_loadExternalResource(t *testing.T) { + SetIncludePaths(append(IncludePaths(), filepath.Join(test.CurrentProjectPath()))) + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + switch req.URL.Path { + case "/test.json": + _, err := rw.Write([]byte("{}")) + assert.NoError(t, err) + default: + t.Failed() + } + })) + defer server.Close() + httpClient = *server.Client() + + data, err := loadExternalResource(server.URL + "/test.json") + assert.NoError(t, err) + assert.Equal(t, "{}", string(data)) + + data, err = loadExternalResource("parser/testdata/eventdefs.yml") + assert.NoError(t, err) + assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) + + data, err = loadExternalResource("file://../parser/testdata/eventdefs.yml") + assert.NoError(t, err) + assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) + + data, err = loadExternalResource("./parser/testdata/eventdefs.yml") + assert.NoError(t, err) + assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) + + _, err = loadExternalResource("ftp://test.yml") + assert.ErrorContains(t, err, "unsupported scheme: \"ftp\"") +} + +func Test_unmarshalObjectOrFile(t *testing.T) { + t.Run("httptest", func(t *testing.T) { + type structString struct { + FieldValue string `json:"fieldValue"` + } + type listStructString []structString + + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, 
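Several fields in this patch (`Start`, `WorkflowExecTimeout`, `FunctionRef`, `StateExecTimeout`) accept either a bare JSON string or an object, and all of them now route through `unmarshalPrimitiveOrObject`. Below is a self-contained sketch of that string-or-object decision; the simplified `start` type and its fields are illustrative, not the SDK's definitions.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// start mirrors fields that may be given either as a bare string or an object.
type start struct {
	StateName string `json:"stateName"`
	Schedule  string `json:"schedule,omitempty"`
}

type startUnmarshal start

// UnmarshalJSON peeks at the first byte to choose between the primitive and
// the object form, the same decision unmarshalPrimitiveOrObject makes.
func (s *start) UnmarshalJSON(data []byte) error {
	data = bytes.TrimSpace(data)
	if len(data) == 0 {
		return fmt.Errorf("start: no bytes to unmarshal")
	}
	if data[0] == '{' {
		return json.Unmarshal(data, (*startUnmarshal)(s))
	}
	return json.Unmarshal(data, &s.StateName)
}

func main() {
	var a, b start
	fmt.Println(json.Unmarshal([]byte(`"CheckApplication"`), &a), a.StateName)
	fmt.Println(json.Unmarshal([]byte(`{"stateName": "CheckApplication"}`), &b), b.StateName)
}
```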
req *http.Request) { + switch req.URL.Path { + case "/test.json": + _, err := rw.Write([]byte(`{"listStructString":[{"fieldValue": "value"}]}`)) + assert.NoError(t, err) + default: + t.Failed() + } + })) + defer server.Close() + httpClient = *server.Client() + + structValue := &structString{} + data := []byte(`"fieldValue": "value"`) + err := unmarshalObjectOrFile("structString", data, structValue) + assert.Error(t, err) + assert.Equal(t, &structString{}, structValue) + + listStructValue := &listStructString{} + data = []byte(`[{"fieldValue": "value"}]`) + err = unmarshalObjectOrFile("listStructString", data, listStructValue) + assert.NoError(t, err) + assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) + + listStructValue = &listStructString{} + data = []byte(fmt.Sprintf(`"%s/test.json"`, server.URL)) + err = unmarshalObjectOrFile("listStructString", data, listStructValue) + assert.NoError(t, err) + assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) + }) + + t.Run("file://", func(t *testing.T) { + retries := &Retries{} + data := []byte(`"file://../parser/testdata/applicationrequestretries.json"`) + err := unmarshalObjectOrFile("retries", data, retries) + assert.NoError(t, err) + }) + + t.Run("external url", func(t *testing.T) { + retries := &Retries{} + data := []byte(`"https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json"`) + err := unmarshalObjectOrFile("retries", data, retries) + assert.NoError(t, err) + }) } func Test_primitiveOrMapType(t *testing.T) { type dataMap map[string]json.RawMessage - data := []byte(`"value":true`) - _, _, err := primitiveOrStruct[bool, dataMap](data) - assert.Error(t, err) - data = []byte(`{value":true}`) - _, _, err = primitiveOrStruct[bool, dataMap](data) - assert.Error(t, err) + t.Run("unmarshal", func(t *testing.T) { + var valBool bool + valMap := &dataMap{} + data := []byte(`"value":true`) + err := unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.Error(t, err) - data = []byte(`value":true}`) - _, _, err = primitiveOrStruct[bool, dataMap](data) - assert.Error(t, err) + valBool = false + valMap = &dataMap{} + data = []byte(`{value":true}`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.Error(t, err) - data = []byte(`"true"`) - _, _, err = primitiveOrStruct[bool, dataMap](data) - assert.Error(t, err) + valBool = false + valMap = &dataMap{} + data = []byte(`value":true}`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.Error(t, err) - data = []byte(`true`) - valMap, valBool, err := primitiveOrStruct[bool, dataMap](data) - assert.NoError(t, err) - assert.Nil(t, valMap) - assert.True(t, valBool) + valBool = false + valMap = &dataMap{} + data = []byte(`"true"`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.Error(t, err) - data = []byte(`"true"`) - valMap, valString, err := primitiveOrStruct[string, dataMap](data) - assert.NoError(t, err) - assert.Nil(t, valMap) - assert.Equal(t, `true`, valString) + valBool = false + valMap = &dataMap{} + data = []byte(`true`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.NoError(t, err) + assert.Equal(t, &dataMap{}, valMap) + assert.True(t, valBool) - data = []byte(`{"value":true}`) - valMap, valBool, err = primitiveOrStruct[bool, dataMap](data) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) - 
assert.False(t, valBool) + valString := "" + valMap = &dataMap{} + data = []byte(`"true"`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) + assert.NoError(t, err) + assert.Equal(t, &dataMap{}, valMap) + assert.Equal(t, `true`, valString) - data = []byte(`{"value": "true"}`) - valMap, valBool, err = primitiveOrStruct[bool, dataMap](data) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) - assert.False(t, valBool) + valBool = false + valMap = &dataMap{} + data = []byte(`{"value":true}`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.NoError(t, err) + assert.NotNil(t, valMap) + assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) + assert.False(t, valBool) + + valBool = false + valMap = &dataMap{} + data = []byte(`{"value": "true"}`) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + assert.NoError(t, err) + assert.NotNil(t, valMap) + assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) + assert.False(t, valBool) + }) + + t.Run("test personalized syntaxError error message", func(t *testing.T) { + type structString struct { + FieldValue string `json:"fieldValue"` + } + + var valString string + valStruct := &structString{} + data := []byte(`{"fieldValue": "value"`) + err := unmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) + assert.Error(t, err) + assert.Equal(t, "structBool has a syntax error \"unexpected end of JSON input\"", err.Error()) + + data = []byte(`{\n "fieldValue": value\n}`) + err = unmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) + assert.Error(t, err) + assert.Equal(t, "structBool has a syntax error \"invalid character '\\\\\\\\' looking for beginning of object key string\"", err.Error()) + // assert.Equal(t, `structBool value '{"fieldValue": value}' is not supported, it has a syntax error "invalid character 'v' looking for beginning of value"`, err.Error()) + }) + + t.Run("test personalized unmarshalTypeError error message", func(t *testing.T) { + type structBool struct { + FieldValue bool `json:"fieldValue"` + } + + var valBool bool + valStruct := &structBool{} + data := []byte(`{ + "fieldValue": "true" +}`) + err := unmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) + assert.Error(t, err) + assert.Equal(t, "structBool.fieldValue must be bool", err.Error()) + + valBool = false + valStruct = &structBool{} + data = []byte(`"true"`) + err = unmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) + assert.Error(t, err) + assert.Equal(t, "structBool must be bool or object", err.Error()) + }) + + t.Run("check json with spaces", func(t *testing.T) { + var valBool bool + valStruct := &dataMap{} + data := []byte(` {"value": "true"} `) + err := unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + assert.NoError(t, err) + + valBool = false + valStruct = &dataMap{} + data = []byte(` true `) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + assert.NoError(t, err) + + valString := "" + valStruct = &dataMap{} + data = []byte(` "true" `) + err = unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + assert.NoError(t, err) + }) + + t.Run("check tabs", func(t *testing.T) { + valString := "" + valStruct := &dataMap{} + data := []byte(string('\t') + `"true"` + string('\t')) + err := unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + assert.NoError(t, err) + + valBool := false + valStruct = &dataMap{} + 
data = []byte(string('\t') + `true` + string('\t')) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + assert.NoError(t, err) + }) + + t.Run("check breakline", func(t *testing.T) { + valString := "" + valStruct := &dataMap{} + data := []byte(string('\n') + `"true"` + string('\n')) + err := unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + assert.NoError(t, err) + + valBool := false + valStruct = &dataMap{} + data = []byte(string('\n') + `true` + string('\n')) + err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + assert.NoError(t, err) + }) + + t.Run("test recursivity and default value", func(t *testing.T) { + valStruct := &structBool{} + data := []byte(`{"fieldValue": false}`) + err := json.Unmarshal(data, valStruct) + assert.NoError(t, err) + assert.False(t, valStruct.FieldValue) + }) +} + +type structBool struct { + FieldValue bool `json:"fieldValue"` +} + +type structBoolUnmarshal structBool + +func (s *structBool) UnmarshalJSON(data []byte) error { + s.FieldValue = true + return unmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) } diff --git a/model/workflow.go b/model/workflow.go index 282da8c..c3b9694 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -15,14 +15,20 @@ package model import ( - "bytes" "encoding/json" - "fmt" ) // InvokeKind defines how the target is invoked. type InvokeKind string +func (i InvokeKind) KindValues() []string { + return []string{string(InvokeKindSync), string(InvokeKindAsync)} +} + +func (i InvokeKind) String() string { + return string(i) +} + const ( // InvokeKindSync meaning that worfklow execution should wait until the target completes. InvokeKindSync InvokeKind = "sync" @@ -108,7 +114,7 @@ type BaseWorkflow struct { Timeouts *Timeouts `json:"timeouts,omitempty"` // Defines checked errors that can be explicitly handled during workflow execution. // +optional - Errors []Error `json:"errors,omitempty"` + Errors Errors `json:"errors,omitempty"` // If "true", workflow instances is not terminated when there are no active execution paths. 
// Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" // +optional @@ -127,44 +133,25 @@ type BaseWorkflow struct { // +kubebuilder:validation:Schemaless // +kubebuilder:pruning:PreserveUnknownFields // +optional - Auth AuthArray `json:"auth,omitempty" validate:"omitempty"` + Auth Auths `json:"auth,omitempty" validate:"omitempty"` } -type AuthArray []Auth +type Auths []Auth -func (r *AuthArray) UnmarshalJSON(data []byte) error { - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } +type authsUnmarshal Auths - switch data[0] { - case '"': - return r.unmarshalFile(data) - case '[': - return r.unmarshalMany(data) - } - - return fmt.Errorf("auth value '%s' is not supported, it must be an array or string", string(data)) +// UnmarshalJSON implements json.Unmarshaler +func (r *Auths) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) } -func (r *AuthArray) unmarshalFile(data []byte) error { - b, err := unmarshalFile(data) - if err != nil { - return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) - } - - return r.unmarshalMany(b) -} +type Errors []Error -func (r *AuthArray) unmarshalMany(data []byte) error { - var auths []Auth - err := json.Unmarshal(data, &auths) - if err != nil { - return fmt.Errorf("authDefinitions value '%s' is not supported, it must be an object or string", string(data)) - } +type errorsUnmarshal Errors - *r = auths - return nil +// UnmarshalJSON implements json.Unmarshaler +func (e *Errors) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) } // Workflow base definition @@ -174,118 +161,71 @@ type Workflow struct { // +kubebuilder:pruning:PreserveUnknownFields States []State `json:"states" validate:"required,min=1,dive"` // +optional - Events []Event `json:"events,omitempty"` + Events Events `json:"events,omitempty"` // +optional - Functions []Function `json:"functions,omitempty"` + Functions Functions `json:"functions,omitempty"` // +optional - Retries []Retry `json:"retries,omitempty" validate:"dive"` + Retries Retries `json:"retries,omitempty" validate:"dive"` } +type workflowUnmarshal Workflow + // UnmarshalJSON implementation for json Unmarshal function for the Workflow type func (w *Workflow) UnmarshalJSON(data []byte) error { - if err := json.Unmarshal(data, &w.BaseWorkflow); err != nil { - return err - } - - workflowMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &workflowMap); err != nil { + w.ApplyDefault() + err := unmarshalObject("workflow", data, (*workflowUnmarshal)(w)) + if err != nil { return err } - var rawStates []json.RawMessage - if _, ok := workflowMap["states"]; ok { - if err := json.Unmarshal(workflowMap["states"], &rawStates); err != nil { - return err - } - } - - w.States = make([]State, len(rawStates)) - for i, rawState := range rawStates { - if err := json.Unmarshal(rawState, &w.States[i]); err != nil { - return err - } - } - - // if the start is not defined, use the first state - if w.BaseWorkflow.Start == nil && len(w.States) > 0 { - w.BaseWorkflow.Start = &Start{ + if w.Start == nil && len(w.States) > 0 { + w.Start = &Start{ StateName: w.States[0].Name, } } - if _, ok := workflowMap["events"]; ok { - if err := json.Unmarshal(workflowMap["events"], &w.Events); err != nil { - var s string - if err := json.Unmarshal(workflowMap["events"], &s); err != nil { - return err - } - var nestedData []byte - 
if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - - m := make(map[string][]Event) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Events = m["events"] - } - } - if _, ok := workflowMap["functions"]; ok { - if err := json.Unmarshal(workflowMap["functions"], &w.Functions); err != nil { - var s string - if err := json.Unmarshal(workflowMap["functions"], &s); err != nil { - return err - } - var nestedData []byte - if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - m := make(map[string][]Function) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Functions = m["functions"] - } - } - if _, ok := workflowMap["retries"]; ok { - if err := json.Unmarshal(workflowMap["retries"], &w.Retries); err != nil { - var s string - if err := json.Unmarshal(workflowMap["retries"], &s); err != nil { - return err - } - var nestedData []byte - if nestedData, err = getBytesFromFile(s); err != nil { - return err - } - m := make(map[string][]Retry) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Retries = m["retries"] - } - } - if _, ok := workflowMap["errors"]; ok { - if err := json.Unmarshal(workflowMap["errors"], &w.Errors); err != nil { - nestedData, err := unmarshalFile(workflowMap["errors"]) - if err != nil { - return err - } - m := make(map[string][]Error) - if err := json.Unmarshal(nestedData, &m); err != nil { - return err - } - w.Errors = m["errors"] - } - } - w.setDefaults() return nil } -func (w *Workflow) setDefaults() { - if len(w.ExpressionLang) == 0 { - w.ExpressionLang = JqExpressionLang - } +// ApplyDefault set the default values for Workflow +func (w *Workflow) ApplyDefault() { + w.ExpressionLang = JqExpressionLang +} + +type States []State + +type statesUnmarshal States + +// UnmarshalJSON implements json.Unmarshaler +func (s *States) UnmarshalJSON(data []byte) error { + return unmarshalObject("states", data, (*statesUnmarshal)(s)) +} + +type Events []Event + +type eventsUnmarshal Events + +// UnmarshalJSON implements json.Unmarshaler +func (e *Events) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) +} + +type Functions []Function + +type functionsUnmarshal Functions + +// UnmarshalJSON implements json.Unmarshaler +func (f *Functions) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) +} + +type Retries []Retry + +type retriesUnmarshal Retries + +// UnmarshalJSON implements json.Unmarshaler +func (r *Retries) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) } // Timeouts ... @@ -308,37 +248,11 @@ type Timeouts struct { EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` } -// UnmarshalJSON ... 
-func (t *Timeouts) UnmarshalJSON(data []byte) error { - timeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &timeout); err != nil { - // assumes it's a reference to a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &t); err != nil { - return err - } - return nil - } - if err := unmarshalKey("workflowExecTimeout", timeout, &t.WorkflowExecTimeout); err != nil { - return err - } - if err := unmarshalKey("stateExecTimeout", timeout, &t.StateExecTimeout); err != nil { - return err - } - if err := unmarshalKey("actionExecTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err - } - if err := unmarshalKey("branchExecTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err - } - if err := unmarshalKey("eventTimeout", timeout, &t.ActionExecTimeout); err != nil { - return err - } +type timeoutsUnmarshal Timeouts - return nil +// UnmarshalJSON implements json.Unmarshaler +func (t *Timeouts) UnmarshalJSON(data []byte) error { + return unmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) } // WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration @@ -356,29 +270,17 @@ type WorkflowExecTimeout struct { RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` } -// UnmarshalJSON ... +type workflowExecTimeoutUnmarshal WorkflowExecTimeout + +// UnmarshalJSON implements json.Unmarshaler func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { - execTimeout := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &execTimeout); err != nil { - w.Duration, err = unmarshalString(data) - if err != nil { - return err - } - } else { - if err := unmarshalKey("duration", execTimeout, &w.Duration); err != nil { - return err - } - if err := unmarshalKey("interrupt", execTimeout, &w.Interrupt); err != nil { - return err - } - if err := unmarshalKey("runBefore", execTimeout, &w.RunBefore); err != nil { - return err - } - } - if len(w.Duration) == 0 { - w.Duration = UnlimitedTimeout - } - return nil + w.ApplyDefault() + return unmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) +} + +// ApplyDefault set the default values for Workflow Exec Timeout +func (w *WorkflowExecTimeout) ApplyDefault() { + w.Duration = UnlimitedTimeout } // Error declaration for workflow definitions @@ -406,24 +308,11 @@ type Start struct { Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` } -// UnmarshalJSON ... -func (s *Start) UnmarshalJSON(data []byte) error { - startMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &startMap); err != nil { - s.StateName, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - if err := unmarshalKey("stateName", startMap, &s.StateName); err != nil { - return err - } - if err := unmarshalKey("schedule", startMap, &s.Schedule); err != nil { - return err - } +type startUnmarshal Start - return nil +// UnmarshalJSON implements json.Unmarshaler +func (s *Start) UnmarshalJSON(data []byte) error { + return unmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) } // Schedule ... @@ -442,28 +331,11 @@ type Schedule struct { Timezone string `json:"timezone,omitempty"` } -// UnmarshalJSON ... 
-func (s *Schedule) UnmarshalJSON(data []byte) error { - scheduleMap := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &scheduleMap); err != nil { - s.Interval, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - - if err := unmarshalKey("interval", scheduleMap, &s.Interval); err != nil { - return err - } - if err := unmarshalKey("cron", scheduleMap, &s.Cron); err != nil { - return err - } - if err := unmarshalKey("timezone", scheduleMap, &s.Timezone); err != nil { - return err - } +type scheduleUnmarshal Schedule - return nil +// UnmarshalJSON implements json.Unmarshaler +func (s *Schedule) UnmarshalJSON(data []byte) error { + return unmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) } // Cron ... @@ -476,21 +348,11 @@ type Cron struct { ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601duration"` } +type cronUnmarshal Cron + // UnmarshalJSON custom unmarshal function for Cron func (c *Cron) UnmarshalJSON(data []byte) error { - cron := make(map[string]interface{}) - if err := json.Unmarshal(data, &cron); err != nil { - c.Expression, err = unmarshalString(data) - if err != nil { - return err - } - return nil - } - - c.Expression = requiresNotNilOrEmpty(cron["expression"]) - c.ValidUntil = requiresNotNilOrEmpty(cron["validUntil"]) - - return nil + return unmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) } // Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). @@ -508,21 +370,11 @@ type Transition struct { Compensate bool `json:"compensate,omitempty"` } -// UnmarshalJSON ... -func (e *Transition) UnmarshalJSON(data []byte) error { - type defTransitionUnmarshal Transition +type transitionUnmarshal Transition - obj, str, err := primitiveOrStruct[string, defTransitionUnmarshal](data) - if err != nil { - return err - } - - if obj == nil { - e.NextState = str - } else { - *e = Transition(*obj) - } - return nil +// UnmarshalJSON implements json.Unmarshaler +func (t *Transition) UnmarshalJSON(data []byte) error { + return unmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) } // OnError ... @@ -562,22 +414,11 @@ type End struct { ContinueAs *ContinueAs `json:"continueAs,omitempty"` } -// UnmarshalJSON ... 
-func (e *End) UnmarshalJSON(data []byte) error { - type endUnmarshal End - end, endBool, err := primitiveOrStruct[bool, endUnmarshal](data) - if err != nil { - return err - } +type endUnmarshal End - if end == nil { - e.Terminate = endBool - e.Compensate = false - } else { - *e = End(*end) - } - - return nil +// UnmarshalJSON implements json.Unmarshaler +func (e *End) UnmarshalJSON(data []byte) error { + return unmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) } // ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) @@ -598,31 +439,11 @@ type ContinueAs struct { WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` } -type continueAsForUnmarshal ContinueAs +type continueAsUnmarshal ContinueAs +// UnmarshalJSON implements json.Unmarshaler func (c *ContinueAs) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - var err error - switch data[0] { - case '"': - c.WorkflowID, err = unmarshalString(data) - return err - case '{': - v := continueAsForUnmarshal{} - err = json.Unmarshal(data, &v) - if err != nil { - return err - } - - *c = ContinueAs(v) - return nil - } - - return fmt.Errorf("continueAs value '%s' is not supported, it must be an object or string", string(data)) + return unmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) } // ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a @@ -654,70 +475,43 @@ type DataInputSchema struct { // +kubebuilder:validation:Required Schema string `json:"schema" validate:"required"` // +kubebuilder:validation:Required - FailOnValidationErrors *bool `json:"failOnValidationErrors" validate:"required"` + FailOnValidationErrors bool `json:"failOnValidationErrors" validate:"required"` } -// UnmarshalJSON ... +type dataInputSchemaUnmarshal DataInputSchema + +// UnmarshalJSON implements json.Unmarshaler func (d *DataInputSchema) UnmarshalJSON(data []byte) error { - dataInSchema := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &dataInSchema); err != nil { - d.Schema, err = unmarshalString(data) - if err != nil { - return err - } - d.FailOnValidationErrors = &TRUE - return nil - } - if err := unmarshalKey("schema", dataInSchema, &d.Schema); err != nil { - return err - } - if err := unmarshalKey("failOnValidationErrors", dataInSchema, &d.FailOnValidationErrors); err != nil { - return err - } + d.ApplyDefault() + return unmarshalPrimitiveOrObject("dataInputSchema", data, &d.Schema, (*dataInputSchemaUnmarshal)(d)) +} - return nil +// ApplyDefault set the default values for Data Input Schema +func (d *DataInputSchema) ApplyDefault() { + d.FailOnValidationErrors = true } // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your // Workflow Expressions. type Secrets []string -// UnmarshalJSON ... 
+type secretsUnmarshal Secrets + +// UnmarshalJSON implements json.Unmarshaler func (s *Secrets) UnmarshalJSON(data []byte) error { - var secretArray []string - if err := json.Unmarshal(data, &secretArray); err != nil { - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &secretArray); err != nil { - return err - } - } - *s = secretArray - return nil + return unmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) } // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. type Constants struct { // Data represents the generic structure of the constants value // +optional - Data map[string]json.RawMessage `json:",omitempty"` + Data ConstantsData `json:",omitempty"` } -// UnmarshalJSON ... +// UnmarshalJSON implements json.Unmarshaler func (c *Constants) UnmarshalJSON(data []byte) error { - constantData := make(map[string]json.RawMessage) - if err := json.Unmarshal(data, &constantData); err != nil { - // assumes it's a reference to a file - file, err := unmarshalFile(data) - if err != nil { - return err - } - if err := json.Unmarshal(file, &constantData); err != nil { - return err - } - } - c.Data = constantData - return nil + return unmarshalObjectOrFile("constants", data, &c.Data) } + +type ConstantsData map[string]json.RawMessage diff --git a/model/workflow_ref.go b/model/workflow_ref.go index 04cc6c6..f0ec215 100644 --- a/model/workflow_ref.go +++ b/model/workflow_ref.go @@ -14,12 +14,6 @@ package model -import ( - "bytes" - "encoding/json" - "fmt" -) - // WorkflowRef holds a reference for a workflow definition type WorkflowRef struct { // Sub-workflow unique id @@ -33,7 +27,7 @@ type WorkflowRef struct { // +kubebuilder:validation:Enum=async;sync // +kubebuilder:default=sync // +optional - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneof=async sync"` + Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` // onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke // is 'async'. Defaults to terminate. 
// +kubebuilder:validation:Enum=terminate;continue @@ -41,37 +35,16 @@ type WorkflowRef struct { OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` } -type workflowRefForUnmarshal WorkflowRef +type workflowRefUnmarshal WorkflowRef // UnmarshalJSON implements json.Unmarshaler func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - return fmt.Errorf("no bytes to unmarshal") - } - - var err error - switch data[0] { - case '"': - s.WorkflowID, err = unmarshalString(data) - if err != nil { - return err - } - s.Invoke, s.OnParentComplete = InvokeKindSync, "terminate" - return nil - case '{': - v := workflowRefForUnmarshal{ - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - } - err = json.Unmarshal(data, &v) - if err != nil { - // TODO: replace the error message with correct type's name - return err - } - *s = WorkflowRef(v) - return nil - } + s.ApplyDefault() + return unmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) +} - return fmt.Errorf("subFlowRef value '%s' is not supported, it must be an object or string", string(data)) +// ApplyDefault set the default values for Workflow Ref +func (s *WorkflowRef) ApplyDefault() { + s.Invoke = InvokeKindSync + s.OnParentComplete = "terminate" } diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go index 6a27e62..4788a16 100644 --- a/model/workflow_ref_test.go +++ b/model/workflow_ref_test.go @@ -80,13 +80,13 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { desp: "invalid json format", data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, expect: WorkflowRef{}, - err: `json: cannot unmarshal number into Go struct field workflowRefForUnmarshal.workflowId of type string`, + err: "subFlowRef.workflowId must be string", }, { desp: "invalid string or object", data: `1`, expect: WorkflowRef{}, - err: `subFlowRef value '1' is not supported, it must be an object or string`, + err: `subFlowRef must be string or object`, }, } for _, tc := range testCases { @@ -151,7 +151,7 @@ func TestWorkflowRefValidate(t *testing.T) { Invoke: "sync1", OnParentComplete: "terminate", }, - err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneof' tag`, + err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneofkind' tag`, }, { desp: "invalid onParentComplete", diff --git a/model/workflow_test.go b/model/workflow_test.go index c9ad3e9..86a0ecc 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -16,6 +16,9 @@ package model import ( "encoding/json" + "fmt" + "net/http" + "net/http/httptest" "testing" "github.com/stretchr/testify/assert" @@ -153,7 +156,7 @@ func TestContinueAsUnmarshalJSON(t *testing.T) { desp: "invalid object format", data: `{"workflowId": 1}`, expect: ContinueAs{}, - err: `json: cannot unmarshal number into Go struct field continueAsForUnmarshal.workflowId of type string`, + err: `continueAs.workflowId must be string`, }, } for _, tc := range testCases { @@ -193,7 +196,7 @@ func TestEndUnmarshalJSON(t *testing.T) { desp: "string fail", data: `"true"`, expect: End{}, - err: `json: cannot unmarshal string into Go value of type bool`, + err: `end must be bool or object`, }, { desp: `object success`, @@ -203,6 +206,14 @@ func TestEndUnmarshalJSON(t *testing.T) { }, err: ``, }, + { + desp: `object fail`, + data: `{"terminate": "true"}`, + expect: End{ + Terminate: true, + }, + 
err: `end.terminate must be bool`, + }, { desp: `object key invalid`, data: `{"terminate_parameter_invalid": true}`, @@ -215,6 +226,324 @@ func TestEndUnmarshalJSON(t *testing.T) { var v End err := json.Unmarshal([]byte(tc.data), &v) + if tc.err != "" { + assert.Error(t, err) + assert.Equal(t, tc.err, err.Error()) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestWorkflowExecTimeoutUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect WorkflowExecTimeout + err string + } + + testCases := []testCase{ + { + desp: "string success", + data: `"PT15M"`, + expect: WorkflowExecTimeout{ + Duration: "PT15M", + }, + err: ``, + }, + { + desp: "string fail", + data: `PT15M`, + expect: WorkflowExecTimeout{ + Duration: "PT15M", + }, + err: `invalid character 'P' looking for beginning of value`, + }, + { + desp: `object success`, + data: `{"duration": "PT15M"}`, + expect: WorkflowExecTimeout{ + Duration: "PT15M", + }, + err: ``, + }, + { + desp: `object fail`, + data: `{"duration": PT15M}`, + expect: WorkflowExecTimeout{ + Duration: "PT15M", + }, + err: `invalid character 'P' looking for beginning of value`, + }, + { + desp: `object key invalid`, + data: `{"duration_invalid": "PT15M"}`, + expect: WorkflowExecTimeout{ + Duration: "unlimited", + }, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v WorkflowExecTimeout + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestStartUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect Start + err string + } + + testCases := []testCase{ + { + desp: "string success", + data: `"start state"`, + expect: Start{ + StateName: "start state", + }, + err: ``, + }, + { + desp: "string fail", + data: `start state`, + expect: Start{ + StateName: "start state", + }, + err: `invalid character 's' looking for beginning of value`, + }, + { + desp: `object success`, + data: `{"stateName": "start state"}`, + expect: Start{ + StateName: "start state", + }, + err: ``, + }, + { + desp: `object fail`, + data: `{"stateName": start state}`, + expect: Start{ + StateName: "start state", + }, + err: `invalid character 's' looking for beginning of value`, + }, + { + desp: `object key invalid`, + data: `{"stateName_invalid": "start state"}`, + expect: Start{ + StateName: "", + }, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v Start + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestCronUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect Cron + err string + } + + testCases := []testCase{ + { + desp: "string success", + data: `"0 15,30,45 * ? * *"`, + expect: Cron{ + Expression: "0 15,30,45 * ? * *", + }, + err: ``, + }, + { + desp: "string fail", + data: `0 15,30,45 * ? * *`, + expect: Cron{ + Expression: "0 15,30,45 * ? * *", + }, + err: `invalid character '1' after top-level value`, + }, + { + desp: `object success`, + data: `{"expression": "0 15,30,45 * ? * *"}`, + expect: Cron{ + Expression: "0 15,30,45 * ? * *", + }, + err: ``, + }, + { + desp: `object fail`, + data: `{"expression": "0 15,30,45 * ? 
* *}`, + expect: Cron{ + Expression: "0 15,30,45 * ? * *", + }, + err: `unexpected end of JSON input`, + }, + { + desp: `object key invalid`, + data: `{"expression_invalid": "0 15,30,45 * ? * *"}`, + expect: Cron{}, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v Cron + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestTransitionUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect Transition + err string + } + + testCases := []testCase{ + { + desp: "string success", + data: `"next state"`, + expect: Transition{ + NextState: "next state", + }, + err: ``, + }, + { + desp: `object success`, + data: `{"nextState": "next state"}`, + expect: Transition{ + NextState: "next state", + }, + err: ``, + }, + { + desp: `object fail`, + data: `{"nextState": "next state}`, + expect: Transition{ + NextState: "next state", + }, + err: `unexpected end of JSON input`, + }, + { + desp: `object key invalid`, + data: `{"nextState_invalid": "next state"}`, + expect: Transition{}, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v Transition + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} + +func TestDataInputSchemaUnmarshalJSON(t *testing.T) { + type testCase struct { + desp string + data string + expect DataInputSchema + err string + } + + testCases := []testCase{ + { + desp: "string success", + data: `"schema name"`, + expect: DataInputSchema{ + Schema: "schema name", + FailOnValidationErrors: true, + }, + err: ``, + }, + { + desp: `object success`, + data: `{"schema": "schema name"}`, + expect: DataInputSchema{ + Schema: "schema name", + FailOnValidationErrors: true, + }, + err: ``, + }, + { + desp: `object fail`, + data: `{"schema": "schema name}`, + expect: DataInputSchema{ + Schema: "schema name", + FailOnValidationErrors: true, + }, + err: `unexpected end of JSON input`, + }, + { + desp: `object key invalid`, + data: `{"schema_invalid": "schema name"}`, + expect: DataInputSchema{ + FailOnValidationErrors: true, + }, + err: ``, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v DataInputSchema + err := json.Unmarshal([]byte(tc.data), &v) + if tc.err != "" { assert.Error(t, err) assert.Regexp(t, tc.err, err) @@ -226,3 +555,93 @@ func TestEndUnmarshalJSON(t *testing.T) { }) } } + +func TestConstantsUnmarshalJSON(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + switch req.URL.Path { + case "/test.json": + _, err := rw.Write([]byte(`{"testkey":"testvalue"}`)) + assert.NoError(t, err) + default: + t.Failed() + } + })) + defer server.Close() + httpClient = *server.Client() + + type testCase struct { + desp string + data string + expect Constants + err string + } + testCases := []testCase{ + { + desp: "object success", + data: `{"testkey":"testvalue}`, + expect: Constants{ + Data: ConstantsData{ + "testkey": []byte(`"testvalue"`), + }, + }, + err: `unexpected end of JSON input`, + }, + { + desp: "object success", + data: `[]`, + expect: Constants{ + Data: ConstantsData{ + "testkey": []byte(`"testvalue"`), + }, + }, + // TODO: improve message: field is empty 
+ err: `constants must be string or object`, + }, + { + desp: "object success", + data: `{"testkey":"testvalue"}`, + expect: Constants{ + Data: ConstantsData{ + "testkey": []byte(`"testvalue"`), + }, + }, + err: ``, + }, + { + desp: "file success", + data: fmt.Sprintf(`"%s/test.json"`, server.URL), + expect: Constants{ + Data: ConstantsData{ + "testkey": []byte(`"testvalue"`), + }, + }, + err: ``, + }, + { + desp: "file success", + data: `"uri_invalid"`, + expect: Constants{ + Data: ConstantsData{ + "testkey": []byte(`"testvalue"`), + }, + }, + err: `file not found: "uri_invalid"`, + }, + } + + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + var v Constants + err := json.Unmarshal([]byte(tc.data), &v) + + if tc.err != "" { + assert.Error(t, err) + assert.Equal(t, tc.err, err.Error()) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.expect, v) + }) + } +} diff --git a/model/workflow_validator.go b/model/workflow_validator.go index 68f8096..2ea7cf5 100644 --- a/model/workflow_validator.go +++ b/model/workflow_validator.go @@ -39,7 +39,7 @@ func continueAsStructLevelValidation(structLevel validator.StructLevel) { // WorkflowStructLevelValidation custom validator func workflowStructLevelValidation(structLevel validator.StructLevel) { // unique name of the auth methods - // NOTE: we cannot add the custom validation of auth to AuthArray + // NOTE: we cannot add the custom validation of auth to Auth // because `RegisterStructValidation` only works with struct type wf := structLevel.Current().Interface().(Workflow) dict := map[string]bool{} diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go index 451d87f..c305898 100644 --- a/model/workflow_validator_test.go +++ b/model/workflow_validator_test.go @@ -26,7 +26,7 @@ var workflowStructDefault = Workflow{ BaseWorkflow: BaseWorkflow{ ID: "id", SpecVersion: "0.8", - Auth: AuthArray{ + Auth: Auths{ { Name: "auth name", }, diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 41da8f5..4a3e961 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -106,10 +106,10 @@ func (in *Auth) DeepCopy() *Auth { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in AuthArray) DeepCopyInto(out *AuthArray) { +func (in Auths) DeepCopyInto(out *Auths) { { in := &in - *out = make(AuthArray, len(*in)) + *out = make(Auths, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -117,12 +117,12 @@ func (in AuthArray) DeepCopyInto(out *AuthArray) { } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthArray. -func (in AuthArray) DeepCopy() AuthArray { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. +func (in Auths) DeepCopy() Auths { if in == nil { return nil } - out := new(AuthArray) + out := new(Auths) in.DeepCopyInto(out) return *out } @@ -254,7 +254,7 @@ func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { } if in.Auth != nil { in, out := &in.Auth, &out.Auth - *out = make(AuthArray, len(*in)) + *out = make(Auths, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -538,11 +538,6 @@ func (in *DataCondition) DeepCopy() *DataCondition { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { *out = *in - if in.FailOnValidationErrors != nil { - in, out := &in.FailOnValidationErrors, &out.FailOnValidationErrors - *out = new(bool) - **out = **in - } return } diff --git a/parser/parser_test.go b/parser/parser_test.go index 27d430d..014ba2f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -673,7 +673,7 @@ states: "description": "Determine if applicant request is valid", "start": "Hello State", "specVersion": "0.8", - "auth": "./testdata/workflows/urifiles/auth.json", + "auth": "testdata/workflows/urifiles/auth.json", "states": [ { "name": "Hello State", @@ -719,7 +719,7 @@ states: `)) assert.NotNil(t, err) - assert.Equal(t, "auth value '123' is not supported, it must be an array or string", err.Error()) + assert.Equal(t, "auth must be string or array", err.Error()) assert.Nil(t, workflow) }) } diff --git a/parser/testdata/applicationrequestfunctions.json b/parser/testdata/applicationrequestfunctions.json index 9416a78..bafc861 100644 --- a/parser/testdata/applicationrequestfunctions.json +++ b/parser/testdata/applicationrequestfunctions.json @@ -1,8 +1,8 @@ { - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/application.json#emailRejection" - } - ] - } \ No newline at end of file + "functions": [ + { + "name": "sendRejectionEmailFunction", + "operation": "http://myapis.org/application.json#emailRejection" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/applicationrequestretries.json b/parser/testdata/applicationrequestretries.json index 510e49a..40f83b5 100644 --- a/parser/testdata/applicationrequestretries.json +++ b/parser/testdata/applicationrequestretries.json @@ -1,9 +1,9 @@ { - "retries": [ + "retries": [ { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" + "name": "TimeoutRetryStrategy", + "delay": "PT1M", + "maxAttempts": "5" } - ] -} + ] +} \ No newline at end of file diff --git a/parser/testdata/eventdefs.yml b/parser/testdata/eventdefs.yml index 6541662..dd2c3b7 100644 --- a/parser/testdata/eventdefs.yml +++ b/parser/testdata/eventdefs.yml @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- events: - name: PaymentReceivedEvent type: payment.receive diff --git a/parser/testdata/functiondefs.json b/parser/testdata/functiondefs.json index f9a3b06..fc7dd94 100644 --- a/parser/testdata/functiondefs.json +++ b/parser/testdata/functiondefs.json @@ -1,16 +1,16 @@ { - "functions": [ - { - "name": "checkFundsAvailability", - "operation": "file://myapis/billingapis.json#checkFunds" - }, - { - "name": "sendSuccessEmail", - "operation": "file://myapis/emailapis.json#paymentSuccess" - }, - { - "name": "sendInsufficientFundsEmail", - "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" - } - ] + "functions": [ + { + "name": "checkFundsAvailability", + "operation": "file://myapis/billingapis.json#checkFunds" + }, + { + "name": "sendSuccessEmail", + "operation": "file://myapis/emailapis.json#paymentSuccess" + }, + { + "name": "sendInsufficientFundsEmail", + "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" + } + ] } \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json index d5b17c7..e5316d9 100644 --- a/parser/testdata/secrets.json +++ b/parser/testdata/secrets.json @@ -1,3 +1,4 @@ + [ "SECRET1", "SECRET2", diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json index 8f95b6e..99b373c 100644 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ b/parser/testdata/workflows/applicationrequest-issue69.json @@ -5,7 +5,7 @@ "description": "Determine if applicant request is valid", "start": "CheckApplication", "specVersion": "0.8", - "auth": "./testdata/workflows/urifiles/auth.json", + "auth": "file://testdata/workflows/urifiles/auth.json", "functions": [ { "name": "sendRejectionEmailFunction", diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json index 52b18a8..309cf8f 100644 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ b/parser/testdata/workflows/applicationrequest.rp.json @@ -7,8 +7,8 @@ "start": { "stateName": "CheckApplication" }, - "functions": "testdata/applicationrequestfunctions.json", - "retries": "testdata/applicationrequestretries.json", + "functions": "file://testdata/applicationrequestfunctions.json", + "retries": "file://testdata/applicationrequestretries.json", "states": [ { "name": "CheckApplication", diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json index a160546..589ad36 100644 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.p.json @@ -7,7 +7,7 @@ "start": { "stateName": "Greet" }, - "events": "testdata/eventbasedgreetingevents.json", + "events": "file://testdata/eventbasedgreetingevents.json", "functions": [ { "name": "greetingFunction", diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml index ca02f40..00f04f3 100644 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ b/parser/testdata/workflows/greetings-constants-file.sw.yaml @@ -19,7 +19,7 @@ description: Greet Someone specVersion: "0.8" start: stateName: Greet -constants: "testdata/constantsDogs.json" +constants: "file://testdata/constantsDogs.json" functions: - name: greetingFunction operation: file://myapis/greetingapis.json#greeting diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml 
b/parser/testdata/workflows/greetings-secret-file.sw.yaml index 3259810..27d00e1 100644 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ b/parser/testdata/workflows/greetings-secret-file.sw.yaml @@ -19,7 +19,7 @@ description: Greet Someone specVersion: "0.8" start: stateName: Greet -secrets: "testdata/secrets.json" +secrets: "file://testdata/secrets.json" functions: - name: greetingFunction operation: file://myapis/greetingapis.json#greeting diff --git a/parser/testdata/workflows/paymentconfirmation.json b/parser/testdata/workflows/paymentconfirmation.json index 2051126..815a73c 100644 --- a/parser/testdata/workflows/paymentconfirmation.json +++ b/parser/testdata/workflows/paymentconfirmation.json @@ -4,8 +4,8 @@ "specVersion": "0.8", "name": "Payment Confirmation Workflow", "description": "Performs Payment Confirmation", - "functions": "functiondefs.json", - "events": "eventdefs.yml", + "functions": "file://functiondefs.json", + "events": "file://eventdefs.yml", "states": [ { "name": "PaymentReceived", diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json index b00b964..9040643 100644 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json @@ -4,7 +4,7 @@ "version": "1.0", "specVersion": "0.8", "start": "ConsumeReading", - "timeouts": "testdata/timeouts.json", + "timeouts": "file://testdata/timeouts.json", "keepActive": true, "states": [ { diff --git a/parser/testdata/workflows/urifiles/auth.json b/parser/testdata/workflows/urifiles/auth.json index a3a62aa..ff211df 100644 --- a/parser/testdata/workflows/urifiles/auth.json +++ b/parser/testdata/workflows/urifiles/auth.json @@ -1,17 +1,17 @@ [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } + { + "name": "testAuth", + "scheme": "bearer", + "properties": { + "token": "test_token" } + }, + { + "name": "testAuth2", + "scheme": "basic", + "properties": { + "username": "test_user", + "password": "test_pwd" + } + } ] \ No newline at end of file diff --git a/validator/validator.go b/validator/validator.go index d83309c..846203d 100644 --- a/validator/validator.go +++ b/validator/validator.go @@ -23,6 +23,11 @@ import ( // TODO: expose a better validation message. 
See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation +type Kind interface { + KindValues() []string + String() string +} + var validate *validator.Validate func init() { @@ -32,6 +37,12 @@ func init() { if err != nil { panic(err) } + + err = validate.RegisterValidation("oneofkind", oneOfKind) + if err != nil { + panic(err) + } + } // GetValidator gets the default validator.Validate reference @@ -49,3 +60,15 @@ func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) err := ValidateISO8601TimeDuration(fl.Field().String()) return err == nil } + +func oneOfKind(fl validator.FieldLevel) bool { + if val, ok := fl.Field().Interface().(Kind); ok { + for _, value := range val.KindValues() { + if value == val.String() { + return true + } + } + } + + return false +} diff --git a/validator/validator_test.go b/validator/validator_test.go index b81df33..a0b273e 100644 --- a/validator/validator_test.go +++ b/validator/validator_test.go @@ -57,3 +57,61 @@ func TestValidateISO8601TimeDuration(t *testing.T) { }) } } + +type testKind string + +func (k testKind) KindValues() []string { + return []string{"test1", "test2"} +} + +func (k testKind) String() string { + return string(k) +} + +type testKindInvalid string + +func (k testKindInvalid) AllValuesInvalid() []string { + return []string{"test1", "test2"} +} + +func (k testKindInvalid) String() string { + return string(k) +} + +func Test_oneOfKind(t *testing.T) { + validate := GetValidator() + + t.Run("kind without kindInvalid", func(t *testing.T) { + spec := struct { + f interface{} + t string + }{ + f: testKindInvalid("test1"), t: "oneofkind", + } + + errs := validate.Var(spec.f, spec.t) + assert.Error(t, errs) + + }) + + t.Run("kind", func(t *testing.T) { + spec := struct { + f testKind + t string + }{ + f: testKind("test1"), t: "oneofkind", + } + errs := validate.Var(spec.f, spec.t) + assert.NoError(t, errs) + + spec = struct { + f testKind + t string + }{ + f: testKind("test3"), t: "oneofkind", + } + errs = validate.Var(spec.f, spec.t) + assert.Error(t, errs) + + }) +} From 6278ce5fb94ce514ede552ba98f45626666cb0ab Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 4 May 2023 13:52:12 -0300 Subject: [PATCH 064/110] Empty fields Marshaled as "field":null (#172) fixes https://github.com/serverlessworkflow/sdk-go/issues/171 Signed-off-by: Spolti --- model/states.go | 4 +- model/switch_state.go | 30 +++-- model/util.go | 4 +- model/zz_generated.deepcopy.go | 194 ++++++++++++++++++++++++++++----- parser/parser_test.go | 4 +- 5 files changed, 191 insertions(+), 45 deletions(-) diff --git a/model/states.go b/model/states.go index f005c24..42c7b48 100644 --- a/model/states.go +++ b/model/states.go @@ -104,7 +104,7 @@ type BaseState struct { func (b *BaseState) MarshalJSON() ([]byte, error) { type Alias BaseState if b == nil { - return []byte("null"), nil + return nil, nil } cus, err := json.Marshal(struct { *Alias @@ -148,7 +148,7 @@ type State struct { func (s *State) MarshalJSON() ([]byte, error) { if s == nil { - return []byte("null"), nil + return nil, nil } r := []byte("") var errs error diff --git a/model/switch_state.go b/model/switch_state.go index 9d05d7e..70f1b28 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -16,6 +16,7 @@ package model import ( "encoding/json" + "strings" ) // SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. 
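The hunk below gives SwitchState a MarshalJSON that serializes through an alias type and then strips the "eventConditions":null, "dataConditions":null and "end":null fragments, so empty optional members no longer surface as JSON null (the issue #171 report). A minimal, self-contained sketch of the same alias-plus-strings.Replace pattern; the Example and Child types here are invented for illustration and are not the SDK's own structs:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Child stands in for the nested condition types: End has no omitempty,
// so it would normally marshal as "end":null.
type Child struct {
	End  *bool  `json:"end"`
	Name string `json:"name"`
}

type Example struct {
	Name     string  `json:"name"`
	Children []Child `json:"children,omitempty"`
}

func (e *Example) MarshalJSON() ([]byte, error) {
	type Alias Example // alias type: same fields, no MarshalJSON, so no recursion
	b, err := json.Marshal((*Alias)(e))
	if err != nil {
		return nil, err
	}
	// Drop the fragments that would otherwise appear as "field":null.
	out := strings.Replace(string(b), `"end":null,`, "", -1)
	return []byte(out), nil
}

func main() {
	e := &Example{Name: "switch", Children: []Child{{Name: "c1"}}}
	b, _ := json.Marshal(e)
	fmt.Println(string(b)) // {"name":"switch","children":[{"name":"c1"}]}
}

In the SDK hunk the "end":null fragment is removed with count -1 because it can occur once per condition entry; the trailing comma is part of the search string so that removing the fragment leaves valid JSON behind.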
@@ -36,6 +37,23 @@ type SwitchState struct { Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` } +func (s *SwitchState) MarshalJSON() ([]byte, error) { + type Alias SwitchState + custom, err := json.Marshal(&struct { + *Alias + Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` + }{ + Alias: (*Alias)(s), + Timeouts: s.Timeouts, + }) + + // Avoid marshal empty objects as null. + st := strings.Replace(string(custom), "\"eventConditions\":null,", "", 1) + st = strings.Replace(st, "\"dataConditions\":null,", "", 1) + st = strings.Replace(st, "\"end\":null,", "", -1) + return []byte(st), err +} + // DefaultCondition Can be either a transition or end definition type DefaultCondition struct { // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). @@ -68,18 +86,6 @@ func (e *DefaultCondition) UnmarshalJSON(data []byte) error { return err } -func (s *SwitchState) MarshalJSON() ([]byte, error) { - type Alias SwitchState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - return custom, err -} - // SwitchStateTimeout defines the specific timeout settings for switch state type SwitchStateTimeout struct { // Default workflow state execution timeout (ISO 8601 duration format) diff --git a/model/util.go b/model/util.go index 1048602..2ae4226 100644 --- a/model/util.go +++ b/model/util.go @@ -31,8 +31,8 @@ import ( "sigs.k8s.io/yaml" ) +// Kind ... // +k8s:deepcopy-gen=false - type Kind interface { KindValues() []string String() string @@ -41,6 +41,8 @@ type Kind interface { // TODO: Remove global variable var httpClient = http.Client{Timeout: time.Duration(1) * time.Second} +// UnmarshalError ... +// +k8s:deepcopy-gen=false type UnmarshalError struct { err error parameterName string diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 4a3e961..d04a11b 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -105,28 +105,6 @@ func (in *Auth) DeepCopy() *Auth { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Auths) DeepCopyInto(out *Auths) { - { - in := &in - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. -func (in Auths) DeepCopy() Auths { - if in == nil { - return nil - } - out := new(Auths) - in.DeepCopyInto(out) - return *out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *AuthProperties) DeepCopyInto(out *AuthProperties) { *out = *in @@ -158,6 +136,28 @@ func (in *AuthProperties) DeepCopy() *AuthProperties { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Auths) DeepCopyInto(out *Auths) { + { + in := &in + *out = make(Auths, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. +func (in Auths) DeepCopy() Auths { + if in == nil { + return nil + } + out := new(Auths) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *BaseState) DeepCopyInto(out *BaseState) { *out = *in @@ -223,7 +223,7 @@ func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { if in.DataInputSchema != nil { in, out := &in.DataInputSchema, &out.DataInputSchema *out = new(DataInputSchema) - (*in).DeepCopyInto(*out) + **out = **in } if in.Secrets != nil { in, out := &in.Secrets, &out.Secrets @@ -242,7 +242,7 @@ func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { } if in.Errors != nil { in, out := &in.Errors, &out.Errors - *out = make([]Error, len(*in)) + *out = make(Errors, len(*in)) copy(*out, *in) } if in.Metadata != nil { @@ -426,7 +426,7 @@ func (in *Constants) DeepCopyInto(out *Constants) { *out = *in if in.Data != nil { in, out := &in.Data, &out.Data - *out = make(map[string]json.RawMessage, len(*in)) + *out = make(ConstantsData, len(*in)) for key, val := range *in { var outVal []byte if val == nil { @@ -452,6 +452,36 @@ func (in *Constants) DeepCopy() *Constants { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in ConstantsData) DeepCopyInto(out *ConstantsData) { + { + in := &in + *out = make(ConstantsData, len(*in)) + for key, val := range *in { + var outVal []byte + if val == nil { + (*out)[key] = nil + } else { + in, out := &val, &outVal + *out = make(json.RawMessage, len(*in)) + copy(*out, *in) + } + (*out)[key] = outVal + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConstantsData. +func (in ConstantsData) DeepCopy() ConstantsData { + if in == nil { + return nil + } + out := new(ConstantsData) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ContinueAs) DeepCopyInto(out *ContinueAs) { *out = *in @@ -637,6 +667,26 @@ func (in *Error) DeepCopy() *Error { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Errors) DeepCopyInto(out *Errors) { + { + in := &in + *out = make(Errors, len(*in)) + copy(*out, *in) + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Errors. +func (in Errors) DeepCopy() Errors { + if in == nil { + return nil + } + out := new(Errors) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Event) DeepCopyInto(out *Event) { *out = *in @@ -790,6 +840,28 @@ func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Events) DeepCopyInto(out *Events) { + { + in := &in + *out = make(Events, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Events. +func (in Events) DeepCopy() Events { + if in == nil { + return nil + } + out := new(Events) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ForEachState) DeepCopyInto(out *ForEachState) { *out = *in @@ -884,6 +956,28 @@ func (in *FunctionRef) DeepCopy() *FunctionRef { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Functions) DeepCopyInto(out *Functions) { + { + in := &in + *out = make(Functions, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Functions. +func (in Functions) DeepCopy() Functions { + if in == nil { + return nil + } + out := new(Functions) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *InjectState) DeepCopyInto(out *InjectState) { *out = *in @@ -1186,6 +1280,28 @@ func (in *ProduceEvent) DeepCopy() *ProduceEvent { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in Retries) DeepCopyInto(out *Retries) { + { + in := &in + *out = make(Retries, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retries. +func (in Retries) DeepCopy() Retries { + if in == nil { + return nil + } + out := new(Retries) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Retry) DeepCopyInto(out *Retry) { *out = *in @@ -1423,6 +1539,28 @@ func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in States) DeepCopyInto(out *States) { + { + in := &in + *out = make(States, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new States. +func (in States) DeepCopy() States { + if in == nil { + return nil + } + out := new(States) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *SwitchState) DeepCopyInto(out *SwitchState) { *out = *in @@ -1542,21 +1680,21 @@ func (in *Workflow) DeepCopyInto(out *Workflow) { } if in.Events != nil { in, out := &in.Events, &out.Events - *out = make([]Event, len(*in)) + *out = make(Events, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } if in.Functions != nil { in, out := &in.Functions, &out.Functions - *out = make([]Function, len(*in)) + *out = make(Functions, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } if in.Retries != nil { in, out := &in.Retries, &out.Retries - *out = make([]Retry, len(*in)) + *out = make(Retries, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } diff --git a/parser/parser_test.go b/parser/parser_test.go index 014ba2f..5913ea2 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -956,10 +956,10 @@ states: assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) // Switch State - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"dataConditions\":null,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) // Switch State with string DefaultCondition - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"eventConditions\":null,\"dataConditions\":[{\"condition\":\"${ true }\",\"end\":null,\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false 
}\",\"end\":null,\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) + assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"dataConditions\":[{\"condition\":\"${ true }\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) // Foreach State assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"transition\":{\"nextState\":\"HelloInject\"},\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage }\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) From 1e70f47d58043b60ceb24b062a232fccf738aebb Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 4 May 2023 15:08:23 -0300 Subject: [PATCH 065/110] Fix small typo on the kubebuilder FunctionType's validation (#173) Signed-off-by: Spolti --- model/function.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/model/function.go b/model/function.go index c48dfeb..49e23ab 100644 --- a/model/function.go +++ b/model/function.go @@ -52,9 +52,9 @@ type Function struct { // #. // +kubebuilder:validation:Required Operation string `json:"operation" validate:"required,oneof=rest rpc expression"` - // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `asyncapi`, `asyncapi` or `asyncapi`. + // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. // Default is `rest`. - // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;asyncapi;asyncapi;asyncapi;custom + // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;odata;asyncapi;custom // +kubebuilder:default=rest Type FunctionType `json:"type,omitempty"` // References an auth definition name to be used to access to resource defined in the operation parameter. 
From 2b1db96890a873a9994fcc16da213588dc4ea7c3 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 8 May 2023 15:24:46 -0300 Subject: [PATCH 066/110] Update version matrix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0f3870b..64ead0c 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.2) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.2.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.2) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From a8f9da826c7e5beea125bffc41fed64ef2d6581b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Tue, 9 May 2023 16:21:47 -0300 Subject: [PATCH 067/110] Support to WebAssembly (#174) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrΓ© R. de Miranda --- model/util.go | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/model/util.go b/model/util.go index 2ae4226..645b4f5 100644 --- a/model/util.go +++ b/model/util.go @@ -23,6 +23,7 @@ import ( "os" "path/filepath" "reflect" + "runtime" "strings" "sync/atomic" "time" @@ -139,6 +140,10 @@ func loadExternalResource(url string) (b []byte, err error) { } func getBytesFromFile(path string) ([]byte, error) { + if WebAssembly() { + return nil, fmt.Errorf("unsupported open file") + } + // if path is relative, search in include paths if !filepath.IsAbs(path) { paths := IncludePaths() @@ -288,11 +293,15 @@ func unmarshalObject[U any](parameterName string, data []byte, value *U) error { var defaultIncludePaths atomic.Value func init() { + // No execute set include path to suport webassembly + if WebAssembly() { + return + } + wd, err := os.Getwd() if err != nil { panic(err) } - SetIncludePaths([]string{wd}) } @@ -311,3 +320,7 @@ func SetIncludePaths(paths []string) { defaultIncludePaths.Store(paths) } + +func WebAssembly() bool { + return runtime.GOOS == "js" && runtime.GOARCH == "wasm" +} From bb71213fccf034f47fd4b1b605fe784be1852f5d Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 19 May 2023 16:13:14 -0300 Subject: [PATCH 068/110] Add spolti to the SDK Go maintainers (#175) Signed-off-by: Ricardo Zanini --- MAINTAINERS.md | 4 +++- OWNERS | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index f618c14..54af970 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,3 +1,5 @@ # Serverless Workflow Go SDK Maintainers -* [Ricardo Zanini](https://github.com/ricardozanini) \ No newline at end of file +* [Ricardo Zanini](https://github.com/ricardozanini) +* [Filippe Spolti](https://github.com/spolti) +* \ No newline at end of file diff --git a/OWNERS b/OWNERS index 61177ce..bc971b8 100644 --- 
a/OWNERS +++ b/OWNERS @@ -1,6 +1,7 @@ # List of usernames who may use /lgtm reviewers: - ricardozanini +- spolti # List of usernames who may use /approve approvers: From 5110906e33f4bb74830e0a8f9ae84bb1c9794683 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 6 Jul 2023 13:53:06 -0300 Subject: [PATCH 069/110] JSON or as a YAML file: workflow.states must be int32 (#178) * JSON or as a YAML file: workflow.states must be int32 Fixes https://github.com/serverlessworkflow/sdk-go/issues/177 Signed-off-by: Spolti * add a full test scenario with the compensate workflow Signed-off-by: Spolti * move workflow to testdata directory Signed-off-by: Spolti --------- Signed-off-by: Spolti --- model/object.go | 19 +++- parser/parser_test.go | 19 +++- parser/testdata/workflows/compensate.sw.json | 99 +++++++++++++++++++ .../workflows/greetings-v08-spec.sw.yaml | 1 + 4 files changed, 130 insertions(+), 8 deletions(-) create mode 100644 parser/testdata/workflows/compensate.sw.json diff --git a/model/object.go b/model/object.go index a0e9fa0..10f4395 100644 --- a/model/object.go +++ b/model/object.go @@ -33,10 +33,11 @@ import ( // // +kubebuilder:validation:Type=object type Object struct { - Type Type `json:"type,inline"` - IntVal int32 `json:"intVal,inline"` - StrVal string `json:"strVal,inline"` - RawValue json.RawMessage `json:"rawValue,inline"` + Type Type `json:"type,inline"` + IntVal int32 `json:"intVal,inline"` + StrVal string `json:"strVal,inline"` + RawValue json.RawMessage `json:"rawValue,inline"` + BoolValue bool `json:"boolValue,inline"` } type Type int64 @@ -45,6 +46,7 @@ const ( Integer Type = iota String Raw + Boolean ) func FromInt(val int) Object { @@ -58,6 +60,10 @@ func FromString(val string) Object { return Object{Type: String, StrVal: val} } +func FromBool(val bool) Object { + return Object{Type: Boolean, BoolValue: val} +} + func FromRaw(val interface{}) Object { custom, err := json.Marshal(val) if err != nil { @@ -73,6 +79,9 @@ func (obj *Object) UnmarshalJSON(data []byte) error { if data[0] == '"' { obj.Type = String return json.Unmarshal(data, &obj.StrVal) + } else if data[0] == 't' || data[0] == 'f' { + obj.Type = Boolean + return json.Unmarshal(data, &obj.BoolValue) } else if data[0] == '{' { obj.Type = Raw return json.Unmarshal(data, &obj.RawValue) @@ -86,6 +95,8 @@ func (obj Object) MarshalJSON() ([]byte, error) { switch obj.Type { case String: return []byte(fmt.Sprintf(`%q`, obj.StrVal)), nil + case Boolean: + return []byte(fmt.Sprintf(`%t`, obj.BoolValue)), nil case Integer: return []byte(fmt.Sprintf(`%d`, obj.IntVal)), nil case Raw: diff --git a/parser/parser_test.go b/parser/parser_test.go index 5913ea2..be0ac4d 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -16,7 +16,6 @@ package parser import ( "encoding/json" - "fmt" "os" "path/filepath" "strings" @@ -526,7 +525,8 @@ func TestFromFile(t *testing.T) { // Inject state assert.Equal(t, "HelloInject", w.States[7].Name) assert.Equal(t, model.StateTypeInject, w.States[7].Type) - assert.Equal(t, map[string]model.Object{"result": model.FromString("Hello World, last state!")}, w.States[7].InjectState.Data) + assert.Equal(t, model.FromString("Hello World, last state!"), w.States[7].InjectState.Data["result"]) + assert.Equal(t, model.FromBool(false), w.States[7].InjectState.Data["boolValue"]) assert.Equal(t, "PT11M", w.States[7].InjectState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT22M", w.States[7].InjectState.Timeouts.StateExecTimeout.Single) @@ -930,10 +930,8 @@ states: terminate: true `)) 
assert.Nil(t, err) - fmt.Println(err) assert.NotNil(t, workflow) b, err := json.Marshal(workflow) - assert.Nil(t, err) // workflow and auth metadata @@ -1023,4 +1021,17 @@ states: assert.Regexp(t, `validation for \'DataConditions\' failed on the \'required\' tag`, err) assert.Nil(t, workflow) }) + + t.Run("Test complex workflow with compensate transitions", func(t *testing.T) { + workflow, err := FromFile("./testdata/workflows/compensate.sw.json") + + assert.Nil(t, err) + assert.NotNil(t, workflow) + b, err := json.Marshal(workflow) + assert.Nil(t, err) + + workflow = nil + err = json.Unmarshal(b, &workflow) + assert.Nil(t, err) + }) } diff --git a/parser/testdata/workflows/compensate.sw.json b/parser/testdata/workflows/compensate.sw.json new file mode 100644 index 0000000..9f6ab1f --- /dev/null +++ b/parser/testdata/workflows/compensate.sw.json @@ -0,0 +1,99 @@ +{ + "id": "compensation", + "version": "1.0", + "name": "Workflow Error example", + "description": "An example of how compensation works", + "specVersion": "0.8", + "start": "printStatus", + "functions": [ + { + "name": "PrintOutput", + "type": "custom", + "operation": "sysout" + } + ], + "states": [ + { + "name": "printStatus", + "type": "inject", + "data": { + "compensated": false + }, + "compensatedBy": "compensating", + "transition": "branch" + }, + { + "name": "branch", + "type": "switch", + "dataConditions": [ + { + "condition": ".shouldCompensate==true", + "transition": { + "nextState": "finish_compensate", + "compensate": true + } + }, + { + "condition": ".shouldCompensate==false", + "transition": { + "nextState": "finish_not_compensate", + "compensate": false + } + } + ], + "defaultCondition": { + "end": true + } + }, + { + "name": "compensating", + "usedForCompensation": true, + "type": "inject", + "data": { + "compensated": true + }, + "transition": "compensating_more" + }, + { + "name": "compensating_more", + "usedForCompensation": true, + "type": "inject", + "data": { + "compensating_more": "Real Betis Balompie" + }, + "end": true + }, + { + "name": "finish_compensate", + "type": "operation", + "actions": [ + { + "name": "finish_compensate_sysout", + "functionRef": { + "refName": "PrintOutput", + "arguments": { + "message": "completed" + } + } + } + ], + "end": true + }, + { + "name": "finish_not_compensate", + "type": "operation", + "actions": [ + { + "name": "finish_not_compensate_sysout", + "functionRef": { + "refName": "PrintOutput", + "arguments": { + "message": "completed" + } + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index 13b0d75..ff4b21f 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -170,6 +170,7 @@ states: type: inject data: result: Hello World, last state! + boolValue: false timeouts: stateExecTimeout: total: PT11M From 4afc5f310fe575b3108e771c8ae11ddde4308e31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Tue, 11 Jul 2023 09:21:20 -0300 Subject: [PATCH 070/110] Validate the refs exists (#164) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add validator with context to validate the refs (functions, events, retries, etc) exists Signed-off-by: AndrΓ© R. de Miranda * Valid errors exist and change unique_struct to unique Signed-off-by: AndrΓ© R. de Miranda * Fix lint Signed-off-by: AndrΓ© R. 
de Miranda * Add transition and compensation validations. Refactor state exists Signed-off-by: AndrΓ© R. de Miranda * Json ignore field Signed-off-by: AndrΓ© R. de Miranda * Fix tests Signed-off-by: AndrΓ© R. de Miranda * Add validations: OnEvent.EventRefs, EventCondition.EventRef, and FunctionType Signed-off-by: AndrΓ© R. de Miranda * Fix tests Signed-off-by: AndrΓ© R. de Miranda * Replace oneof to oneofkind, and improve the error message Signed-off-by: AndrΓ© R. de Miranda * Validation refactoring for each struct to have its test case. Revision suggestions Signed-off-by: AndrΓ© R. de Miranda * Add validation oneofkind validation auth struct Signed-off-by: AndrΓ© R. de Miranda * Add new tests and improve error description Signed-off-by: AndrΓ© R. de Miranda * Add new unit tests, refactor intstr validator, and add new validation description Signed-off-by: AndrΓ© R. de Miranda * Remove reflection from validation Signed-off-by: AndrΓ© R. de Miranda * Remove commented code Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- hack/deepcopy-gen.sh | 1 + model/action.go | 8 +- model/action_data_filter.go | 4 +- model/action_data_filter_validator_test.go | 22 + model/action_test.go | 73 --- model/action_validator.go | 58 ++ model/action_validator_test.go | 200 ++++++ model/auth.go | 38 +- model/auth_validator_test.go | 210 ++++++ model/callback_state.go | 2 +- model/callback_state_test.go | 96 --- model/callback_state_validator_test.go | 116 ++++ model/delay_state_test.go | 73 --- model/delay_state_validator_test.go | 68 ++ model/event.go | 21 +- model/event_data_filter.go | 4 +- model/event_data_filter_validator_test.go | 22 + model/event_state.go | 10 +- model/event_state_validator.go | 39 ++ model/event_state_validator_test.go | 189 ++++++ model/event_validator.go | 20 +- model/event_validator_test.go | 218 +++++- model/foreach_state.go | 4 +- model/foreach_state_validator.go | 20 +- model/foreach_state_validator_test.go | 214 +++--- model/function.go | 37 +- model/function_validator_test.go | 74 +++ model/inject_state_validator_test.go | 28 + model/operation_state.go | 8 +- model/operation_state_validator_test.go | 121 ++++ model/parallel_state.go | 17 +- model/parallel_state_validator.go | 24 +- model/parallel_state_validator_test.go | 338 ++++++---- model/retry.go | 13 + model/retry_validator.go | 1 + model/retry_validator_test.go | 129 ++-- model/sleep_state_test.go | 61 -- model/sleep_state_validator_test.go | 95 +++ model/state_exec_timeout.go | 4 +- model/state_exec_timeout_test.go | 64 -- model/state_exec_timeout_validator_test.go | 95 +++ model/states.go | 6 +- model/states_validator.go | 28 +- model/states_validator_test.go | 200 +++--- model/switch_state.go | 12 +- model/switch_state_validator.go | 31 +- model/switch_state_validator_test.go | 455 ++++++------- model/workflow.go | 88 ++- model/workflow_ref.go | 25 +- model/workflow_ref_test.go | 75 --- model/workflow_ref_validator_test.go | 68 ++ model/workflow_test.go | 3 +- model/workflow_validator.go | 225 +++++-- model/workflow_validator_test.go | 619 +++++++++++++----- model/zz_generated.deepcopy.go | 82 ++- parser/parser.go | 8 +- parser/parser_test.go | 76 ++- .../customerbankingtransactions.json | 2 +- .../workflows/customercreditcheck.json | 4 + .../eventbasedgreetingexclusive.sw.json | 4 + .../workflows/greetings-v08-spec.sw.yaml | 71 +- .../workflows/patientonboarding.sw.yaml | 4 +- .../workflows/purchaseorderworkflow.sw.json | 2 +- parser/testdata/workflows/vitalscheck.json | 8 +- 
model/util.go => util/unmarshal.go | 26 +- .../unmarshal_benchmark_test.go | 2 +- model/util_test.go => util/unmarshal_test.go | 69 +- validator/validator.go | 26 +- validator/validator_test.go | 58 ++ validator/workflow.go | 154 +++++ 70 files changed, 3645 insertions(+), 1625 deletions(-) create mode 100644 model/action_data_filter_validator_test.go create mode 100644 model/action_validator.go create mode 100644 model/action_validator_test.go create mode 100644 model/auth_validator_test.go delete mode 100644 model/callback_state_test.go create mode 100644 model/callback_state_validator_test.go create mode 100644 model/delay_state_validator_test.go create mode 100644 model/event_data_filter_validator_test.go create mode 100644 model/event_state_validator.go create mode 100644 model/event_state_validator_test.go create mode 100644 model/function_validator_test.go create mode 100644 model/inject_state_validator_test.go create mode 100644 model/operation_state_validator_test.go create mode 100644 model/sleep_state_validator_test.go create mode 100644 model/state_exec_timeout_validator_test.go create mode 100644 model/workflow_ref_validator_test.go rename model/util.go => util/unmarshal.go (91%) rename model/util_benchmark_test.go => util/unmarshal_benchmark_test.go (98%) rename model/util_test.go => util/unmarshal_test.go (79%) create mode 100644 validator/workflow.go diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh index 353a682..8069d7e 100755 --- a/hack/deepcopy-gen.sh +++ b/hack/deepcopy-gen.sh @@ -44,5 +44,6 @@ if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then "${GOPATH}/bin/deepcopy-gen" -v 1 \ --input-dirs ./model -O zz_generated.deepcopy \ --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" \ + --output-base ./ "$@" fi diff --git a/model/action.go b/model/action.go index 5037ed1..a8d5705 100644 --- a/model/action.go +++ b/model/action.go @@ -14,6 +14,8 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + // Action specify invocations of services or other workflows during workflow execution. type Action struct { // Defines Unique action identifier. @@ -61,7 +63,7 @@ type actionUnmarshal Action // UnmarshalJSON implements json.Unmarshaler func (a *Action) UnmarshalJSON(data []byte) error { a.ApplyDefault() - return unmarshalObject("action", data, (*actionUnmarshal)(a)) + return util.UnmarshalObject("action", data, (*actionUnmarshal)(a)) } // ApplyDefault set the default values for Action @@ -93,7 +95,7 @@ type functionRefUnmarshal FunctionRef // UnmarshalJSON implements json.Unmarshaler func (f *FunctionRef) UnmarshalJSON(data []byte) error { f.ApplyDefault() - return unmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) + return util.UnmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) } // ApplyDefault set the default values for Function Ref @@ -117,5 +119,5 @@ type sleepUnmarshal Sleep // UnmarshalJSON implements json.Unmarshaler func (s *Sleep) UnmarshalJSON(data []byte) error { - return unmarshalObject("sleep", data, (*sleepUnmarshal)(s)) + return util.UnmarshalObject("sleep", data, (*sleepUnmarshal)(s)) } diff --git a/model/action_data_filter.go b/model/action_data_filter.go index 16f1615..060f12f 100644 --- a/model/action_data_filter.go +++ b/model/action_data_filter.go @@ -14,6 +14,8 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + // ActionDataFilter used to filter action data results. 
// +optional // +optional @@ -40,7 +42,7 @@ type actionDataFilterUnmarshal ActionDataFilter // UnmarshalJSON implements json.Unmarshaler func (a *ActionDataFilter) UnmarshalJSON(data []byte) error { a.ApplyDefault() - return unmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) + return util.UnmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) } // ApplyDefault set the default values for Action Data Filter diff --git a/model/action_data_filter_validator_test.go b/model/action_data_filter_validator_test.go new file mode 100644 index 0000000..df52da0 --- /dev/null +++ b/model/action_data_filter_validator_test.go @@ -0,0 +1,22 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func TestActionDataFilterStructLevelValidation(t *testing.T) { + testCases := []ValidationCase{} + StructLevelValidationCtx(t, testCases) +} diff --git a/model/action_test.go b/model/action_test.go index 5d0c7fb..55c399d 100644 --- a/model/action_test.go +++ b/model/action_test.go @@ -19,81 +19,8 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func TestSleepValidate(t *testing.T) { - type testCase struct { - desp string - sleep Sleep - err string - } - testCases := []testCase{ - { - desp: "all field empty", - sleep: Sleep{ - Before: "", - After: "", - }, - err: ``, - }, - { - desp: "only before field", - sleep: Sleep{ - Before: "PT5M", - After: "", - }, - err: ``, - }, - { - desp: "only after field", - sleep: Sleep{ - Before: "", - After: "PT5M", - }, - err: ``, - }, - { - desp: "all field", - sleep: Sleep{ - Before: "PT5M", - After: "PT5M", - }, - err: ``, - }, - { - desp: "invalid before value", - sleep: Sleep{ - Before: "T5M", - After: "PT5M", - }, - err: `Key: 'Sleep.Before' Error:Field validation for 'Before' failed on the 'iso8601duration' tag`, - }, - { - desp: "invalid after value", - sleep: Sleep{ - Before: "PT5M", - After: "T5M", - }, - err: `Key: 'Sleep.After' Error:Field validation for 'After' failed on the 'iso8601duration' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.sleep) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - func TestFunctionRefUnmarshalJSON(t *testing.T) { type testCase struct { desp string diff --git a/model/action_validator.go b/model/action_validator.go new file mode 100644 index 0000000..384469b --- /dev/null +++ b/model/action_validator.go @@ -0,0 +1,58 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + validator "github.com/go-playground/validator/v10" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(actionStructLevelValidationCtx), Action{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(functionRefStructLevelValidation), FunctionRef{}) +} + +func actionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { + action := structLevel.Current().Interface().(Action) + + if action.FunctionRef == nil && action.EventRef == nil && action.SubFlowRef == nil { + structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", "required_without", "") + return + } + + values := []bool{ + action.FunctionRef != nil, + action.EventRef != nil, + action.SubFlowRef != nil, + } + + if validationNotExclusiveParamters(values) { + structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", val.TagExclusive, "") + structLevel.ReportError(action.EventRef, "EventRef", "EventRef", val.TagExclusive, "") + structLevel.ReportError(action.SubFlowRef, "SubFlowRef", "SubFlowRef", val.TagExclusive, "") + } + + if action.RetryRef != "" && !ctx.ExistRetry(action.RetryRef) { + structLevel.ReportError(action.RetryRef, "RetryRef", "RetryRef", val.TagExists, "") + } +} + +func functionRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { + functionRef := structLevel.Current().Interface().(FunctionRef) + if !ctx.ExistFunction(functionRef.RefName) { + structLevel.ReportError(functionRef.RefName, "RefName", "RefName", val.TagExists, functionRef.RefName) + } +} diff --git a/model/action_validator_test.go b/model/action_validator_test.go new file mode 100644 index 0000000..5445f7b --- /dev/null +++ b/model/action_validator_test.go @@ -0,0 +1,200 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" +) + +func buildActionByOperationState(state *State, name string) *Action { + action := Action{ + Name: name, + } + + state.OperationState.Actions = append(state.OperationState.Actions, action) + return &state.OperationState.Actions[len(state.OperationState.Actions)-1] +} + +func buildActionByForEachState(state *State, name string) *Action { + action := Action{ + Name: name, + } + + state.ForEachState.Actions = append(state.ForEachState.Actions, action) + return &state.ForEachState.Actions[len(state.ForEachState.Actions)-1] +} + +func buildActionByBranch(branch *Branch, name string) *Action { + action := Action{ + Name: name, + } + + branch.Actions = append(branch.Actions, action) + return &branch.Actions[len(branch.Actions)-1] +} + +func buildFunctionRef(workflow *Workflow, action *Action, name string) (*FunctionRef, *Function) { + function := Function{ + Name: name, + Operation: "http://function/function_name", + Type: FunctionTypeREST, + } + + functionRef := FunctionRef{ + RefName: name, + Invoke: InvokeKindSync, + } + action.FunctionRef = &functionRef + + workflow.Functions = append(workflow.Functions, function) + return &functionRef, &function +} + +func buildRetryRef(workflow *Workflow, action *Action, name string) { + retry := Retry{ + Name: name, + } + + workflow.Retries = append(workflow.Retries, retry) + action.RetryRef = name +} + +func buildSleep(action *Action) *Sleep { + action.Sleep = &Sleep{ + Before: "PT5S", + After: "PT5S", + } + return action.Sleep +} + +func TestActionStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "require_without", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].FunctionRef = nil + return *model + }, + Err: `workflow.states[0].actions[0].functionRef required when "eventRef" or "subFlowRef" is not defined`, + }, + { + Desp: "exclude", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildEventRef(model, &model.States[0].OperationState.Actions[0], "event 1", "event2") + return *model + }, + Err: `workflow.states[0].actions[0].functionRef exclusive +workflow.states[0].actions[0].eventRef exclusive +workflow.states[0].actions[0].subFlowRef exclusive`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].FunctionRef.Invoke = InvokeKindSync + "invalid" + return *model + }, + Err: `workflow.states[0].actions[0].functionRef.invoke need by one of [sync async]`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestFunctionRefStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "exists", + Model: func() Workflow { + model := 
baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].FunctionRef.RefName = "invalid function" + return *model + }, + Err: `workflow.states[0].actions[0].functionRef.refName don't exist "invalid function"`, + }, + } + StructLevelValidationCtx(t, testCases) +} + +func TestSleepStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildSleep(action1) + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].Sleep.Before = "" + model.States[0].OperationState.Actions[0].Sleep.After = "" + return *model + }, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].Sleep.Before = "P5S" + model.States[0].OperationState.Actions[0].Sleep.After = "P5S" + return *model + }, + Err: `workflow.states[0].actions[0].sleep.before invalid iso8601 duration "P5S" +workflow.states[0].actions[0].sleep.after invalid iso8601 duration "P5S"`, + }, + } + StructLevelValidationCtx(t, testCases) +} diff --git a/model/auth.go b/model/auth.go index 9646633..6632265 100644 --- a/model/auth.go +++ b/model/auth.go @@ -18,11 +18,25 @@ import ( "encoding/json" "fmt" "strings" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // AuthType can be "basic", "bearer", or "oauth2". Default is "basic" type AuthType string +func (i AuthType) KindValues() []string { + return []string{ + string(AuthTypeBasic), + string(AuthTypeBearer), + string(AuthTypeOAuth2), + } +} + +func (i AuthType) String() string { + return string(i) +} + const ( // AuthTypeBasic ... AuthTypeBasic AuthType = "basic" @@ -35,6 +49,18 @@ const ( // GrantType ... type GrantType string +func (i GrantType) KindValues() []string { + return []string{ + string(GrantTypePassword), + string(GrantTypeClientCredentials), + string(GrantTypeTokenExchange), + } +} + +func (i GrantType) String() string { + return string(i) +} + const ( // GrantTypePassword ... GrantTypePassword GrantType = "password" @@ -55,7 +81,7 @@ type Auth struct { // +kubebuilder:validation:Enum=basic;bearer;oauth2 // +kubebuilder:default=basic // +kubebuilder:validation:Required - Scheme AuthType `json:"scheme" validate:"min=1"` + Scheme AuthType `json:"scheme" validate:"required,oneofkind"` // Auth scheme properties. 
Can be one of "Basic properties definition", "Bearer properties definition", // or "OAuth2 properties definition" // +kubebuilder:validation:Required @@ -71,7 +97,7 @@ func (a *Auth) UnmarshalJSON(data []byte) error { PropertiesRaw json.RawMessage `json:"properties"` }{} - err := unmarshalObjectOrFile("auth", data, &authTmp) + err := util.UnmarshalObjectOrFile("auth", data, &authTmp) if err != nil { return err } @@ -84,13 +110,13 @@ func (a *Auth) UnmarshalJSON(data []byte) error { switch a.Scheme { case AuthTypeBasic: a.Properties.Basic = &BasicAuthProperties{} - return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) + return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) case AuthTypeBearer: a.Properties.Bearer = &BearerAuthProperties{} - return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) + return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) case AuthTypeOAuth2: a.Properties.OAuth2 = &OAuth2AuthProperties{} - return unmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) + return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) default: return fmt.Errorf("failed to parse auth properties") } @@ -162,7 +188,7 @@ type OAuth2AuthProperties struct { // Defines the grant type. Can be "password", "clientCredentials", or "tokenExchange" // +kubebuilder:validation:Enum=password;clientCredentials;tokenExchange // +kubebuilder:validation:Required - GrantType GrantType `json:"grantType" validate:"required"` + GrantType GrantType `json:"grantType" validate:"required,oneofkind"` // String or a workflow expression. Contains the client identifier. // +kubebuilder:validation:Required ClientID string `json:"clientId" validate:"required"` diff --git a/model/auth_validator_test.go b/model/auth_validator_test.go new file mode 100644 index 0000000..e2ce55d --- /dev/null +++ b/model/auth_validator_test.go @@ -0,0 +1,210 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "testing" + +func buildAuth(workflow *Workflow, name string) *Auth { + auth := Auth{ + Name: name, + Scheme: AuthTypeBasic, + } + workflow.Auth = append(workflow.Auth, auth) + return &workflow.Auth[len(workflow.Auth)-1] +} + +func buildBasicAuthProperties(auth *Auth) *BasicAuthProperties { + auth.Scheme = AuthTypeBasic + auth.Properties = AuthProperties{ + Basic: &BasicAuthProperties{ + Username: "username", + Password: "password", + }, + } + + return auth.Properties.Basic +} + +func buildOAuth2AuthProperties(auth *Auth) *OAuth2AuthProperties { + auth.Scheme = AuthTypeOAuth2 + auth.Properties = AuthProperties{ + OAuth2: &OAuth2AuthProperties{ + ClientID: "clientId", + GrantType: GrantTypePassword, + }, + } + + return auth.Properties.OAuth2 +} + +func buildBearerAuthProperties(auth *Auth) *BearerAuthProperties { + auth.Scheme = AuthTypeBearer + auth.Properties = AuthProperties{ + Bearer: &BearerAuthProperties{ + Token: "token", + }, + } + + return auth.Properties.Bearer +} + +func TestAuthStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + auth := buildAuth(baseWorkflow, "auth 1") + buildBasicAuthProperties(auth) + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth[0].Name = "" + return *model + }, + Err: `workflow.auth[0].name is required`, + }, + { + Desp: "repeat", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth = append(model.Auth, model.Auth[0]) + return *model + }, + Err: `workflow.auth has duplicate "name"`, + }, + } + StructLevelValidationCtx(t, testCases) +} + +func TestBasicAuthPropertiesStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + auth := buildAuth(baseWorkflow, "auth 1") + buildBasicAuthProperties(auth) + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth[0].Properties.Basic.Username = "" + model.Auth[0].Properties.Basic.Password = "" + return *model + }, + Err: `workflow.auth[0].properties.basic.username is required +workflow.auth[0].properties.basic.password is required`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestBearerAuthPropertiesStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + auth := buildAuth(baseWorkflow, "auth 1") + buildBearerAuthProperties(auth) + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: 
func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth[0].Properties.Bearer.Token = "" + return *model + }, + Err: `workflow.auth[0].properties.bearer.token is required`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestOAuth2AuthPropertiesPropertiesStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + auth := buildAuth(baseWorkflow, "auth 1") + buildOAuth2AuthProperties(auth) + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth[0].Properties.OAuth2.GrantType = "" + model.Auth[0].Properties.OAuth2.ClientID = "" + return *model + }, + Err: `workflow.auth[0].properties.oAuth2.grantType is required +workflow.auth[0].properties.oAuth2.clientID is required`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Auth[0].Properties.OAuth2.GrantType = GrantTypePassword + "invalid" + return *model + }, + Err: `workflow.auth[0].properties.oAuth2.grantType need by one of [password clientCredentials tokenExchange]`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/callback_state.go b/model/callback_state.go index f35ec38..1dadcb6 100644 --- a/model/callback_state.go +++ b/model/callback_state.go @@ -22,7 +22,7 @@ import ( type CallbackState struct { // Defines the action to be executed. // +kubebuilder:validation:Required - Action Action `json:"action" validate:"required"` + Action Action `json:"action"` // References a unique callback event name in the defined workflow events. // +kubebuilder:validation:Required EventRef string `json:"eventRef" validate:"required"` diff --git a/model/callback_state_test.go b/model/callback_state_test.go deleted file mode 100644 index 9e3e856..0000000 --- a/model/callback_state_test.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func TestCallbackStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - callbackStateObj State - err string - } - testCases := []testCase{ - { - desp: "normal", - callbackStateObj: State{ - BaseState: BaseState{ - Name: "callbackTest", - Type: StateTypeCallback, - End: &End{ - Terminate: true, - }, - }, - CallbackState: &CallbackState{ - Action: Action{ - ID: "1", - Name: "action1", - }, - EventRef: "refExample", - }, - }, - err: ``, - }, - { - desp: "missing required EventRef", - callbackStateObj: State{ - BaseState: BaseState{ - Name: "callbackTest", - Type: StateTypeCallback, - }, - CallbackState: &CallbackState{ - Action: Action{ - ID: "1", - Name: "action1", - }, - }, - }, - err: `Key: 'State.CallbackState.EventRef' Error:Field validation for 'EventRef' failed on the 'required' tag`, - }, - // TODO need to register custom types - will be fixed by https://github.com/serverlessworkflow/sdk-go/issues/151 - //{ - // desp: "missing required Action", - // callbackStateObj: State{ - // BaseState: BaseState{ - // Name: "callbackTest", - // Type: StateTypeCallback, - // }, - // CallbackState: &CallbackState{ - // EventRef: "refExample", - // }, - // }, - // err: `Key: 'State.CallbackState.Action' Error:Field validation for 'Action' failed on the 'required' tag`, - //}, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(&tc.callbackStateObj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/callback_state_validator_test.go b/model/callback_state_validator_test.go new file mode 100644 index 0000000..a89cea9 --- /dev/null +++ b/model/callback_state_validator_test.go @@ -0,0 +1,116 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" +) + +func buildCallbackState(workflow *Workflow, name, eventRef string) *State { + consumeEvent := Event{ + Name: eventRef, + Type: "event type", + Kind: EventKindProduced, + } + workflow.Events = append(workflow.Events, consumeEvent) + + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeCallback, + }, + CallbackState: &CallbackState{ + EventRef: eventRef, + }, + } + workflow.States = append(workflow.States, state) + + return &workflow.States[len(workflow.States)-1] +} + +func buildCallbackStateTimeout(callbackState *CallbackState) *CallbackStateTimeout { + callbackState.Timeouts = &CallbackStateTimeout{} + return callbackState.Timeouts +} + +func TestCallbackStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") + buildEndByState(callbackState, true, false) + buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].CallbackState.EventRef = "" + return *model + }, + Err: `workflow.states[0].callbackState.eventRef is required`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestCallbackStateTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") + buildEndByState(callbackState, true, false) + buildCallbackStateTimeout(callbackState.CallbackState) + buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") + + testCases := []ValidationCase{ + { + Desp: `success`, + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: `omitempty`, + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].CallbackState.Timeouts.ActionExecTimeout = "" + model.States[0].CallbackState.Timeouts.EventTimeout = "" + return *model + }, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].CallbackState.Timeouts.ActionExecTimeout = "P5S" + model.States[0].CallbackState.Timeouts.EventTimeout = "P5S" + return *model + }, + Err: `workflow.states[0].callbackState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" +workflow.states[0].callbackState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/delay_state_test.go b/model/delay_state_test.go index 79f49e5..c960f3c 100644 --- a/model/delay_state_test.go +++ b/model/delay_state_test.go @@ -13,76 +13,3 @@ // limitations under the License. 
package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func TestDelayStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - delayStateObj State - err string - } - testCases := []testCase{ - { - desp: "normal", - delayStateObj: State{ - BaseState: BaseState{ - Name: "1", - Type: "delay", - End: &End{ - Terminate: true, - }, - }, - DelayState: &DelayState{ - TimeDelay: "PT5S", - }, - }, - err: ``, - }, - { - desp: "missing required timeDelay", - delayStateObj: State{ - BaseState: BaseState{ - Name: "1", - Type: "delay", - }, - DelayState: &DelayState{ - TimeDelay: "", - }, - }, - err: `Key: 'State.DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'required' tag`, - }, - { - desp: "invalid timeDelay duration", - delayStateObj: State{ - BaseState: BaseState{ - Name: "1", - Type: "delay", - }, - DelayState: &DelayState{ - TimeDelay: "P5S", - }, - }, - err: `Key: 'State.DelayState.TimeDelay' Error:Field validation for 'TimeDelay' failed on the 'iso8601duration' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.delayStateObj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/delay_state_validator_test.go b/model/delay_state_validator_test.go new file mode 100644 index 0000000..aed36c5 --- /dev/null +++ b/model/delay_state_validator_test.go @@ -0,0 +1,68 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "testing" + +func buildDelayState(workflow *Workflow, name, timeDelay string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeDelay, + }, + DelayState: &DelayState{ + TimeDelay: timeDelay, + }, + } + workflow.States = append(workflow.States, state) + + return &workflow.States[len(workflow.States)-1] +} + +func TestDelayStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + delayState := buildDelayState(baseWorkflow, "start state", "PT5S") + buildEndByState(delayState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].DelayState.TimeDelay = "" + return *model + }, + Err: `workflow.states[0].delayState.timeDelay is required`, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].DelayState.TimeDelay = "P5S" + return *model + }, + Err: `workflow.states[0].delayState.timeDelay invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/event.go b/model/event.go index 08545c5..a9c5a69 100644 --- a/model/event.go +++ b/model/event.go @@ -14,9 +14,22 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + // EventKind defines this event as either `consumed` or `produced` type EventKind string +func (i EventKind) KindValues() []string { + return []string{ + string(EventKindConsumed), + string(EventKindProduced), + } +} + +func (i EventKind) String() string { + return string(i) +} + const ( // EventKindConsumed means the event continuation of workflow instance execution EventKindConsumed EventKind = "consumed" @@ -40,14 +53,14 @@ type Event struct { // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Defaults to `consumed`. // +kubebuilder:validation:Enum=consumed;produced // +kubebuilder:default=consumed - Kind EventKind `json:"kind,omitempty"` + Kind EventKind `json:"kind,omitempty" validate:"required,oneofkind"` // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload // and context attributes should be accessible. Defaults to true. // +optional DataOnly bool `json:"dataOnly,omitempty"` // Define event correlation rules for this event. Only used for consumed events. 
// +optional - Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` + Correlation []Correlation `json:"correlation,omitempty" validate:"dive"` } type eventUnmarshal Event @@ -55,7 +68,7 @@ type eventUnmarshal Event // UnmarshalJSON unmarshal Event object from json bytes func (e *Event) UnmarshalJSON(data []byte) error { e.ApplyDefault() - return unmarshalObject("event", data, (*eventUnmarshal)(e)) + return util.UnmarshalObject("event", data, (*eventUnmarshal)(e)) } // ApplyDefault set the default values for Event @@ -105,7 +118,7 @@ type eventRefUnmarshal EventRef // UnmarshalJSON implements json.Unmarshaler func (e *EventRef) UnmarshalJSON(data []byte) error { e.ApplyDefault() - return unmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) + return util.UnmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) } // ApplyDefault set the default values for Event Ref diff --git a/model/event_data_filter.go b/model/event_data_filter.go index a69c7d3..a725a1b 100644 --- a/model/event_data_filter.go +++ b/model/event_data_filter.go @@ -14,6 +14,8 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + // EventDataFilter used to filter consumed event payloads. type EventDataFilter struct { // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' @@ -34,7 +36,7 @@ type eventDataFilterUnmarshal EventDataFilter // UnmarshalJSON implements json.Unmarshaler func (f *EventDataFilter) UnmarshalJSON(data []byte) error { f.ApplyDefault() - return unmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) + return util.UnmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) } // ApplyDefault set the default values for Event Data Filter diff --git a/model/event_data_filter_validator_test.go b/model/event_data_filter_validator_test.go new file mode 100644 index 0000000..1bbbac9 --- /dev/null +++ b/model/event_data_filter_validator_test.go @@ -0,0 +1,22 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func TestEventDataFilterStateStructLevelValidation(t *testing.T) { + testCases := []ValidationCase{} + StructLevelValidationCtx(t, testCases) +} diff --git a/model/event_state.go b/model/event_state.go index 1d6235a..37d3840 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -16,6 +16,8 @@ package model import ( "encoding/json" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // EventState await one or more events and perform actions when they are received. 
If defined as the @@ -53,7 +55,7 @@ type eventStateUnmarshal EventState // UnmarshalJSON unmarshal EventState object from json bytes func (e *EventState) UnmarshalJSON(data []byte) error { e.ApplyDefault() - return unmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) + return util.UnmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) } // ApplyDefault set the default values for Event State @@ -69,10 +71,10 @@ type OnEvents struct { // Should actions be performed sequentially or in parallel. Default is sequential. // +kubebuilder:validation:Enum=sequential;parallel // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` + ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` // Actions to be performed if expression matches // +optional - Actions []Action `json:"actions,omitempty" validate:"omitempty,dive"` + Actions []Action `json:"actions,omitempty" validate:"dive"` // eventDataFilter defines the callback event data filter definition // +optional EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` @@ -83,7 +85,7 @@ type onEventsUnmarshal OnEvents // UnmarshalJSON unmarshal OnEvents object from json bytes func (o *OnEvents) UnmarshalJSON(data []byte) error { o.ApplyDefault() - return unmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) + return util.UnmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) } // ApplyDefault set the default values for On Events diff --git a/model/event_state_validator.go b/model/event_state_validator.go new file mode 100644 index 0000000..d4f2f40 --- /dev/null +++ b/model/event_state_validator.go @@ -0,0 +1,39 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + validator "github.com/go-playground/validator/v10" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func init() { + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStateStructLevelValidationCtx), EventState{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onEventsStructLevelValidationCtx), OnEvents{}) +} + +func eventStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { + // EventRefs +} + +func onEventsStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { + onEvent := structLevel.Current().Interface().(OnEvents) + for _, eventRef := range onEvent.EventRefs { + if eventRef != "" && !ctx.ExistEvent(eventRef) { + structLevel.ReportError(eventRef, "eventRefs", "EventRefs", val.TagExists, "") + } + } +} diff --git a/model/event_state_validator_test.go b/model/event_state_validator_test.go new file mode 100644 index 0000000..ea7d319 --- /dev/null +++ b/model/event_state_validator_test.go @@ -0,0 +1,189 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func buildEventState(workflow *Workflow, name string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeEvent, + }, + EventState: &EventState{}, + } + + workflow.States = append(workflow.States, state) + return &workflow.States[len(workflow.States)-1] +} + +func buildOnEvents(workflow *Workflow, state *State, name string) *OnEvents { + event := Event{ + Name: name, + Type: "type", + Kind: EventKindProduced, + } + workflow.Events = append(workflow.Events, event) + + state.EventState.OnEvents = append(state.EventState.OnEvents, OnEvents{ + EventRefs: []string{event.Name}, + ActionMode: ActionModeParallel, + }) + + return &state.EventState.OnEvents[len(state.EventState.OnEvents)-1] +} + +func buildEventStateTimeout(state *State) *EventStateTimeout { + state.EventState.Timeouts = &EventStateTimeout{ + ActionExecTimeout: "PT5S", + EventTimeout: "PT5S", + } + return state.EventState.Timeouts +} + +func TestEventStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + eventState := buildEventState(baseWorkflow, "start state") + buildOnEvents(baseWorkflow, eventState, "event 1") + buildEndByState(eventState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents = nil + return *model + }, + Err: `workflow.states[0].eventState.onEvents is required`, + }, + { + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents = []OnEvents{} + return *model + }, + Err: `workflow.states[0].eventState.onEvents must have the minimum 1`, + }, + } + StructLevelValidationCtx(t, 
testCases) +} + +func TestOnEventsStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + eventState := buildEventState(baseWorkflow, "start state") + buildOnEvents(baseWorkflow, eventState, "event 1") + buildEndByState(eventState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents[0].EventRefs = []string{"event not found"} + return *model + }, + Err: `workflow.states[0].eventState.onEvents[0].eventRefs don't exist "event not found"`, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents[0].EventRefs = nil + model.States[0].EventState.OnEvents[0].ActionMode = "" + return *model + }, + Err: `workflow.states[0].eventState.onEvents[0].eventRefs is required +workflow.states[0].eventState.onEvents[0].actionMode is required`, + }, + { + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents[0].EventRefs = []string{} + return *model + }, + Err: `workflow.states[0].eventState.onEvents[0].eventRefs must have the minimum 1`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.OnEvents[0].ActionMode = ActionModeParallel + "invalid" + return *model + }, + Err: `workflow.states[0].eventState.onEvents[0].actionMode need by one of [sequential parallel]`, + }, + } + StructLevelValidationCtx(t, testCases) +} + +func TestEventStateTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + eventState := buildEventState(baseWorkflow, "start state") + buildEventStateTimeout(eventState) + buildOnEvents(baseWorkflow, eventState, "event 1") + buildEndByState(eventState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.Timeouts.ActionExecTimeout = "" + model.States[0].EventState.Timeouts.EventTimeout = "" + return *model + }, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].EventState.Timeouts.ActionExecTimeout = "P5S" + model.States[0].EventState.Timeouts.EventTimeout = "P5S" + return *model + }, + Err: `workflow.states[0].eventState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" +workflow.states[0].eventState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/event_validator.go b/model/event_validator.go index 8d134af..7b4daa9 100644 --- a/model/event_validator.go +++ b/model/event_validator.go @@ -15,20 +15,26 @@ package model import ( - "reflect" - validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func init() { - val.GetValidator().RegisterStructValidation(eventStructLevelValidation, Event{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStructLevelValidation), Event{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventRefStructLevelValidation), EventRef{}) } // eventStructLevelValidation custom validator for event kind consumed -func 
eventStructLevelValidation(structLevel validator.StructLevel) {
-	event := structLevel.Current().Interface().(Event)
-	if event.Kind == EventKindConsumed && len(event.Type) == 0 {
-		structLevel.ReportError(reflect.ValueOf(event.Type), "Type", "type", "reqtypeconsumed", "")
+func eventStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) {
+}
+
+func eventRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) {
+	model := structLevel.Current().Interface().(EventRef)
+	if model.TriggerEventRef != "" && !ctx.ExistEvent(model.TriggerEventRef) {
+		structLevel.ReportError(model.TriggerEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "")
+	}
+	if model.ResultEventRef != "" && !ctx.ExistEvent(model.ResultEventRef) {
+		structLevel.ReportError(model.ResultEventRef, "resultEventRef", "ResultEventRef", val.TagExists, "")
 	}
 }
diff --git a/model/event_validator_test.go b/model/event_validator_test.go
index 90caa9c..80340b0 100644
--- a/model/event_validator_test.go
+++ b/model/event_validator_test.go
@@ -16,51 +16,201 @@ package model

 import (
 	"testing"
-
-	val "github.com/serverlessworkflow/sdk-go/v2/validator"
-	"github.com/stretchr/testify/assert"
 )

-func TestEventRefStructLevelValidation(t *testing.T) {
-	type testCase struct {
-		name     string
-		eventRef EventRef
-		err      string
+func buildEventRef(workflow *Workflow, action *Action, triggerEvent, resultEvent string) *EventRef {
+	produceEvent := Event{
+		Name: triggerEvent,
+		Type: "event type",
+		Kind: EventKindProduced,
+	}
+
+	consumeEvent := Event{
+		Name: resultEvent,
+		Type: "event type",
+		Kind: EventKindProduced,
+	}
+
+	workflow.Events = append(workflow.Events, produceEvent)
+	workflow.Events = append(workflow.Events, consumeEvent)
+
+	eventRef := &EventRef{
+		TriggerEventRef: triggerEvent,
+		ResultEventRef: resultEvent,
+		Invoke: InvokeKindSync,
+	}
+
+	action.EventRef = eventRef
+	return action.EventRef
+}
+
+func buildCorrelation(event *Event) *Correlation {
+	event.Correlation = append(event.Correlation, Correlation{
+		ContextAttributeName: "attribute name",
+	})
+
+	return &event.Correlation[len(event.Correlation)-1]
+}
+
+func TestEventStructLevelValidation(t *testing.T) {
+	baseWorkflow := buildWorkflow()
+	baseWorkflow.Events = Events{{
+		Name: "event 1",
+		Type: "event type",
+		Kind: EventKindConsumed,
+	}}
+
+	operationState := buildOperationState(baseWorkflow, "start state")
+	buildEndByState(operationState, true, false)
+	action1 := buildActionByOperationState(operationState, "action 1")
+	buildFunctionRef(baseWorkflow, action1, "function 1")
+
+	testCases := []ValidationCase{
+		{
+			Desp: "success",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				return *model
+			},
+		},
+		{
+			Desp: "repeat",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.Events = append(model.Events, model.Events[0])
+				return *model
+			},
+			Err: `workflow.events has duplicate "name"`,
+		},
+		{
+			Desp: "required",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.Events[0].Name = ""
+				model.Events[0].Type = ""
+				model.Events[0].Kind = ""
+				return *model
+			},
+			Err: `workflow.events[0].name is required
+workflow.events[0].type is required
+workflow.events[0].kind is required`,
+		},
+		{
+			Desp: "oneofkind",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.Events[0].Kind = EventKindConsumed + "invalid"
+				return *model
+			},
+			Err: `workflow.events[0].kind need by one of [consumed produced]`,
+		},
 	}

+	StructLevelValidationCtx(t, testCases)
+}
+
+func TestCorrelationStructLevelValidation(t *testing.T) {
+	baseWorkflow := buildWorkflow()
+	baseWorkflow.Events = Events{{
+		Name: "event 1",
+		Type: "event type",
+		Kind: EventKindConsumed,
+	}}
+
+	buildCorrelation(&baseWorkflow.Events[0])
+
+	operationState := buildOperationState(baseWorkflow, "start state")
+	buildEndByState(operationState, true, false)
+	action1 := buildActionByOperationState(operationState, "action 1")
+	buildFunctionRef(baseWorkflow, action1, "function 1")

-	testCases := []testCase{
+	testCases := []ValidationCase{
+		{
+			Desp: "success",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				return *model
+			},
+		},
 		{
-			name: "valid resultEventTimeout",
-			eventRef: EventRef{
-				TriggerEventRef: "example valid",
-				ResultEventRef: "example valid",
-				ResultEventTimeout: "PT1H",
-				Invoke: InvokeKindSync,
+			Desp: "empty",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.Events[0].Correlation = nil
+				return *model
 			},
-			err: ``,
 		},
 		{
-			name: "invalid resultEventTimeout",
-			eventRef: EventRef{
-				TriggerEventRef: "example invalid",
-				ResultEventRef: "example invalid red",
-				ResultEventTimeout: "10hs",
-				Invoke: InvokeKindSync,
+			Desp: "required",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.Events[0].Correlation[0].ContextAttributeName = ""
+				return *model
 			},
-			err: `Key: 'EventRef.ResultEventTimeout' Error:Field validation for 'ResultEventTimeout' failed on the 'iso8601duration' tag`,
+			Err: `workflow.events[0].correlation[0].contextAttributeName is required`,
 		},
+		//TODO: Add test: correlation only used for `consumed` events
 	}

-	for _, tc := range testCases {
-		t.Run(tc.name, func(t *testing.T) {
-			err := val.GetValidator().Struct(tc.eventRef)
-
-			if tc.err != "" {
-				assert.Error(t, err)
-				assert.Regexp(t, tc.err, err)
-				return
-			}
-			assert.NoError(t, err)
-		})
+	StructLevelValidationCtx(t, testCases)
+}
+
+func TestEventRefStructLevelValidation(t *testing.T) {
+	baseWorkflow := buildWorkflow()
+
+	operationState := buildOperationState(baseWorkflow, "start state")
+	buildEndByState(operationState, true, false)
+	action1 := buildActionByOperationState(operationState, "action 1")
+	eventRef := buildEventRef(baseWorkflow, action1, "event 1", "event 2")
+	eventRef.ResultEventTimeout = "PT1H"
+
+	testCases := []ValidationCase{
+		{
+			Desp: "success",
+			Model: func() Workflow {
+				return *baseWorkflow.DeepCopy()
+			},
+		},
+		{
+			Desp: "required",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = ""
+				model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = ""
+				return *model
+			},
+			Err: `workflow.states[0].actions[0].eventRef.triggerEventRef is required
+workflow.states[0].actions[0].eventRef.resultEventRef is required`,
+		},
+		{
+			Desp: "exists",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "invalid event"
+				model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "invalid event 2"
+				return *model
+			},
+			Err: `workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event"
+workflow.states[0].actions[0].eventRef.resultEventRef don't exist "invalid event 2"`,
+		},
+		{
+			Desp: "iso8601duration",
+			Model: func() Workflow {
+				model := baseWorkflow.DeepCopy()
+				model.States[0].OperationState.Actions[0].EventRef.ResultEventTimeout = "10hs"
+				return *model
+			},
+			Err: 
`workflow.states[0].actions[0].eventRef.resultEventTimeout invalid iso8601 duration "10hs"`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].EventRef.Invoke = InvokeKindSync + "invalid" + return *model + }, + Err: `workflow.states[0].actions[0].eventRef.invoke need by one of [sync async]`, + }, } + + StructLevelValidationCtx(t, testCases) } diff --git a/model/foreach_state.go b/model/foreach_state.go index ad25b89..7202614 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -18,6 +18,8 @@ import ( "encoding/json" "k8s.io/apimachinery/pkg/util/intstr" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) @@ -86,7 +88,7 @@ type forEachStateUnmarshal ForEachState // UnmarshalJSON implements json.Unmarshaler func (f *ForEachState) UnmarshalJSON(data []byte) error { f.ApplyDefault() - return unmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) + return util.UnmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) } // ApplyDefault set the default values for ForEach State diff --git a/model/foreach_state_validator.go b/model/foreach_state_validator.go index 6543ded..d1d9894 100644 --- a/model/foreach_state_validator.go +++ b/model/foreach_state_validator.go @@ -17,11 +17,10 @@ package model import ( "context" "reflect" - "strconv" validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" - "k8s.io/apimachinery/pkg/util/intstr" ) func init() { @@ -40,20 +39,7 @@ func forEachStateStructLevelValidation(_ context.Context, structLevel validator. return } - switch stateObj.BatchSize.Type { - case intstr.Int: - if stateObj.BatchSize.IntVal <= 0 { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } - case intstr.String: - v, err := strconv.Atoi(stateObj.BatchSize.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", err.Error()) - return - } - - if v <= 0 { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } + if !val.ValidateGt0IntStr(stateObj.BatchSize) { + structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") } } diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go index 1f6d5e7..bc48a6c 100644 --- a/model/foreach_state_validator_test.go +++ b/model/foreach_state_validator_test.go @@ -17,167 +17,105 @@ package model import ( "testing" - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" "k8s.io/apimachinery/pkg/util/intstr" ) -func TestForEachStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - state State - err string +func buildForEachState(workflow *Workflow, name string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeForEach, + }, + ForEachState: &ForEachState{ + InputCollection: "3", + Mode: ForEachModeTypeSequential, + }, } - testCases := []testCase{ + + workflow.States = append(workflow.States, state) + return &workflow.States[len(workflow.States)-1] +} + +func TestForEachStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + forEachState := buildForEachState(baseWorkflow, "start state") + buildEndByState(forEachState, true, false) + action1 := 
buildActionByForEachState(forEachState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - desp: "normal test & sequential", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeForEach, - End: &End{ - Terminate: true, - }, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeSequential, - }, + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.Mode = ForEachModeTypeParallel + model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 1, + } + return *model }, - err: ``, }, { - desp: "normal test & parallel int", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeForEach, - End: &End{ - Terminate: true, - }, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - }, - }, + Desp: "success without batch size", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.Mode = ForEachModeTypeParallel + model.States[0].ForEachState.BatchSize = nil + return *model }, - err: ``, }, { - desp: "normal test & parallel string", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeForEach, - End: &End{ - Terminate: true, - }, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", - }, - }, + Desp: "gt0 int", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.Mode = ForEachModeTypeParallel + model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 0, + } + return *model }, - err: ``, + Err: `workflow.states[0].forEachState.batchSize must be greater than 0`, }, { - desp: "invalid parallel int", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeForEach, - End: &End{ - Terminate: true, - }, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - }, - }, + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.Mode = ForEachModeTypeParallel + "invalid" + return *model }, - err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + Err: `workflow.states[0].forEachState.mode need by one of [sequential parallel]`, }, { - desp: "invalid parallel string", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - End: &End{ - Terminate: true, - }, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", - }, - }, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.InputCollection = "" + model.States[0].ForEachState.Mode = "" + model.States[0].ForEachState.Actions = nil + return *model }, - err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + Err: `workflow.states[0].forEachState.inputCollection is required +workflow.states[0].forEachState.actions is 
required +workflow.states[0].forEachState.mode is required`, }, { - desp: "invalid parallel string format", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "2", - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Actions: []Action{ - {}, - }, - Mode: ForEachModeTypeParallel, - BatchSize: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "a", - }, - }, + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ForEachState.Actions = []Action{} + return *model }, - err: `Key: 'State.ForEachState.BatchSize' Error:Field validation for 'BatchSize' failed on the 'gt0' tag`, + Err: `workflow.states[0].forEachState.actions must have the minimum 1`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.state) - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } + StructLevelValidationCtx(t, testCases) +} - assert.NoError(t, err) - }) - } +func TestForEachStateTimeoutStructLevelValidation(t *testing.T) { + testCases := []ValidationCase{} + StructLevelValidationCtx(t, testCases) } diff --git a/model/function.go b/model/function.go index 49e23ab..07e6f77 100644 --- a/model/function.go +++ b/model/function.go @@ -14,6 +14,8 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + const ( // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service // operation that needs to be invoked, separated by a '#'. @@ -40,6 +42,22 @@ const ( // FunctionType ... type FunctionType string +func (i FunctionType) KindValues() []string { + return []string{ + string(FunctionTypeREST), + string(FunctionTypeRPC), + string(FunctionTypeExpression), + string(FunctionTypeGraphQL), + string(FunctionTypeAsyncAPI), + string(FunctionTypeOData), + string(FunctionTypeCustom), + } +} + +func (i FunctionType) String() string { + return string(i) +} + // Function ... type Function struct { Common `json:",inline"` @@ -51,13 +69,26 @@ type Function struct { // If type is `expression`, defines the workflow expression. If the type is `custom`, // #. // +kubebuilder:validation:Required - Operation string `json:"operation" validate:"required,oneof=rest rpc expression"` + Operation string `json:"operation" validate:"required"` // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. // Default is `rest`. // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;odata;asyncapi;custom // +kubebuilder:default=rest - Type FunctionType `json:"type,omitempty"` + Type FunctionType `json:"type,omitempty" validate:"required,oneofkind"` // References an auth definition name to be used to access to resource defined in the operation parameter. 
// +optional - AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` + AuthRef string `json:"authRef,omitempty"` +} + +type functionUnmarshal Function + +// UnmarshalJSON implements json unmarshaler interface +func (f *Function) UnmarshalJSON(data []byte) error { + f.ApplyDefault() + return util.UnmarshalObject("function", data, (*functionUnmarshal)(f)) +} + +// ApplyDefault set the default values for Function +func (f *Function) ApplyDefault() { + f.Type = FunctionTypeREST } diff --git a/model/function_validator_test.go b/model/function_validator_test.go new file mode 100644 index 0000000..fcde6b9 --- /dev/null +++ b/model/function_validator_test.go @@ -0,0 +1,74 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func TestFunctionStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + baseWorkflow.Functions = Functions{{ + Name: "function 1", + Operation: "http://function/action", + Type: FunctionTypeREST, + }} + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 2") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Functions[0].Name = "" + model.Functions[0].Operation = "" + model.Functions[0].Type = "" + return *model + }, + Err: `workflow.functions[0].name is required +workflow.functions[0].operation is required +workflow.functions[0].type is required`, + }, + { + Desp: "repeat", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Functions = append(model.Functions, model.Functions[0]) + return *model + }, + Err: `workflow.functions has duplicate "name"`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Functions[0].Type = FunctionTypeREST + "invalid" + return *model + }, + Err: `workflow.functions[0].type need by one of [rest rpc expression graphql asyncapi odata custom]`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/inject_state_validator_test.go b/model/inject_state_validator_test.go new file mode 100644 index 0000000..a8f127c --- /dev/null +++ b/model/inject_state_validator_test.go @@ -0,0 +1,28 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func TestInjectStateStructLevelValidation(t *testing.T) { + testCases := []ValidationCase{} + StructLevelValidationCtx(t, testCases) +} + +func TestInjectStateTimeoutStateStructLevelValidation(t *testing.T) { + testCases := []ValidationCase{} + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/operation_state.go b/model/operation_state.go index ebe97e0..da523ea 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -16,6 +16,8 @@ package model import ( "encoding/json" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // OperationState defines a set of actions to be performed in sequence or in parallel. @@ -23,10 +25,10 @@ type OperationState struct { // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. // +kubebuilder:validation:Enum=sequential;parallel // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneof=sequential parallel"` + ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` // Actions to be performed // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions" validate:"required,min=1,dive"` + Actions []Action `json:"actions" validate:"min=1,dive"` // State specific timeouts // +optional Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` @@ -49,7 +51,7 @@ type operationStateUnmarshal OperationState // UnmarshalJSON unmarshal OperationState object from json bytes func (o *OperationState) UnmarshalJSON(data []byte) error { o.ApplyDefault() - return unmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) + return util.UnmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) } // ApplyDefault set the default values for Operation State diff --git a/model/operation_state_validator_test.go b/model/operation_state_validator_test.go new file mode 100644 index 0000000..ead04a8 --- /dev/null +++ b/model/operation_state_validator_test.go @@ -0,0 +1,121 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "testing" +) + +func buildOperationState(workflow *Workflow, name string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeOperation, + }, + OperationState: &OperationState{ + ActionMode: ActionModeSequential, + }, + } + + workflow.States = append(workflow.States, state) + return &workflow.States[len(workflow.States)-1] +} + +func buildOperationStateTimeout(state *State) *OperationStateTimeout { + state.OperationState.Timeouts = &OperationStateTimeout{ + ActionExecTimeout: "PT5S", + } + return state.OperationState.Timeouts +} + +func TestOperationStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions = []Action{} + return *model + }, + Err: `workflow.states[0].actions must have the minimum 1`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.ActionMode = ActionModeParallel + "invalid" + return *model + }, + Err: `workflow.states[0].actionMode need by one of [sequential parallel]`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestOperationStateTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + operationStateTimeout := buildOperationStateTimeout(operationState) + buildStateExecTimeoutByOperationStateTimeout(operationStateTimeout) + + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Timeouts.ActionExecTimeout = "" + return *model + }, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Timeouts.ActionExecTimeout = "P5S" + return *model + }, + Err: `workflow.states[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/parallel_state.go b/model/parallel_state.go index f46fa0a..96edd7a 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -18,11 +18,24 @@ import ( "encoding/json" "k8s.io/apimachinery/pkg/util/intstr" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // CompletionType define on how to complete branch execution. type CompletionType string +func (i CompletionType) KindValues() []string { + return []string{ + string(CompletionTypeAllOf), + string(CompletionTypeAtLeast), + } +} + +func (i CompletionType) String() string { + return string(i) +} + const ( // CompletionTypeAllOf defines all branches must complete execution before the state can transition/end. 
CompletionTypeAllOf CompletionType = "allOf"
@@ -39,7 +52,7 @@ type ParallelState struct {
 	// Option types on how to complete branch execution. Defaults to `allOf`.
 	// +kubebuilder:validation:Enum=allOf;atLeast
 	// +kubebuilder:default=allOf
-	CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneof=allOf atLeast"`
+	CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneofkind"`
 	// Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete
 	// in order for the state to transition/end.
 	// +optional
@@ -67,7 +80,7 @@ type parallelStateUnmarshal ParallelState
 // UnmarshalJSON unmarshal ParallelState object from json bytes
 func (ps *ParallelState) UnmarshalJSON(data []byte) error {
 	ps.ApplyDefault()
-	return unmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps))
+	return util.UnmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps))
 }
 
 // ApplyDefault set the default values for Parallel State
diff --git a/model/parallel_state_validator.go b/model/parallel_state_validator.go
index 5286988..5999071 100644
--- a/model/parallel_state_validator.go
+++ b/model/parallel_state_validator.go
@@ -17,11 +17,10 @@ package model

 import (
 	"context"
 	"reflect"
-	"strconv"

 	validator "github.com/go-playground/validator/v10"
+
 	val "github.com/serverlessworkflow/sdk-go/v2/validator"
-	"k8s.io/apimachinery/pkg/util/intstr"
 )

 func init() {
@@ -32,24 +31,9 @@ func init() {
 func parallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) {
 	parallelStateObj := structLevel.Current().Interface().(ParallelState)

-	if parallelStateObj.CompletionType == CompletionTypeAllOf {
-		return
-	}
-
-	switch parallelStateObj.NumCompleted.Type {
-	case intstr.Int:
-		if parallelStateObj.NumCompleted.IntVal <= 0 {
-			structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "")
-		}
-	case intstr.String:
-		v, err := strconv.Atoi(parallelStateObj.NumCompleted.StrVal)
-		if err != nil {
-			structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", err.Error())
-			return
-		}
-
-		if v <= 0 {
-			structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "numCompleted", "gt0", "")
+	if parallelStateObj.CompletionType == CompletionTypeAtLeast {
+		if !val.ValidateGt0IntStr(&parallelStateObj.NumCompleted) {
+			structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "NumCompleted", "gt0", "")
 		}
 	}
 }
diff --git a/model/parallel_state_validator_test.go b/model/parallel_state_validator_test.go
index cc321ae..d1acea9 100644
--- a/model/parallel_state_validator_test.go
+++ b/model/parallel_state_validator_test.go
@@ -17,154 +17,236 @@ package model

 import (
 	"testing"
 
-	val "github.com/serverlessworkflow/sdk-go/v2/validator"
-	"github.com/stretchr/testify/assert"
 	"k8s.io/apimachinery/pkg/util/intstr"
 )
 
+func buildParallelState(workflow *Workflow, name string) *State {
+	state := State{
+		BaseState: BaseState{
+			Name: name,
+			Type: StateTypeParallel,
+		},
+		ParallelState: &ParallelState{
+			CompletionType: CompletionTypeAllOf,
+		},
+	}
+
+	workflow.States = append(workflow.States, state)
+	return &workflow.States[len(workflow.States)-1]
+}
+
+func buildBranch(state *State, name string) *Branch {
+	branch := Branch{
+		Name: name,
+	}
+
+	state.ParallelState.Branches = append(state.ParallelState.Branches, branch)
+	return 
&state.ParallelState.Branches[len(state.ParallelState.Branches)-1] +} + +func buildBranchTimeouts(branch *Branch) *BranchTimeouts { + branch.Timeouts = &BranchTimeouts{} + return branch.Timeouts +} + +func buildParallelStateTimeout(state *State) *ParallelStateTimeout { + state.ParallelState.Timeouts = &ParallelStateTimeout{ + BranchExecTimeout: "PT5S", + } + return state.ParallelState.Timeouts +} + func TestParallelStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - state *State - err string + baseWorkflow := buildWorkflow() + + parallelState := buildParallelState(baseWorkflow, "start state") + buildEndByState(parallelState, true, false) + branch := buildBranch(parallelState, "brach 1") + action1 := buildActionByBranch(branch, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success completionTypeAllOf", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "success completionTypeAtLeast", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + model.States[0].ParallelState.NumCompleted = intstr.FromInt(1) + return *model + }, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + " invalid" + return *model + }, + Err: `workflow.states[0].parallelState.completionType need by one of [allOf atLeast]`, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches = nil + model.States[0].ParallelState.CompletionType = "" + return *model + }, + Err: `workflow.states[0].parallelState.branches is required +workflow.states[0].parallelState.completionType is required`, + }, + { + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches = []Branch{} + return *model + }, + Err: `workflow.states[0].parallelState.branches must have the minimum 1`, + }, + { + Desp: "required numCompleted", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + return *model + }, + Err: `workflow.states[0].parallelState.numCompleted must be greater than 0`, + }, } - testCases := []testCase{ + + StructLevelValidationCtx(t, testCases) +} + +func TestBranchStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + parallelState := buildParallelState(baseWorkflow, "start state") + buildEndByState(parallelState, true, false) + branch := buildBranch(parallelState, "brach 1") + action1 := buildActionByBranch(branch, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - desp: "normal", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - End: &End{ - Terminate: true, - }, - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model }, - err: ``, }, { - desp: "invalid completeType", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - End: &End{ - Terminate: true, - }, - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - 
Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAllOf + "1", - }, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches[0].Name = "" + model.States[0].ParallelState.Branches[0].Actions = nil + return *model }, - err: `Key: 'State.ParallelState.CompletionType' Error:Field validation for 'CompletionType' failed on the 'oneof' tag`, + Err: `workflow.states[0].parallelState.branches[0].name is required +workflow.states[0].parallelState.branches[0].actions is required`, }, { - desp: "invalid numCompleted `int`", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - End: &End{ - Terminate: true, - }, - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromInt(0), - }, + Desp: "min", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches[0].Actions = []Action{} + return *model }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + Err: `workflow.states[0].parallelState.branches[0].actions must have the minimum 1`, }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestBranchTimeoutsStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + parallelState := buildParallelState(baseWorkflow, "start state") + buildEndByState(parallelState, true, false) + branch := buildBranch(parallelState, "brach 1") + buildBranchTimeouts(branch) + action1 := buildActionByBranch(branch, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - desp: "invalid numCompleted string format", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - End: &End{ - Terminate: true, - }, - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("a"), - }, + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "PT5S" + model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "PT5S" + return *model }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, }, { - desp: "normal", - state: &State{ - BaseState: BaseState{ - Name: "1", - Type: "parallel", - End: &End{ - Terminate: true, - }, - }, - ParallelState: &ParallelState{ - Branches: []Branch{ - { - Name: "b1", - Actions: []Action{ - {}, - }, - }, - }, - CompletionType: CompletionTypeAtLeast, - NumCompleted: intstr.FromString("0"), - }, + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "" + model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "" + return *model }, - err: `Key: 'State.ParallelState.NumCompleted' Error:Field validation for 'NumCompleted' failed on the 'gt0' tag`, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "P5S" + model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "P5S" + return *model + }, + Err: 
`workflow.states[0].parallelState.branches[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S" +workflow.states[0].parallelState.branches[0].timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.state) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) + StructLevelValidationCtx(t, testCases) +} + +func TestParallelStateTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + parallelState := buildParallelState(baseWorkflow, "start state") + buildParallelStateTimeout(parallelState) + buildEndByState(parallelState, true, false) + branch := buildBranch(parallelState, "brach 1") + action1 := buildActionByBranch(branch, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Timeouts.BranchExecTimeout = "" + return *model + }, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].ParallelState.Timeouts.BranchExecTimeout = "P5S" + return *model + }, + Err: `workflow.states[0].parallelState.timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, + }, } + + StructLevelValidationCtx(t, testCases) } diff --git a/model/retry.go b/model/retry.go index 6ce8277..e3c7e10 100644 --- a/model/retry.go +++ b/model/retry.go @@ -17,6 +17,7 @@ package model import ( "k8s.io/apimachinery/pkg/util/intstr" + "github.com/serverlessworkflow/sdk-go/v2/util" "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" ) @@ -41,3 +42,15 @@ type Retry struct { // TODO: make iso8601duration compatible this type Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` } + +type retryUnmarshal Retry + +// UnmarshalJSON implements json.Unmarshaler +func (r *Retry) UnmarshalJSON(data []byte) error { + r.ApplyDefault() + return util.UnmarshalObject("retry", data, (*retryUnmarshal)(r)) +} + +func (r *Retry) ApplyDefault() { + r.MaxAttempts = intstr.FromInt(1) +} diff --git a/model/retry_validator.go b/model/retry_validator.go index 14886ce..b95e2f7 100644 --- a/model/retry_validator.go +++ b/model/retry_validator.go @@ -19,6 +19,7 @@ import ( validator "github.com/go-playground/validator/v10" "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go index 78f1e70..5a3bca0 100644 --- a/model/retry_validator_test.go +++ b/model/retry_validator_test.go @@ -18,103 +18,74 @@ import ( "testing" "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" ) func TestRetryStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - retryObj Retry - err string - } - testCases := []testCase{ - { - desp: "normal", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), - }, - err: ``, - }, - { - desp: "normal with all optinal", - retryObj: Retry{ - Name: "1", - }, - err: ``, - }, + baseWorkflow := buildWorkflow() + + operationState := 
buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildRetryRef(baseWorkflow, action1, "retry 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - desp: "missing required name", - retryObj: Retry{ - Name: "", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Retries[0].Delay = "PT5S" + model.Retries[0].MaxDelay = "PT5S" + model.Retries[0].Increment = "PT5S" + model.Retries[0].Jitter = floatstr.FromString("PT5S") + return *model }, - err: `Key: 'Retry.Name' Error:Field validation for 'Name' failed on the 'required' tag`, }, { - desp: "invalid delay duration", - retryObj: Retry{ - Name: "1", - Delay: "P5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Retries[0].Name = "" + model.States[0].OperationState.Actions[0].RetryRef = "" + return *model }, - err: `Key: 'Retry.Delay' Error:Field validation for 'Delay' failed on the 'iso8601duration' tag`, + Err: `workflow.retries[0].name is required`, }, { - desp: "invdalid max delay duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "P5S", - Increment: "PT5S", - Jitter: floatstr.FromString("PT5S"), + Desp: "repeat", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Retries = append(model.Retries, model.Retries[0]) + return *model }, - err: `Key: 'Retry.MaxDelay' Error:Field validation for 'MaxDelay' failed on the 'iso8601duration' tag`, + Err: `workflow.retries has duplicate "name"`, }, { - desp: "invalid increment duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "P5S", - Jitter: floatstr.FromString("PT5S"), + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].RetryRef = "invalid retry" + return *model }, - err: `Key: 'Retry.Increment' Error:Field validation for 'Increment' failed on the 'iso8601duration' tag`, + Err: `workflow.states[0].actions[0].retryRef don't exist "invalid retry"`, }, { - desp: "invalid jitter duration", - retryObj: Retry{ - Name: "1", - Delay: "PT5S", - MaxDelay: "PT5S", - Increment: "PT5S", - Jitter: floatstr.FromString("P5S"), + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Retries[0].Delay = "P5S" + model.Retries[0].MaxDelay = "P5S" + model.Retries[0].Increment = "P5S" + model.Retries[0].Jitter = floatstr.FromString("P5S") + + return *model }, - err: `Key: 'Retry.Jitter' Error:Field validation for 'Jitter' failed on the 'iso8601duration' tag`, + Err: `workflow.retries[0].delay invalid iso8601 duration "P5S" +workflow.retries[0].maxDelay invalid iso8601 duration "P5S" +workflow.retries[0].increment invalid iso8601 duration "P5S" +workflow.retries[0].jitter invalid iso8601 duration "P5S"`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.retryObj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } + StructLevelValidationCtx(t, testCases) } diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go index 47b6a1e..c960f3c 100644 --- a/model/sleep_state_test.go +++ 
b/model/sleep_state_test.go @@ -13,64 +13,3 @@ // limitations under the License. package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func TestSleepStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - state State - err string - } - testCases := []testCase{ - { - desp: "normal duration", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "sleep", - End: &End{ - Terminate: true, - }, - }, - SleepState: &SleepState{ - Duration: "PT10S", - }, - }, - err: ``, - }, - { - desp: "invalid duration", - state: State{ - BaseState: BaseState{ - Name: "1", - Type: "sleep", - }, - SleepState: &SleepState{ - Duration: "T10S", - }, - }, - err: `Key: 'State.SleepState.Duration' Error:Field validation for 'Duration' failed on the 'iso8601duration' tag`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.state) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/sleep_state_validator_test.go b/model/sleep_state_validator_test.go new file mode 100644 index 0000000..057d6b3 --- /dev/null +++ b/model/sleep_state_validator_test.go @@ -0,0 +1,95 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "testing" + +func buildSleepState(workflow *Workflow, name, duration string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeSleep, + }, + SleepState: &SleepState{ + Duration: duration, + }, + } + + workflow.States = append(workflow.States, state) + return &workflow.States[len(workflow.States)-1] +} + +func buildSleepStateTimeout(state *State) *SleepStateTimeout { + state.SleepState.Timeouts = &SleepStateTimeout{} + return state.SleepState.Timeouts +} + +func TestSleepStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") + buildEndByState(sleepState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SleepState.Duration = "" + return *model + }, + Err: `workflow.states[0].sleepState.duration is required`, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SleepState.Duration = "P5S" + return *model + }, + Err: `workflow.states[0].sleepState.duration invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestSleepStateTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") + buildEndByState(sleepState, true, false) + sleepStateTimeout := buildSleepStateTimeout(sleepState) + buildStateExecTimeoutBySleepStateTimeout(sleepStateTimeout) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go index c487629..0a53fd8 100644 --- a/model/state_exec_timeout.go +++ b/model/state_exec_timeout.go @@ -14,6 +14,8 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + // StateExecTimeout defines workflow state execution timeout type StateExecTimeout struct { // Single state execution timeout, not including retries (ISO 8601 duration format) @@ -28,5 +30,5 @@ type stateExecTimeoutUnmarshal StateExecTimeout // UnmarshalJSON unmarshal StateExecTimeout object from json bytes func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) + return util.UnmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) } diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go index 4f8ff08..6030395 100644 --- a/model/state_exec_timeout_test.go +++ b/model/state_exec_timeout_test.go @@ -18,8 +18,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { @@ -113,65 +111,3 @@ func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { }) } } - -func TestStateExecTimeoutStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - timeout StateExecTimeout - err string - } - testCases := []testCase{ - { - desp: "normal total", - timeout: StateExecTimeout{ - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal total & single", - timeout: 
StateExecTimeout{ - Single: "PT10S", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "missing total", - timeout: StateExecTimeout{ - Single: "PT10S", - Total: "", - }, - err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'required' tag`, - }, - { - desp: "invalid total duration", - timeout: StateExecTimeout{ - Single: "PT10S", - Total: "T10S", - }, - err: `Key: 'StateExecTimeout.Total' Error:Field validation for 'Total' failed on the 'iso8601duration' tag`, - }, - { - desp: "invalid single duration", - timeout: StateExecTimeout{ - Single: "T10S", - Total: "PT10S", - }, - err: `Key: 'StateExecTimeout.Single' Error:Field validation for 'Single' failed on the 'iso8601duration' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.timeout) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/state_exec_timeout_validator_test.go b/model/state_exec_timeout_validator_test.go new file mode 100644 index 0000000..5a2f794 --- /dev/null +++ b/model/state_exec_timeout_validator_test.go @@ -0,0 +1,95 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "testing" + +func buildStateExecTimeoutByTimeouts(timeouts *Timeouts) *StateExecTimeout { + stateExecTimeout := StateExecTimeout{ + Total: "PT5S", + Single: "PT5S", + } + timeouts.StateExecTimeout = &stateExecTimeout + return timeouts.StateExecTimeout +} + +func buildStateExecTimeoutBySleepStateTimeout(timeouts *SleepStateTimeout) *StateExecTimeout { + stateExecTimeout := StateExecTimeout{ + Total: "PT5S", + } + timeouts.StateExecTimeout = &stateExecTimeout + return timeouts.StateExecTimeout +} + +func buildStateExecTimeoutByOperationStateTimeout(timeouts *OperationStateTimeout) *StateExecTimeout { + stateExecTimeout := StateExecTimeout{ + Total: "PT5S", + Single: "PT5S", + } + timeouts.ActionExecTimeout = "PT5S" + timeouts.StateExecTimeout = &stateExecTimeout + return timeouts.StateExecTimeout +} + +func TestStateExecTimeoutStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + timeouts := buildTimeouts(baseWorkflow) + buildStateExecTimeoutByTimeouts(timeouts) + + callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") + buildEndByState(callbackState, true, false) + buildCallbackStateTimeout(callbackState.CallbackState) + buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, + }, + { + Desp: "omitempty", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "" + return *model + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "" + return *model + }, + Err: `workflow.timeouts.stateExecTimeout.total is required`, + }, + { + Desp: "iso8601duration", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "P5S" + model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "P5S" + return *model + }, + Err: `workflow.timeouts.stateExecTimeout.single invalid iso8601 duration "P5S" +workflow.timeouts.stateExecTimeout.total invalid iso8601 duration "P5S"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/states.go b/model/states.go index 42c7b48..5842d9a 100644 --- a/model/states.go +++ b/model/states.go @@ -18,6 +18,8 @@ import ( "encoding/json" "fmt" "strings" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // StateType ... 
@@ -204,7 +206,7 @@ type unmarshalState State // UnmarshalJSON implements json.Unmarshaler func (s *State) UnmarshalJSON(data []byte) error { - if err := unmarshalObject("state", data, (*unmarshalState)(s)); err != nil { + if err := util.UnmarshalObject("state", data, (*unmarshalState)(s)); err != nil { return err } @@ -225,7 +227,7 @@ func (s *State) UnmarshalJSON(data []byte) error { case StateTypeOperation: state := &OperationState{} - if err := unmarshalObject("states", data, state); err != nil { + if err := util.UnmarshalObject("states", data, state); err != nil { return err } s.OperationState = state diff --git a/model/states_validator.go b/model/states_validator.go index ee55846..0ce87dc 100644 --- a/model/states_validator.go +++ b/model/states_validator.go @@ -15,19 +15,37 @@ package model import ( - "reflect" - validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func init() { - val.GetValidator().RegisterStructValidation(baseStateStructLevelValidation, BaseState{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(baseStateStructLevelValidationCtx), BaseState{}) } -func baseStateStructLevelValidation(structLevel validator.StructLevel) { +func baseStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { baseState := structLevel.Current().Interface().(BaseState) if baseState.Type != StateTypeSwitch { - validTransitionAndEnd(structLevel, reflect.ValueOf(baseState), baseState.Transition, baseState.End) + validTransitionAndEnd(structLevel, baseState, baseState.Transition, baseState.End) + } + + if baseState.CompensatedBy != "" { + if baseState.UsedForCompensation { + structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagRecursiveCompensation, "") + } + + if ctx.ExistState(baseState.CompensatedBy) { + value := ctx.States[baseState.CompensatedBy].BaseState + if value.UsedForCompensation && value.Type == StateTypeEvent { + structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedbyEventState, "") + + } else if !value.UsedForCompensation { + structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedby, "") + } + + } else { + structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagExists, "") + } } } diff --git a/model/states_validator_test.go b/model/states_validator_test.go index 296f726..8766d87 100644 --- a/model/states_validator_test.go +++ b/model/states_validator_test.go @@ -16,120 +16,136 @@ package model import ( "testing" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" ) -var stateTransitionDefault = State{ - BaseState: BaseState{ - Name: "name state", - Type: StateTypeOperation, - Transition: &Transition{ - NextState: "next name state", +func TestBaseStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + baseWorkflow.States = make(States, 0, 3) + + operationState := buildOperationState(baseWorkflow, "start state 1") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + operationState2 := buildOperationState(baseWorkflow, "state 2") + buildEndByState(operationState2, true, false) + action2 := buildActionByOperationState(operationState2, "action 2") + buildFunctionRef(baseWorkflow, action2, "function 2") + + eventState := 
buildEventState(baseWorkflow, "state 3") + buildOnEvents(baseWorkflow, eventState, "event 1") + buildEndByState(eventState, true, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + return *model + }, }, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{ - {}, + { + Desp: "repeat name", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States = []State{model.States[0], model.States[0]} + return *model + }, + Err: `workflow.states has duplicate "name"`, }, - }, -} - -var stateEndDefault = State{ - BaseState: BaseState{ - Name: "name state", - Type: StateTypeOperation, - End: &End{ - Terminate: true, + { + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.CompensatedBy = "invalid state compensate by" + return *model + }, + Err: `workflow.states[0].compensatedBy don't exist "invalid state compensate by"`, }, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{ - {}, + { + Desp: "tagcompensatedby", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.CompensatedBy = model.States[1].BaseState.Name + return *model + }, + Err: `workflow.states[0].compensatedBy = "state 2" is not defined as usedForCompensation`, }, - }, -} - -var switchStateTransitionDefault = State{ - BaseState: BaseState{ - Name: "name state", - Type: StateTypeSwitch, - }, - SwitchState: &SwitchState{ - DataConditions: []DataCondition{ - { - Condition: "${ .applicant | .age >= 18 }", - Transition: &Transition{ - NextState: "nex state", - }, + { + Desp: "compensatedbyeventstate", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[2].BaseState.UsedForCompensation = true + model.States[0].BaseState.CompensatedBy = model.States[2].BaseState.Name + return *model }, + Err: `workflow.states[0].compensatedBy = "state 3" is defined as usedForCompensation and cannot be an event state`, }, - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "nex state", + { + Desp: "recursivecompensation", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.UsedForCompensation = true + model.States[0].BaseState.CompensatedBy = model.States[0].BaseState.Name + return *model }, + Err: `workflow.states[0].compensatedBy = "start state 1" is defined as usedForCompensation (cannot themselves set their compensatedBy)`, }, - }, + } + + StructLevelValidationCtx(t, testCases) } func TestStateStructLevelValidation(t *testing.T) { - type testCase struct { - name string - instance State - err string - } + baseWorkflow := buildWorkflow() + baseWorkflow.States = make(States, 0, 2) - testCases := []testCase{ - { - name: "state transition success", - instance: stateTransitionDefault, - err: ``, - }, + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + operationState2 := buildOperationState(baseWorkflow, "next state") + buildEndByState(operationState2, true, false) + action2 := buildActionByOperationState(operationState2, "action 2") + buildFunctionRef(baseWorkflow, action2, "function 2") + + testCases := []ValidationCase{ { - name: "state end success", - instance: stateEndDefault, - err: ``, + Desp: "success", + Model: func() Workflow { 
+ return *baseWorkflow.DeepCopy() + }, }, { - name: "switch state success", - instance: switchStateTransitionDefault, - err: ``, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.End = nil + return *model + }, + Err: `workflow.states[0].transition is required`, }, { - name: "state end and transition", - instance: func() State { - s := stateTransitionDefault - s.End = stateEndDefault.End - return s - }(), - err: `Key: 'State.BaseState.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildTransitionByState(&model.States[0], &model.States[1], false) + + return *model + }, + Err: `workflow.states[0].transition exclusive`, }, { - name: "basestate without end and transition", - instance: func() State { - s := stateTransitionDefault - s.Transition = nil - return s - }(), - err: `Key: 'State.BaseState.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.Type = StateTypeOperation + "invalid" + return *model + }, + Err: `workflow.states[0].type need by one of [delay event operation parallel switch foreach inject callback sleep]`, }, } - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := val.GetValidator().Struct(tc.instance) - - if tc.err != "" { - assert.Error(t, err) - if err != nil { - assert.Equal(t, tc.err, err.Error()) - } - return - } - assert.NoError(t, err) - }) - } + StructLevelValidationCtx(t, testCases) } diff --git a/model/switch_state.go b/model/switch_state.go index 70f1b28..15d1a6d 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -17,21 +17,25 @@ package model import ( "encoding/json" "strings" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) +type EventConditions []EventCondition + // SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. type SwitchState struct { // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. // Default transition of the workflow if there is no matching data conditions. Can include a transition or // end definition. - DefaultCondition DefaultCondition `json:"defaultCondition" validate:"required_without=EventConditions"` + DefaultCondition DefaultCondition `json:"defaultCondition"` // Defines conditions evaluated against events. 
// +optional - EventConditions []EventCondition `json:"eventConditions" validate:"required_without=DefaultCondition"` + EventConditions EventConditions `json:"eventConditions" validate:"dive"` // Defines conditions evaluated against data // +optional - DataConditions []DataCondition `json:"dataConditions" validate:"omitempty,min=1,dive"` + DataConditions []DataCondition `json:"dataConditions" validate:"dive"` // SwitchState specific timeouts // +optional Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` @@ -74,7 +78,7 @@ type defaultConditionUnmarshal DefaultCondition // UnmarshalJSON implements json.Unmarshaler func (e *DefaultCondition) UnmarshalJSON(data []byte) error { var nextState string - err := unmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) + err := util.UnmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) if err != nil { return err } diff --git a/model/switch_state_validator.go b/model/switch_state_validator.go index 83f1379..5738104 100644 --- a/model/switch_state_validator.go +++ b/model/switch_state_validator.go @@ -18,42 +18,47 @@ import ( "reflect" validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func init() { - val.GetValidator().RegisterStructValidation(switchStateStructLevelValidation, SwitchState{}) - val.GetValidator().RegisterStructValidation(defaultConditionStructLevelValidation, DefaultCondition{}) - val.GetValidator().RegisterStructValidation(eventConditionStructLevelValidation, EventCondition{}) - val.GetValidator().RegisterStructValidation(dataConditionStructLevelValidation, DataCondition{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(switchStateStructLevelValidation), SwitchState{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(defaultConditionStructLevelValidation), DefaultCondition{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventConditionStructLevelValidationCtx), EventCondition{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(dataConditionStructLevelValidation), DataCondition{}) } // SwitchStateStructLevelValidation custom validator for SwitchState -func switchStateStructLevelValidation(structLevel validator.StructLevel) { +func switchStateStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { switchState := structLevel.Current().Interface().(SwitchState) switch { case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "required", "must have one of dataConditions, eventConditions") + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagRequired, "") case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", "exclusive", "must have one of dataConditions, eventConditions") + structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagExclusive, "") } } // DefaultConditionStructLevelValidation custom validator for DefaultCondition -func defaultConditionStructLevelValidation(structLevel validator.StructLevel) { +func defaultConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { defaultCondition := structLevel.Current().Interface().(DefaultCondition) - 
validTransitionAndEnd(structLevel, reflect.ValueOf(defaultCondition), defaultCondition.Transition, defaultCondition.End) + validTransitionAndEnd(structLevel, defaultCondition, defaultCondition.Transition, defaultCondition.End) } // EventConditionStructLevelValidation custom validator for EventCondition -func eventConditionStructLevelValidation(structLevel validator.StructLevel) { +func eventConditionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { eventCondition := structLevel.Current().Interface().(EventCondition) - validTransitionAndEnd(structLevel, reflect.ValueOf(eventCondition), eventCondition.Transition, eventCondition.End) + validTransitionAndEnd(structLevel, eventCondition, eventCondition.Transition, eventCondition.End) + + if eventCondition.EventRef != "" && !ctx.ExistEvent(eventCondition.EventRef) { + structLevel.ReportError(eventCondition.EventRef, "eventRef", "EventRef", val.TagExists, "") + } } // DataConditionStructLevelValidation custom validator for DataCondition -func dataConditionStructLevelValidation(structLevel validator.StructLevel) { +func dataConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { dataCondition := structLevel.Current().Interface().(DataCondition) - validTransitionAndEnd(structLevel, reflect.ValueOf(dataCondition), dataCondition.Transition, dataCondition.End) + validTransitionAndEnd(structLevel, dataCondition, dataCondition.Transition, dataCondition.End) } diff --git a/model/switch_state_validator_test.go b/model/switch_state_validator_test.go index 7bddc46..9c40462 100644 --- a/model/switch_state_validator_test.go +++ b/model/switch_state_validator_test.go @@ -16,314 +16,259 @@ package model import ( "testing" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" - "github.com/stretchr/testify/assert" ) -func TestSwitchStateStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj State - err string - } - testCases := []testCase{ - { - desp: "normal & eventConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, - }, - err: ``, +func buildSwitchState(workflow *Workflow, name string) *State { + state := State{ + BaseState: BaseState{ + Name: name, + Type: StateTypeSwitch, }, + SwitchState: &SwitchState{}, + } + + workflow.States = append(workflow.States, state) + return &workflow.States[len(workflow.States)-1] +} + +func buildDefaultCondition(state *State) *DefaultCondition { + state.SwitchState.DefaultCondition = DefaultCondition{} + return &state.SwitchState.DefaultCondition +} + +func buildDataCondition(state *State, name, condition string) *DataCondition { + if state.SwitchState.DataConditions == nil { + state.SwitchState.DataConditions = []DataCondition{} + } + + dataCondition := DataCondition{ + Name: name, + Condition: condition, + } + + state.SwitchState.DataConditions = append(state.SwitchState.DataConditions, dataCondition) + return &state.SwitchState.DataConditions[len(state.SwitchState.DataConditions)-1] +} + +func buildEventCondition(workflow *Workflow, state *State, name, eventRef string) (*Event, *EventCondition) { + workflow.Events = append(workflow.Events, Event{ + Name: eventRef, + Type: "event type", + Kind: EventKindConsumed, + }) + + eventCondition := 
EventCondition{ + Name: name, + EventRef: eventRef, + } + + state.SwitchState.EventConditions = append(state.SwitchState.EventConditions, eventCondition) + return &workflow.Events[len(workflow.Events)-1], &state.SwitchState.EventConditions[len(state.SwitchState.EventConditions)-1] +} + +func TestSwitchStateStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + swithState := buildSwitchState(baseWorkflow, "start state") + defaultCondition := buildDefaultCondition(swithState) + buildEndByDefaultCondition(defaultCondition, true, false) + + dataCondition := buildDataCondition(swithState, "data condition 1", "1=1") + buildEndByDataCondition(dataCondition, true, false) + + testCases := []ValidationCase{ { - desp: "normal & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - err: ``, }, { - desp: "missing eventConditions & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - }, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SwitchState.DataConditions = nil + return *model }, - err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'required' tag`, + Err: `workflow.states[0].switchState.dataConditions is required`, }, { - desp: "exclusive eventConditions & dataConditions", - obj: State{ - BaseState: BaseState{ - Name: "1", - Type: "switch", - }, - SwitchState: &SwitchState{ - DefaultCondition: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, - }, - EventConditions: []EventCondition{ - { - EventRef: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - DataConditions: []DataCondition{ - { - Condition: "1", - Transition: &Transition{ - NextState: "2", - }, - }, - }, - }, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildEventCondition(model, &model.States[0], "event condition", "event 1") + buildEndByEventCondition(&model.States[0].SwitchState.EventConditions[0], true, false) + return *model }, - err: `Key: 'State.SwitchState.DataConditions' Error:Field validation for 'DataConditions' failed on the 'exclusive' tag`, + Err: `workflow.states[0].switchState.dataConditions exclusive`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - }) - } + + StructLevelValidationCtx(t, testCases) } func TestDefaultConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj DefaultCondition - err string - } - testCases := []testCase{ + baseWorkflow := buildWorkflow() + buildSwitchState(baseWorkflow, "start state") + buildDefaultCondition(&baseWorkflow.States[0]) + + buildDataCondition(&baseWorkflow.States[0], "data condition 1", "1=1") + buildEndByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[0], true, false) + buildDataCondition(&baseWorkflow.States[0], "data 
condition 2", "1=1") + + buildOperationState(baseWorkflow, "end state") + buildEndByState(&baseWorkflow.States[1], true, false) + buildActionByOperationState(&baseWorkflow.States[1], "action 1") + buildFunctionRef(baseWorkflow, &baseWorkflow.States[1].OperationState.Actions[0], "function 1") + + buildTransitionByDefaultCondition(&baseWorkflow.States[0].SwitchState.DefaultCondition, &baseWorkflow.States[1]) + buildTransitionByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[1], &baseWorkflow.States[1], false) + + testCases := []ValidationCase{ { - desp: "normal & end", - obj: DefaultCondition{ - End: &End{ - Terminate: true, - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - err: ``, }, { - desp: "normal & transition", - obj: DefaultCondition{ - Transition: &Transition{ - NextState: "1", - }, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SwitchState.DataConditions[0].End = nil + return *model }, - err: ``, - }, - { - desp: "missing end & transition", - obj: DefaultCondition{}, - err: `DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, }, { - desp: "exclusive end & transition", - obj: DefaultCondition{ - End: &End{ - Terminate: true, - }, - Transition: &Transition{ - NextState: "1", - }, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) + return *model }, - err: `Key: 'DefaultCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, }, } - for _, tc := range testCases[2:] { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } + + StructLevelValidationCtx(t, testCases) +} + +func TestSwitchStateTimeoutStructLevelValidation(t *testing.T) { } func TestEventConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj EventCondition - err string - } - testCases := []testCase{ + baseWorkflow := buildWorkflow() + baseWorkflow.States = make(States, 0, 2) + + // switch state + switchState := buildSwitchState(baseWorkflow, "start state") + + // default condition + defaultCondition := buildDefaultCondition(switchState) + buildEndByDefaultCondition(defaultCondition, true, false) + + // event condition 1 + _, eventCondition := buildEventCondition(baseWorkflow, switchState, "data condition 1", "event 1") + buildEndByEventCondition(eventCondition, true, false) + + // event condition 2 + _, eventCondition2 := buildEventCondition(baseWorkflow, switchState, "data condition 2", "event 2") + buildEndByEventCondition(eventCondition2, true, false) + + // operation state + operationState := buildOperationState(baseWorkflow, "end state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + // trasition switch state to operation state + buildTransitionByEventCondition(eventCondition, operationState, false) + + testCases := []ValidationCase{ { - desp: "normal & end", - obj: EventCondition{ - EventRef: "1", - End: &End{ - 
Terminate: true, - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - err: ``, }, { - desp: "normal & transition", - obj: EventCondition{ - EventRef: "1", - Transition: &Transition{ - NextState: "1", - }, + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SwitchState.EventConditions[0].EventRef = "event not found" + return *model }, - err: ``, + Err: `workflow.states[0].switchState.eventConditions[0].eventRef don't exist "event not found"`, }, { - desp: "missing end & transition", - obj: EventCondition{ - EventRef: "1", + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SwitchState.EventConditions[0].End = nil + return *model }, - err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + Err: `workflow.states[0].switchState.eventConditions[0].transition is required`, }, { - desp: "exclusive end & transition", - obj: EventCondition{ - EventRef: "1", - End: &End{ - Terminate: true, - }, - Transition: &Transition{ - NextState: "1", - }, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildTransitionByEventCondition(&model.States[0].SwitchState.EventConditions[0], &model.States[1], false) + return *model }, - err: `Key: 'EventCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + Err: `workflow.states[0].switchState.eventConditions[0].transition exclusive`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } + + StructLevelValidationCtx(t, testCases) } func TestDataConditionStructLevelValidation(t *testing.T) { - type testCase struct { - desp string - obj DataCondition - err string - } - testCases := []testCase{ - { - desp: "normal & end", - obj: DataCondition{ - Condition: "1", - End: &End{ - Terminate: true, - }, - }, - err: ``, - }, + baseWorkflow := buildWorkflow() + // switch state + swithcState := buildSwitchState(baseWorkflow, "start state") + + // default condition + defaultCondition := buildDefaultCondition(swithcState) + buildEndByDefaultCondition(defaultCondition, true, false) + + // data condition + dataCondition := buildDataCondition(swithcState, "data condition 1", "1=1") + buildEndByDataCondition(dataCondition, true, false) + + // operation state + operationState := buildOperationState(baseWorkflow, "end state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - desp: "normal & transition", - obj: DataCondition{ - Condition: "1", - Transition: &Transition{ - NextState: "1", - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - err: ``, }, { - desp: "missing end & transition", - obj: DataCondition{ - Condition: "1", + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].SwitchState.DataConditions[0].End = nil + return *model }, - err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'required' tag`, + Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, }, { - desp: "exclusive end & transition", - obj: DataCondition{ - Condition: 
"1", - End: &End{ - Terminate: true, - }, - Transition: &Transition{ - NextState: "1", - }, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) + return *model }, - err: `Key: 'DataCondition.Transition' Error:Field validation for 'Transition' failed on the 'exclusive' tag`, + Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.obj) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } + + StructLevelValidationCtx(t, testCases) } diff --git a/model/workflow.go b/model/workflow.go index c3b9694..58b382a 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -16,13 +16,18 @@ package model import ( "encoding/json" + + "github.com/serverlessworkflow/sdk-go/v2/util" ) // InvokeKind defines how the target is invoked. type InvokeKind string func (i InvokeKind) KindValues() []string { - return []string{string(InvokeKindSync), string(InvokeKindAsync)} + return []string{ + string(InvokeKindSync), + string(InvokeKindAsync), + } } func (i InvokeKind) String() string { @@ -40,6 +45,17 @@ const ( // ActionMode specifies how actions are to be performed. type ActionMode string +func (i ActionMode) KindValues() []string { + return []string{ + string(ActionModeSequential), + string(ActionModeParallel), + } +} + +func (i ActionMode) String() string { + return string(i) +} + const ( // ActionModeSequential specifies actions should be performed in sequence ActionModeSequential ActionMode = "sequential" @@ -55,6 +71,17 @@ const ( type ExpressionLangType string +func (i ExpressionLangType) KindValues() []string { + return []string{ + string(JqExpressionLang), + string(JsonPathExpressionLang), + } +} + +func (i ExpressionLangType) String() string { + return string(i) +} + const ( //JqExpressionLang ... JqExpressionLang ExpressionLangType = "jq" @@ -99,7 +126,7 @@ type BaseWorkflow struct { // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, // inside your Workflow Expressions. // +optional - Secrets Secrets `json:"secrets,omitempty"` + Secrets Secrets `json:"secrets,omitempty" validate:"unique"` // Constants Workflow constants are used to define static, and immutable, data which is available to // Workflow Expressions. // +optional @@ -108,13 +135,13 @@ type BaseWorkflow struct { // +kubebuilder:validation:Enum=jq;jsonpath // +kubebuilder:default=jq // +optional - ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"omitempty,min=1,oneof=jq jsonpath"` + ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"required,oneofkind"` // Defines the workflow default timeout settings. // +optional Timeouts *Timeouts `json:"timeouts,omitempty"` // Defines checked errors that can be explicitly handled during workflow execution. // +optional - Errors Errors `json:"errors,omitempty"` + Errors Errors `json:"errors,omitempty" validate:"unique=Name,dive"` // If "true", workflow instances is not terminated when there are no active execution paths. 
// Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" // +optional @@ -133,7 +160,7 @@ type BaseWorkflow struct { // +kubebuilder:validation:Schemaless // +kubebuilder:pruning:PreserveUnknownFields // +optional - Auth Auths `json:"auth,omitempty" validate:"omitempty"` + Auth Auths `json:"auth,omitempty" validate:"unique=Name,dive"` } type Auths []Auth @@ -142,7 +169,7 @@ type authsUnmarshal Auths // UnmarshalJSON implements json.Unmarshaler func (r *Auths) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) + return util.UnmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) } type Errors []Error @@ -151,21 +178,20 @@ type errorsUnmarshal Errors // UnmarshalJSON implements json.Unmarshaler func (e *Errors) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) + return util.UnmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) } // Workflow base definition type Workflow struct { BaseWorkflow `json:",inline"` - // +kubebuilder:validation:MinItems=1 // +kubebuilder:pruning:PreserveUnknownFields - States []State `json:"states" validate:"required,min=1,dive"` + States States `json:"states" validate:"min=1,unique=Name,dive"` // +optional - Events Events `json:"events,omitempty"` + Events Events `json:"events,omitempty" validate:"unique=Name,dive"` // +optional - Functions Functions `json:"functions,omitempty"` + Functions Functions `json:"functions,omitempty" validate:"unique=Name,dive"` // +optional - Retries Retries `json:"retries,omitempty" validate:"dive"` + Retries Retries `json:"retries,omitempty" validate:"unique=Name,dive"` } type workflowUnmarshal Workflow @@ -173,7 +199,7 @@ type workflowUnmarshal Workflow // UnmarshalJSON implementation for json Unmarshal function for the Workflow type func (w *Workflow) UnmarshalJSON(data []byte) error { w.ApplyDefault() - err := unmarshalObject("workflow", data, (*workflowUnmarshal)(w)) + err := util.UnmarshalObject("workflow", data, (*workflowUnmarshal)(w)) if err != nil { return err } @@ -192,13 +218,14 @@ func (w *Workflow) ApplyDefault() { w.ExpressionLang = JqExpressionLang } +// +kubebuilder:validation:MinItems=1 type States []State type statesUnmarshal States // UnmarshalJSON implements json.Unmarshaler func (s *States) UnmarshalJSON(data []byte) error { - return unmarshalObject("states", data, (*statesUnmarshal)(s)) + return util.UnmarshalObject("states", data, (*statesUnmarshal)(s)) } type Events []Event @@ -207,7 +234,7 @@ type eventsUnmarshal Events // UnmarshalJSON implements json.Unmarshaler func (e *Events) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) + return util.UnmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) } type Functions []Function @@ -216,7 +243,7 @@ type functionsUnmarshal Functions // UnmarshalJSON implements json.Unmarshaler func (f *Functions) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) + return util.UnmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) } type Retries []Retry @@ -225,7 +252,7 @@ type retriesUnmarshal Retries // UnmarshalJSON implements json.Unmarshaler func (r *Retries) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) + return util.UnmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) } // Timeouts ... 
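The hunks above move the collection types (auth, errors, states, events, functions, retries) onto util.UnmarshalObjectOrFile / util.UnmarshalObject, while Workflow.UnmarshalJSON still calls ApplyDefault first, so expressionLang falls back to jq when the document omits it. A short sketch of that default, assuming the module path github.com/serverlessworkflow/sdk-go/v2/model (the JSON values are illustrative only):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// No "expressionLang" key here: ApplyDefault pre-fills it with "jq"
	// before util.UnmarshalObject decodes the rest of the document.
	doc := []byte(`{
		"id": "greeting",
		"version": "1.0",
		"specVersion": "0.8",
		"states": [{"name": "done", "type": "operation", "end": true}]
	}`)

	var wf model.Workflow
	if err := json.Unmarshal(doc, &wf); err != nil {
		panic(err)
	}
	fmt.Println(wf.ID, wf.ExpressionLang, len(wf.States)) // greeting jq 1
}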
@@ -252,7 +279,7 @@ type timeoutsUnmarshal Timeouts // UnmarshalJSON implements json.Unmarshaler func (t *Timeouts) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) + return util.UnmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) } // WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration @@ -260,7 +287,7 @@ func (t *Timeouts) UnmarshalJSON(data []byte) error { type WorkflowExecTimeout struct { // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. // +kubebuilder:default=unlimited - Duration string `json:"duration" validate:"required,min=1"` + Duration string `json:"duration" validate:"required,min=1,iso8601duration"` // If false, workflow instance is allowed to finish current execution. If true, current workflow execution // is stopped immediately. Default is false. // +optional @@ -275,7 +302,7 @@ type workflowExecTimeoutUnmarshal WorkflowExecTimeout // UnmarshalJSON implements json.Unmarshaler func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { w.ApplyDefault() - return unmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) + return util.UnmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) } // ApplyDefault set the default values for Workflow Exec Timeout @@ -312,7 +339,7 @@ type startUnmarshal Start // UnmarshalJSON implements json.Unmarshaler func (s *Start) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) + return util.UnmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) } // Schedule ... @@ -335,7 +362,7 @@ type scheduleUnmarshal Schedule // UnmarshalJSON implements json.Unmarshaler func (s *Schedule) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) + return util.UnmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) } // Cron ... @@ -352,12 +379,13 @@ type cronUnmarshal Cron // UnmarshalJSON custom unmarshal function for Cron func (c *Cron) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) + return util.UnmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) } // Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). // Each state can define a transition definition that is used to determine which state to transition to next. type Transition struct { + stateParent *State `json:"-"` // used in validation // Name of the state to transition to next. // +kubebuilder:validation:Required NextState string `json:"nextState" validate:"required,min=1"` @@ -374,7 +402,7 @@ type transitionUnmarshal Transition // UnmarshalJSON implements json.Unmarshaler func (t *Transition) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) + return util.UnmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) } // OnError ... @@ -382,7 +410,7 @@ type OnError struct { // ErrorRef Reference to a unique workflow error definition. 
Used of errorRefs is not used ErrorRef string `json:"errorRef,omitempty"` // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used - ErrorRefs []string `json:"errorRefs,omitempty"` + ErrorRefs []string `json:"errorRefs,omitempty" validate:"omitempty,unique"` // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if // retries were unsuccessful. // +kubebuilder:validation:Schemaless @@ -418,7 +446,7 @@ type endUnmarshal End // UnmarshalJSON implements json.Unmarshaler func (e *End) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) + return util.UnmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) } // ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) @@ -443,7 +471,7 @@ type continueAsUnmarshal ContinueAs // UnmarshalJSON implements json.Unmarshaler func (c *ContinueAs) UnmarshalJSON(data []byte) error { - return unmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) + return util.UnmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) } // ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a @@ -483,7 +511,7 @@ type dataInputSchemaUnmarshal DataInputSchema // UnmarshalJSON implements json.Unmarshaler func (d *DataInputSchema) UnmarshalJSON(data []byte) error { d.ApplyDefault() - return unmarshalPrimitiveOrObject("dataInputSchema", data, &d.Schema, (*dataInputSchemaUnmarshal)(d)) + return util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &d.Schema, (*dataInputSchemaUnmarshal)(d)) } // ApplyDefault set the default values for Data Input Schema @@ -499,7 +527,7 @@ type secretsUnmarshal Secrets // UnmarshalJSON implements json.Unmarshaler func (s *Secrets) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) + return util.UnmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) } // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. @@ -511,7 +539,7 @@ type Constants struct { // UnmarshalJSON implements json.Unmarshaler func (c *Constants) UnmarshalJSON(data []byte) error { - return unmarshalObjectOrFile("constants", data, &c.Data) + return util.UnmarshalObjectOrFile("constants", data, &c.Data) } type ConstantsData map[string]json.RawMessage diff --git a/model/workflow_ref.go b/model/workflow_ref.go index f0ec215..4c558cc 100644 --- a/model/workflow_ref.go +++ b/model/workflow_ref.go @@ -14,6 +14,27 @@ package model +import "github.com/serverlessworkflow/sdk-go/v2/util" + +// CompletionType define on how to complete branch execution. +type OnParentCompleteType string + +func (i OnParentCompleteType) KindValues() []string { + return []string{ + string(OnParentCompleteTypeTerminate), + string(OnParentCompleteTypeContinue), + } +} + +func (i OnParentCompleteType) String() string { + return string(i) +} + +const ( + OnParentCompleteTypeTerminate OnParentCompleteType = "terminate" + OnParentCompleteTypeContinue OnParentCompleteType = "continue" +) + // WorkflowRef holds a reference for a workflow definition type WorkflowRef struct { // Sub-workflow unique id @@ -32,7 +53,7 @@ type WorkflowRef struct { // is 'async'. Defaults to terminate. 
// +kubebuilder:validation:Enum=terminate;continue // +kubebuilder:default=terminate - OnParentComplete string `json:"onParentComplete,omitempty" validate:"required,oneof=terminate continue"` + OnParentComplete OnParentCompleteType `json:"onParentComplete,omitempty" validate:"required,oneofkind"` } type workflowRefUnmarshal WorkflowRef @@ -40,7 +61,7 @@ type workflowRefUnmarshal WorkflowRef // UnmarshalJSON implements json.Unmarshaler func (s *WorkflowRef) UnmarshalJSON(data []byte) error { s.ApplyDefault() - return unmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) + return util.UnmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) } // ApplyDefault set the default values for Workflow Ref diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go index 4788a16..4a69fb5 100644 --- a/model/workflow_ref_test.go +++ b/model/workflow_ref_test.go @@ -19,8 +19,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func TestWorkflowRefUnmarshalJSON(t *testing.T) { @@ -105,76 +103,3 @@ func TestWorkflowRefUnmarshalJSON(t *testing.T) { }) } } - -func TestWorkflowRefValidate(t *testing.T) { - type testCase struct { - desp string - workflowRef WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "all field & defaults", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "all field", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "missing workflowId", - workflowRef: WorkflowRef{ - WorkflowID: "", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: `Key: 'WorkflowRef.WorkflowID' Error:Field validation for 'WorkflowID' failed on the 'required' tag`, - }, - { - desp: "invalid invoke", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: "sync1", - OnParentComplete: "terminate", - }, - err: `Key: 'WorkflowRef.Invoke' Error:Field validation for 'Invoke' failed on the 'oneofkind' tag`, - }, - { - desp: "invalid onParentComplete", - workflowRef: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindSync, - OnParentComplete: "terminate1", - }, - err: `Key: 'WorkflowRef.OnParentComplete' Error:Field validation for 'OnParentComplete' failed on the 'oneof' tag`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := val.GetValidator().Struct(tc.workflowRef) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} diff --git a/model/workflow_ref_validator_test.go b/model/workflow_ref_validator_test.go new file mode 100644 index 0000000..96a7f9c --- /dev/null +++ b/model/workflow_ref_validator_test.go @@ -0,0 +1,68 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "testing" + +func TestWorkflowRefStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(&baseWorkflow.States[0], true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + baseWorkflow.States[0].OperationState.Actions[0].FunctionRef = nil + baseWorkflow.States[0].OperationState.Actions[0].SubFlowRef = &WorkflowRef{ + WorkflowID: "workflowID", + Invoke: InvokeKindSync, + OnParentComplete: OnParentCompleteTypeTerminate, + } + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() + }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].SubFlowRef.WorkflowID = "" + model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "" + model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "" + return *model + }, + Err: `workflow.states[0].actions[0].subFlowRef.workflowID is required +workflow.states[0].actions[0].subFlowRef.invoke is required +workflow.states[0].actions[0].subFlowRef.onParentComplete is required`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "invalid invoce" + model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "invalid parent complete" + return *model + }, + Err: `workflow.states[0].actions[0].subFlowRef.invoke need by one of [sync async] +workflow.states[0].actions[0].subFlowRef.onParentComplete need by one of [terminate continue]`, + }, + } + + StructLevelValidationCtx(t, testCases) +} diff --git a/model/workflow_test.go b/model/workflow_test.go index 86a0ecc..29a3720 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -21,6 +21,7 @@ import ( "net/http/httptest" "testing" + "github.com/serverlessworkflow/sdk-go/v2/util" "github.com/stretchr/testify/assert" ) @@ -567,7 +568,7 @@ func TestConstantsUnmarshalJSON(t *testing.T) { } })) defer server.Close() - httpClient = *server.Client() + util.HttpClient = *server.Client() type testCase struct { desp string diff --git a/model/workflow_validator.go b/model/workflow_validator.go index 2ea7cf5..7d94d1f 100644 --- a/model/workflow_validator.go +++ b/model/workflow_validator.go @@ -15,83 +15,216 @@ package model import ( - "reflect" + "context" validator "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func init() { - val.GetValidator().RegisterStructValidation(continueAsStructLevelValidation, ContinueAs{}) - val.GetValidator().RegisterStructValidation(workflowStructLevelValidation, Workflow{}) -} +type contextValueKey string + +const ValidatorContextValue contextValueKey = "value" + +type WorkflowValidator func(mapValues ValidatorContext, sl validator.StructLevel) -func continueAsStructLevelValidation(structLevel validator.StructLevel) { - continueAs := structLevel.Current().Interface().(ContinueAs) - if len(continueAs.WorkflowExecTimeout.Duration) > 0 { - if err := val.ValidateISO8601TimeDuration(continueAs.WorkflowExecTimeout.Duration); err != nil { - structLevel.ReportError(reflect.ValueOf(continueAs.WorkflowExecTimeout.Duration), - "workflowExecTimeout", 
"duration", "iso8601duration", "") +func ValidationWrap(fnCtx WorkflowValidator) validator.StructLevelFuncCtx { + return func(ctx context.Context, structLevel validator.StructLevel) { + if fnCtx != nil { + if mapValues, ok := ctx.Value(ValidatorContextValue).(ValidatorContext); ok { + fnCtx(mapValues, structLevel) + } } } } -// WorkflowStructLevelValidation custom validator -func workflowStructLevelValidation(structLevel validator.StructLevel) { - // unique name of the auth methods - // NOTE: we cannot add the custom validation of auth to Auth - // because `RegisterStructValidation` only works with struct type - wf := structLevel.Current().Interface().(Workflow) - dict := map[string]bool{} - - for _, a := range wf.BaseWorkflow.Auth { - if !dict[a.Name] { - dict[a.Name] = true - } else { - structLevel.ReportError(reflect.ValueOf(a.Name), "[]Auth.Name", "name", "reqnameunique", "") - } +type ValidatorContext struct { + States map[string]State + Functions map[string]Function + Events map[string]Event + Retries map[string]Retry + Errors map[string]Error +} + +func (c *ValidatorContext) init(workflow *Workflow) { + c.States = make(map[string]State, len(workflow.States)) + for _, state := range workflow.States { + c.States[state.BaseState.Name] = state } - startAndStatesTransitionValidator(structLevel, wf.BaseWorkflow.Start, wf.States) -} + c.Functions = make(map[string]Function, len(workflow.Functions)) + for _, function := range workflow.Functions { + c.Functions[function.Name] = function + } + + c.Events = make(map[string]Event, len(workflow.Events)) + for _, event := range workflow.Events { + c.Events[event.Name] = event + } -func startAndStatesTransitionValidator(structLevel validator.StructLevel, start *Start, states []State) { - statesMap := make(map[string]State, len(states)) - for _, state := range states { - statesMap[state.Name] = state + c.Retries = make(map[string]Retry, len(workflow.Retries)) + for _, retry := range workflow.Retries { + c.Retries[retry.Name] = retry } - if start != nil { - // if not exists the start transtion stop the states validations - if _, ok := statesMap[start.StateName]; !ok { - structLevel.ReportError(reflect.ValueOf(start), "Start", "start", "startnotexist", "") - return + c.Errors = make(map[string]Error, len(workflow.Errors)) + for _, error := range workflow.Errors { + c.Errors[error.Name] = error + } +} + +func (c *ValidatorContext) ExistState(name string) bool { + _, ok := c.States[name] + return ok +} + +func (c *ValidatorContext) ExistFunction(name string) bool { + _, ok := c.Functions[name] + return ok +} + +func (c *ValidatorContext) ExistEvent(name string) bool { + _, ok := c.Events[name] + return ok +} + +func (c *ValidatorContext) ExistRetry(name string) bool { + _, ok := c.Retries[name] + return ok +} + +func (c *ValidatorContext) ExistError(name string) bool { + _, ok := c.Errors[name] + return ok +} + +func NewValidatorContext(workflow *Workflow) context.Context { + for i := range workflow.States { + s := &workflow.States[i] + if s.BaseState.Transition != nil { + s.BaseState.Transition.stateParent = s + } + for _, onError := range s.BaseState.OnErrors { + if onError.Transition != nil { + onError.Transition.stateParent = s + } + } + if s.Type == StateTypeSwitch { + if s.SwitchState.DefaultCondition.Transition != nil { + s.SwitchState.DefaultCondition.Transition.stateParent = s + } + for _, e := range s.SwitchState.EventConditions { + if e.Transition != nil { + e.Transition.stateParent = s + } + } + for _, d := range 
s.SwitchState.DataConditions { + if d.Transition != nil { + d.Transition.stateParent = s + } + } } } - if len(states) == 1 { + contextValue := ValidatorContext{} + contextValue.init(workflow) + + return context.WithValue(context.Background(), ValidatorContextValue, contextValue) +} + +func init() { + // TODO: create states graph to complex check + + // val.GetValidator().RegisterStructValidationCtx(val.ValidationWrap(nil, workflowStructLevelValidation), Workflow{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onErrorStructLevelValidationCtx), OnError{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(transitionStructLevelValidationCtx), Transition{}) + val.GetValidator().RegisterStructValidationCtx(ValidationWrap(startStructLevelValidationCtx), Start{}) +} + +func startStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { + start := structLevel.Current().Interface().(Start) + if start.StateName != "" && !ctx.ExistState(start.StateName) { + structLevel.ReportError(start.StateName, "StateName", "stateName", val.TagExists, "") + return + } +} + +func onErrorStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { + onError := structLevel.Current().Interface().(OnError) + hasErrorRef := onError.ErrorRef != "" + hasErrorRefs := len(onError.ErrorRefs) > 0 + + if !hasErrorRef && !hasErrorRefs { + structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagRequired, "") + } else if hasErrorRef && hasErrorRefs { + structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExclusive, "") return } + if onError.ErrorRef != "" && !ctx.ExistError(onError.ErrorRef) { + structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExists, "") + } + + for _, errorRef := range onError.ErrorRefs { + if !ctx.ExistError(errorRef) { + structLevel.ReportError(onError.ErrorRefs, "ErrorRefs", "ErrorRefs", val.TagExists, "") + } + } +} + +func transitionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { // Naive check if transitions exist - for _, state := range statesMap { - if state.Transition != nil { - if _, ok := statesMap[state.Transition.NextState]; !ok { - structLevel.ReportError(reflect.ValueOf(state), "Transition", "transition", "transitionnotexists", state.Transition.NextState) + transition := structLevel.Current().Interface().(Transition) + if ctx.ExistState(transition.NextState) { + if transition.stateParent != nil { + parentBaseState := transition.stateParent + + if parentBaseState.Name == transition.NextState { + // TODO: Improve recursive check + structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagRecursiveState, parentBaseState.Name) + } + + if parentBaseState.UsedForCompensation && !ctx.States[transition.NextState].BaseState.UsedForCompensation { + structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionUseForCompensation, "") + } + + if !parentBaseState.UsedForCompensation && ctx.States[transition.NextState].BaseState.UsedForCompensation { + structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionMainWorkflow, "") } } - } - // TODO: create states graph to complex check + } else { + structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagExists, "") + } } -func validTransitionAndEnd(structLevel validator.StructLevel, field interface{}, transition *Transition, end *End) { +func validTransitionAndEnd(structLevel 
validator.StructLevel, field any, transition *Transition, end *End) { hasTransition := transition != nil isEnd := end != nil && (end.Terminate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end if !hasTransition && !isEnd { - structLevel.ReportError(field, "Transition", "transition", "required", "must have one of transition, end") + structLevel.ReportError(field, "Transition", "transition", val.TagRequired, "") } else if hasTransition && isEnd { - structLevel.ReportError(field, "Transition", "transition", "exclusive", "must have one of transition, end") + structLevel.ReportError(field, "Transition", "transition", val.TagExclusive, "") + } +} + +func validationNotExclusiveParamters(values []bool) bool { + hasOne := false + hasTwo := false + + for i, val1 := range values { + if val1 { + hasOne = true + for j, val2 := range values { + if i != j && val2 { + hasTwo = true + break + } + } + break + } } + + return hasOne && hasTwo } diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go index c305898..10e935a 100644 --- a/model/workflow_validator_test.go +++ b/model/workflow_validator_test.go @@ -22,216 +22,487 @@ import ( val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -var workflowStructDefault = Workflow{ - BaseWorkflow: BaseWorkflow{ - ID: "id", - SpecVersion: "0.8", - Auth: Auths{ - { - Name: "auth name", +func buildWorkflow() *Workflow { + return &Workflow{ + BaseWorkflow: BaseWorkflow{ + ID: "id", + Key: "key", + Name: "name", + SpecVersion: "0.8", + Version: "0.1", + ExpressionLang: JqExpressionLang, + }, + } +} + +func buildEndByState(state *State, terminate, compensate bool) *End { + end := &End{ + Terminate: terminate, + Compensate: compensate, + } + state.BaseState.End = end + return end +} + +func buildEndByDefaultCondition(defaultCondition *DefaultCondition, terminate, compensate bool) *End { + end := &End{ + Terminate: terminate, + Compensate: compensate, + } + defaultCondition.End = end + return end +} + +func buildEndByDataCondition(dataCondition *DataCondition, terminate, compensate bool) *End { + end := &End{ + Terminate: terminate, + Compensate: compensate, + } + dataCondition.End = end + return end +} + +func buildEndByEventCondition(eventCondition *EventCondition, terminate, compensate bool) *End { + end := &End{ + Terminate: terminate, + Compensate: compensate, + } + eventCondition.End = end + return end +} + +func buildStart(workflow *Workflow, state *State) { + start := &Start{ + StateName: state.BaseState.Name, + } + workflow.BaseWorkflow.Start = start +} + +func buildTransitionByState(state, nextState *State, compensate bool) { + state.BaseState.Transition = &Transition{ + NextState: nextState.BaseState.Name, + Compensate: compensate, + } +} + +func buildTransitionByDataCondition(dataCondition *DataCondition, state *State, compensate bool) { + dataCondition.Transition = &Transition{ + NextState: state.BaseState.Name, + Compensate: compensate, + } +} + +func buildTransitionByEventCondition(eventCondition *EventCondition, state *State, compensate bool) { + eventCondition.Transition = &Transition{ + NextState: state.BaseState.Name, + Compensate: compensate, + } +} + +func buildTransitionByDefaultCondition(defaultCondition *DefaultCondition, state *State) { + defaultCondition.Transition = &Transition{ + NextState: state.BaseState.Name, + } +} + +func buildTimeouts(workflow *Workflow) *Timeouts { + timeouts := Timeouts{} + workflow.BaseWorkflow.Timeouts 
= &timeouts + return workflow.BaseWorkflow.Timeouts +} + +func TestBaseWorkflowStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, }, - Start: &Start{ - StateName: "name state", + { + Desp: "id exclude key", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.ID = "id" + model.Key = "" + return *model + }, }, - }, - States: []State{ { - BaseState: BaseState{ - Name: "name state", - Type: StateTypeOperation, - Transition: &Transition{ - NextState: "next name state", - }, + Desp: "key exclude id", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.ID = "" + model.Key = "key" + return *model }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{ - {}, - }, + }, + { + Desp: "without id and key", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.ID = "" + model.Key = "" + return *model + }, + Err: `workflow.id required when "workflow.key" is not defined +workflow.key required when "workflow.id" is not defined`, + }, + { + Desp: "oneofkind", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.BaseWorkflow.ExpressionLang = JqExpressionLang + "invalid" + return *model }, + Err: `workflow.expressionLang need by one of [jq jsonpath]`, }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestContinueAsStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + baseWorkflow.States[0].BaseState.End.ContinueAs = &ContinueAs{ + WorkflowID: "sub workflow", + WorkflowExecTimeout: WorkflowExecTimeout{ + Duration: "P1M", + }, + } + + testCases := []ValidationCase{ { - BaseState: BaseState{ - Name: "next name state", - Type: StateTypeOperation, - End: &End{ - Terminate: true, - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{ - {}, - }, + }, + { + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.End.ContinueAs.WorkflowID = "" + return *model }, + Err: `workflow.states[0].end.continueAs.workflowID is required`, }, - }, + } + + StructLevelValidationCtx(t, testCases) } -var listStateTransition1 = []State{ - { - BaseState: BaseState{ - Name: "name state", - Type: StateTypeOperation, - Transition: &Transition{ - NextState: "next name state", +func TestOnErrorStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + baseWorkflow.BaseWorkflow.Errors = Errors{{ + Name: "error 1", + }, { + Name: "error 2", + }} + baseWorkflow.States[0].BaseState.OnErrors = []OnError{{ + ErrorRef: "error 1", + }, { + ErrorRefs: []string{"error 1", "error 2"}, 
+ }} + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{{}}, - }, - }, - { - BaseState: BaseState{ - Name: "next name state", - Type: StateTypeOperation, - Transition: &Transition{ - NextState: "next name state 2", - }, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{{}}, - }, - }, - { - BaseState: BaseState{ - Name: "next name state 2", - Type: StateTypeOperation, - End: &End{ - Terminate: true, - }, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - Actions: []Action{{}}, - }, - }, -} - -func TestWorkflowStructLevelValidation(t *testing.T) { - type testCase[T any] struct { - name string - instance T - err string - } - testCases := []testCase[any]{ - { - name: "workflow success", - instance: workflowStructDefault, - }, - { - name: "workflow auth.name repeat", - instance: func() Workflow { - w := workflowStructDefault - w.Auth = append(w.Auth, w.Auth[0]) - return w - }(), - err: `Key: 'Workflow.[]Auth.Name' Error:Field validation for '[]Auth.Name' failed on the 'reqnameunique' tag`, - }, { - name: "workflow id exclude key", - instance: func() Workflow { - w := workflowStructDefault - w.ID = "id" - w.Key = "" - return w - }(), - err: ``, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.OnErrors[0].ErrorRef = "" + return *model + }, + Err: `workflow.states[0].onErrors[0].errorRef is required`, }, { - name: "workflow key exclude id", - instance: func() Workflow { - w := workflowStructDefault - w.ID = "" - w.Key = "key" - return w - }(), - err: ``, + Desp: "exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OnErrors[0].ErrorRef = "error 1" + model.States[0].OnErrors[0].ErrorRefs = []string{"error 2"} + return *model + }, + Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, }, { - name: "workflow id and key", - instance: func() Workflow { - w := workflowStructDefault - w.ID = "id" - w.Key = "key" - return w - }(), - err: ``, + Desp: "exists and exclusive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" + model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} + return *model + }, + Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, }, { - name: "workflow without id and key", - instance: func() Workflow { - w := workflowStructDefault - w.ID = "" - w.Key = "" - return w - }(), - err: `Key: 'Workflow.BaseWorkflow.ID' Error:Field validation for 'ID' failed on the 'required_without' tag -Key: 'Workflow.BaseWorkflow.Key' Error:Field validation for 'Key' failed on the 'required_without' tag`, - }, - { - name: "workflow start", - instance: func() Workflow { - w := workflowStructDefault - w.Start = &Start{ - StateName: "start state not found", - } - return w - }(), - err: `Key: 'Workflow.Start' Error:Field validation for 'Start' failed on the 'startnotexist' tag`, + Desp: "exists errorRef", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" + return *model + }, + Err: `workflow.states[0].onErrors[0].errorRef don't exist "invalid error name"`, + }, + { + Desp: "exists errorRefs", + Model: 
func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.OnErrors[0].ErrorRef = "" + model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} + return *model + }, + Err: `workflow.states[0].onErrors[0].errorRefs don't exist ["invalid error name"]`, }, { - name: "workflow states transitions", - instance: func() Workflow { - w := workflowStructDefault - w.States = listStateTransition1 - return w - }(), - err: ``, + Desp: "duplicate", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].OnErrors[1].ErrorRefs = []string{"error 1", "error 1"} + return *model + }, + Err: `workflow.states[0].onErrors[1].errorRefs has duplicate value`, }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestStartStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildStart(baseWorkflow, operationState) + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ { - name: "valid ContinueAs", - instance: ContinueAs{ - WorkflowID: "another-test", - Version: "2", - Data: FromString("${ del(.customerCount) }"), - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "PT1H", - Interrupt: false, - RunBefore: "test", - }, + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() }, - err: ``, }, { - name: "invalid WorkflowExecTimeout", - instance: ContinueAs{ - WorkflowID: "test", - Version: "1", - Data: FromString("${ del(.customerCount) }"), - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "invalid", - }, + Desp: "required", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Start.StateName = "" + return *model }, - err: `Key: 'ContinueAs.workflowExecTimeout' Error:Field validation for 'workflowExecTimeout' failed on the 'iso8601duration' tag`, + Err: `workflow.start.stateName is required`, + }, + { + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Start.StateName = "start state not found" + return *model + }, + Err: `workflow.start.stateName don't exist "start state not found"`, }, } - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - err := val.GetValidator().Struct(tc.instance) + StructLevelValidationCtx(t, testCases) +} + +func TestTransitionStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + baseWorkflow.States = make(States, 0, 5) + + operationState := buildOperationState(baseWorkflow, "start state") + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + operationState2 := buildOperationState(baseWorkflow, "next state") + buildEndByState(operationState2, true, false) + operationState2.BaseState.CompensatedBy = "compensation next state 1" + action2 := buildActionByOperationState(operationState2, "action 1") + buildFunctionRef(baseWorkflow, action2, "function 2") + + buildTransitionByState(operationState, operationState2, false) + + operationState3 := buildOperationState(baseWorkflow, "compensation next state 1") + operationState3.BaseState.UsedForCompensation = true + action3 := buildActionByOperationState(operationState3, "action 1") + buildFunctionRef(baseWorkflow, action3, "function 3") + + operationState4 := buildOperationState(baseWorkflow, "compensation next state 2") + 
operationState4.BaseState.UsedForCompensation = true + action4 := buildActionByOperationState(operationState4, "action 1") + buildFunctionRef(baseWorkflow, action4, "function 4") + + buildTransitionByState(operationState3, operationState4, false) + + operationState5 := buildOperationState(baseWorkflow, "compensation next state 3") + buildEndByState(operationState5, true, false) + operationState5.BaseState.UsedForCompensation = true + action5 := buildActionByOperationState(operationState5, "action 5") + buildFunctionRef(baseWorkflow, action5, "function 5") + + buildTransitionByState(operationState4, operationState5, false) + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() + }, + }, + { + Desp: "state recursive", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.Transition.NextState = model.States[0].BaseState.Name + return *model + }, + Err: `workflow.states[0].transition.nextState can't no be recursive "start state"`, + }, + { + Desp: "exists", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.Transition.NextState = "invalid next state" + return *model + }, + Err: `workflow.states[0].transition.nextState don't exist "invalid next state"`, + }, + { + Desp: "transitionusedforcompensation", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[3].BaseState.UsedForCompensation = false + return *model + }, + Err: `Key: 'Workflow.States[2].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transitionusedforcompensation' tag +Key: 'Workflow.States[3].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, + }, + { + Desp: "transtionmainworkflow", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.States[0].BaseState.Transition.NextState = model.States[3].BaseState.Name + return *model + }, + Err: `Key: 'Workflow.States[0].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestSecretsStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "workflow secrets.name repeat", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Secrets = []string{"secret 1", "secret 1"} + return *model + }, + Err: `workflow.secrets has duplicate value`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +func TestErrorStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") - if tc.err != "" { - assert.Error(t, err) - if err != nil { - assert.Equal(t, tc.err, err.Error()) + baseWorkflow.BaseWorkflow.Errors = Errors{{ + Name: "error 1", + }, { + Name: "error 2", + }} + + testCases := []ValidationCase{ + { + Desp: "success", + Model: func() Workflow { + return *baseWorkflow.DeepCopy() + }, + }, + { + Desp: "required", + Model: func() Workflow 
{ + model := baseWorkflow.DeepCopy() + model.Errors[0].Name = "" + return *model + }, + Err: `workflow.errors[0].name is required`, + }, + { + Desp: "repeat", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.Errors = Errors{model.Errors[0], model.Errors[0]} + return *model + }, + Err: `workflow.errors has duplicate "name"`, + }, + } + + StructLevelValidationCtx(t, testCases) +} + +type ValidationCase struct { + Desp string + Model func() Workflow + Err string +} + +func StructLevelValidationCtx(t *testing.T, testCases []ValidationCase) { + for _, tc := range testCases { + t.Run(tc.Desp, func(t *testing.T) { + model := tc.Model() + err := val.GetValidator().StructCtx(NewValidatorContext(&model), model) + err = val.WorkflowError(err) + if tc.Err != "" { + if assert.Error(t, err) { + assert.Equal(t, tc.Err, err.Error()) } - return + } else { + assert.NoError(t, err) } - assert.NoError(t, err) }) } } diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index d04a11b..804706f 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -747,6 +747,28 @@ func (in *EventCondition) DeepCopy() *EventCondition { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in EventConditions) DeepCopyInto(out *EventConditions) { + { + in := &in + *out = make(EventConditions, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventConditions. +func (in EventConditions) DeepCopy() EventConditions { + if in == nil { + return nil + } + out := new(EventConditions) + in.DeepCopyInto(out) + return *out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *EventDataFilter) DeepCopyInto(out *EventDataFilter) { *out = *in @@ -1567,7 +1589,7 @@ func (in *SwitchState) DeepCopyInto(out *SwitchState) { in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) if in.EventConditions != nil { in, out := &in.EventConditions, &out.EventConditions - *out = make([]EventCondition, len(*in)) + *out = make(EventConditions, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1647,6 +1669,11 @@ func (in *Timeouts) DeepCopy() *Timeouts { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Transition) DeepCopyInto(out *Transition) { *out = *in + if in.stateParent != nil { + in, out := &in.stateParent, &out.stateParent + *out = new(State) + (*in).DeepCopyInto(*out) + } if in.ProduceEvents != nil { in, out := &in.ProduceEvents, &out.ProduceEvents *out = make([]ProduceEvent, len(*in)) @@ -1667,13 +1694,64 @@ func (in *Transition) DeepCopy() *Transition { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ValidatorContext) DeepCopyInto(out *ValidatorContext) { + *out = *in + if in.States != nil { + in, out := &in.States, &out.States + *out = make(map[string]State, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Functions != nil { + in, out := &in.Functions, &out.Functions + *out = make(map[string]Function, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Events != nil { + in, out := &in.Events, &out.Events + *out = make(map[string]Event, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Retries != nil { + in, out := &in.Retries, &out.Retries + *out = make(map[string]Retry, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.Errors != nil { + in, out := &in.Errors, &out.Errors + *out = make(map[string]Error, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidatorContext. +func (in *ValidatorContext) DeepCopy() *ValidatorContext { + if in == nil { + return nil + } + out := new(ValidatorContext) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Workflow) DeepCopyInto(out *Workflow) { *out = *in in.BaseWorkflow.DeepCopyInto(&out.BaseWorkflow) if in.States != nil { in, out := &in.States, &out.States - *out = make([]State, len(*in)) + *out = make(States, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } diff --git a/parser/parser.go b/parser/parser.go index fe9972d..fc50692 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -21,10 +21,10 @@ import ( "path/filepath" "strings" - "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" "github.com/serverlessworkflow/sdk-go/v2/model" - "sigs.k8s.io/yaml" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) const ( @@ -50,7 +50,9 @@ func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { if err := json.Unmarshal(source, workflow); err != nil { return nil, err } - if err := validator.GetValidator().Struct(workflow); err != nil { + + ctx := model.NewValidatorContext(workflow) + if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { return nil, err } return workflow, nil diff --git a/parser/parser_test.go b/parser/parser_test.go index be0ac4d..b52840e 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -21,12 +21,12 @@ import ( "strings" "testing" - "k8s.io/apimachinery/pkg/util/intstr" - "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" "github.com/serverlessworkflow/sdk-go/v2/model" "github.com/serverlessworkflow/sdk-go/v2/test" + "github.com/serverlessworkflow/sdk-go/v2/util" ) func TestBasicValidation(t *testing.T) { @@ -34,7 +34,7 @@ func TestBasicValidation(t *testing.T) { files, err := os.ReadDir(rootPath) assert.NoError(t, err) - model.SetIncludePaths(append(model.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) + util.SetIncludePaths(append(util.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) for _, file := range files { if !file.IsDir() { @@ -350,7 +350,7 @@ func TestFromFile(t *testing.T) { "./testdata/workflows/purchaseorderworkflow.sw.json", func(t *testing.T, w *model.Workflow) { assert.Equal(t, "Purchase Order Workflow", w.Name) assert.NotNil(t, w.Timeouts) - 
assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "P30D", w.Timeouts.WorkflowExecTimeout.Duration) assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) }, }, { @@ -393,7 +393,7 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, w.Functions[2]) assert.Equal(t, "greetingFunction", w.Functions[2].Name) - assert.Empty(t, w.Functions[2].Type) + assert.Equal(t, model.FunctionTypeREST, w.Functions[2].Type) assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) // Delay state @@ -465,7 +465,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, &model.Transition{NextState: "HandleNoVisaDecision"}, w.States[3].SwitchState.DefaultCondition.Transition) + assert.Equal(t, "HandleNoVisaDecision", w.States[3].SwitchState.DefaultCondition.Transition.NextState) // DataBasedSwitchState dataBased := w.States[4].SwitchState @@ -474,9 +474,7 @@ func TestFromFile(t *testing.T) { dataCondition := dataBased.DataConditions[0] assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) - assert.Equal(t, &model.Transition{ - NextState: "RejectApplication", - }, w.States[4].DefaultCondition.Transition) + assert.Equal(t, "RejectApplication", w.States[4].DefaultCondition.Transition.NextState) assert.Equal(t, "PT1S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT2S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Single) @@ -489,9 +487,10 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) assert.NotNil(t, w.States[5].OperationState.Actions[0].FunctionRef) assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].FunctionRef.RefName) - assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) - assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) - assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) + + // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) + // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) + // assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) assert.Equal(t, "PT1H", w.States[5].OperationState.Timeouts.ActionExecTimeout) assert.Equal(t, "PT1S", w.States[5].OperationState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT2S", w.States[5].OperationState.Timeouts.StateExecTimeout.Single) @@ -514,9 +513,9 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "sendTextFunction", w.States[6].ForEachState.Actions[0].FunctionRef.RefName) assert.Equal(t, map[string]model.Object{"message": model.FromString("${ .singlemessage }")}, w.States[6].ForEachState.Actions[0].FunctionRef.Arguments) - assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) - assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) - assert.Equal(t, "PT12H", w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) + // assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) + 
// assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) + // assert.Equal(t, "PT12H", w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) assert.Equal(t, "PT11H", w.States[6].ForEachState.Timeouts.ActionExecTimeout) assert.Equal(t, "PT11S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Total) @@ -744,6 +743,22 @@ auth: metadata: auth1: auth1 auth2: auth2 +events: +- name: StoreBidFunction + type: store +- name: CarBidEvent + type: store +- name: visaRejectedEvent + type: store +- name: visaApprovedEventRef + type: store +functions: +- name: callCreditCheckMicroservice + operation: http://myapis.org/creditcheck.json#checkCredit +- name: StoreBidFunction + operation: http://myapis.org/storebid.json#storeBid +- name: sendTextFunction + operation: http://myapis.org/inboxapi.json#sendText states: - name: GreetDelay type: delay @@ -848,11 +863,6 @@ states: refName: sendTextFunction arguments: message: "${ .singlemessage }" - eventRef: - triggerEventRef: example1 - resultEventRef: example2 - # Added "resultEventTimeout" for action eventref - resultEventTimeout: PT12H timeouts: actionExecTimeout: PT11H stateExecTimeout: @@ -910,9 +920,6 @@ states: - name: HandleApprovedVisa type: operation actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - eventRef: triggerEventRef: StoreBidFunction data: "${ .patientInfo }" @@ -926,13 +933,28 @@ states: stateExecTimeout: total: PT33M single: PT123M + transition: HandleApprovedVisaSubFlow +- name: HandleApprovedVisaSubFlow + type: operation + actions: + - subFlowRef: + workflowId: handleApprovedVisaWorkflowID + name: subFlowRefName + end: + terminate: true +- name: HandleRejectedVisa + type: operation + actions: + - subFlowRef: + workflowId: handleApprovedVisaWorkflowID + name: subFlowRefName end: terminate: true `)) - assert.Nil(t, err) + assert.NoError(t, err) assert.NotNil(t, workflow) b, err := json.Marshal(workflow) - assert.Nil(t, err) + assert.NoError(t, err) // workflow and auth metadata assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) @@ -942,7 +964,7 @@ states: assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"},\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) // Operation State - assert.True(t, strings.Contains(string(b), "{\"name\":\"HandleApprovedVisa\",\"type\":\"operation\",\"end\":{\"terminate\":true},\"actionMode\":\"sequential\",\"actions\":[{\"name\":\"subFlowRefName\",\"subFlowRef\":{\"workflowId\":\"handleApprovedVisaWorkflowID\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"eventRefName\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .customer 
}\",\"time\":50},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT123M\",\"total\":\"PT33M\"},\"actionExecTimeout\":\"PT777S\"}}")) + assert.True(t, strings.Contains(string(b), `{"name":"HandleApprovedVisa","type":"operation","transition":{"nextState":"HandleApprovedVisaSubFlow"},"actionMode":"sequential","actions":[{"name":"eventRefName","eventRef":{"triggerEventRef":"StoreBidFunction","resultEventRef":"StoreBidFunction","data":"${ .patientInfo }","contextAttributes":{"customer":"${ .customer }","time":50},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"timeouts":{"stateExecTimeout":{"single":"PT123M","total":"PT33M"},"actionExecTimeout":"PT777S"}}`)) // Delay State assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"StoreCarAuctionBid\"},\"timeDelay\":\"PT5S\"}")) @@ -960,7 +982,7 @@ states: assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"dataConditions\":[{\"condition\":\"${ true }\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) // Foreach State - assert.True(t, strings.Contains(string(b), "{\"name\":\"SendTextForHighPriority\",\"type\":\"foreach\",\"transition\":{\"nextState\":\"HelloInject\"},\"inputCollection\":\"${ .messages }\",\"outputCollection\":\"${ .outputMessages }\",\"iterationParam\":\"${ .this }\",\"batchSize\":45,\"actions\":[{\"name\":\"test\",\"functionRef\":{\"refName\":\"sendTextFunction\",\"arguments\":{\"message\":\"${ .singlemessage }\"},\"invoke\":\"sync\"},\"eventRef\":{\"triggerEventRef\":\"example1\",\"resultEventRef\":\"example2\",\"resultEventTimeout\":\"PT12H\",\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"mode\":\"sequential\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22S\",\"total\":\"PT11S\"},\"actionExecTimeout\":\"PT11H\"}}")) + assert.True(t, strings.Contains(string(b), `{"name":"SendTextForHighPriority","type":"foreach","transition":{"nextState":"HelloInject"},"inputCollection":"${ .messages }","outputCollection":"${ .outputMessages }","iterationParam":"${ .this }","batchSize":45,"actions":[{"name":"test","functionRef":{"refName":"sendTextFunction","arguments":{"message":"${ .singlemessage }"},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"mode":"sequential","timeouts":{"stateExecTimeout":{"single":"PT22S","total":"PT11S"},"actionExecTimeout":"PT11H"}}`)) // Inject State assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"transition\":{\"nextState\":\"WaitForCompletionSleep\"},\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) diff --git a/parser/testdata/workflows/customerbankingtransactions.json b/parser/testdata/workflows/customerbankingtransactions.json index 933c7e4..98fbd34 100644 --- a/parser/testdata/workflows/customerbankingtransactions.json +++ b/parser/testdata/workflows/customerbankingtransactions.json @@ -35,7 +35,7 @@ "operation": "banking.yaml#largerTransation" }, { - "name": "Banking Service - Smaller T", + "name": "Banking Service - Smaller Tx", "type": "asyncapi", "operation": "banking.yaml#smallerTransation" } diff --git a/parser/testdata/workflows/customercreditcheck.json 
b/parser/testdata/workflows/customercreditcheck.json index d19c009..8a3914f 100644 --- a/parser/testdata/workflows/customercreditcheck.json +++ b/parser/testdata/workflows/customercreditcheck.json @@ -13,6 +13,10 @@ { "name": "sendRejectionEmailFunction", "operation": "http://myapis.org/creditcheckapi.json#rejectionEmail" + }, + { + "name": "callCreditCheckMicroservice", + "operation": "http://myapis.org/creditcheckapi.json#creditCheckMicroservice" } ], "events": [ diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json index df9d7dd..80e81b0 100644 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json @@ -23,6 +23,10 @@ { "name": "greetingFunction", "operation": "file://myapis/greetingapis.json#greeting" + }, + { + "name": "greetingFunction2", + "operation": "file://myapis/greetingapis.json#greeting2" } ], "states": [ diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml index ff4b21f..015a711 100644 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ b/parser/testdata/workflows/greetings-v08-spec.sw.yaml @@ -28,6 +28,23 @@ functions: type: graphql - name: greetingFunction operation: file://myapis/greetingapis.json#greeting + - name: StoreBidFunction + operation: http://myapis.org/inboxapi.json#storeBidFunction + - name: callCreditCheckMicroservice + operation: http://myapis.org/inboxapi.json#callCreditCheckMicroservice +events: + - name: StoreBidFunction + type: StoreBidFunction + source: StoreBidFunction + - name: CarBidEvent + type: typeCarBidEvent + source: sourceCarBidEvent + - name: visaApprovedEventRef + type: typeVisaApprovedEventRef + source: sourceVisaApprovedEventRef + - name: visaRejectedEvent + type: typeVisaRejectedEvent + source: sourceVisaRejectedEvent states: - name: GreetDelay type: delay @@ -129,11 +146,6 @@ states: name: "${ .greet | .name }" actionDataFilter: dataResultsPath: "${ .payload | .greeting }" - eventRef: - triggerEventRef: example - resultEventRef: example - # Added "resultEventTimeout" for action eventref - resultEventTimeout: PT1H timeouts: actionExecTimeout: PT1H stateExecTimeout: @@ -155,11 +167,6 @@ states: refName: sendTextFunction arguments: message: "${ .singlemessage }" - eventRef: - triggerEventRef: example1 - resultEventRef: example2 - # Added "resultEventTimeout" for action eventref - resultEventTimeout: PT12H timeouts: actionExecTimeout: PT11H stateExecTimeout: @@ -221,4 +228,46 @@ states: transition: nextState: HandleRejectedVisa defaultCondition: SendTextForHighPriority - end: true \ No newline at end of file + end: true + - name: RejectApplication + type: switch + dataConditions: + - condition: ${ true } + transition: HandleApprovedVisa + - condition: ${ false } + transition: + nextState: HandleRejectedVisa + defaultCondition: SendTextForHighPriority + end: true + - name: HandleNoVisaDecision + type: operation + actionMode: sequential + actions: + - name: greetingCustomFunction + functionRef: + refName: greetingCustomFunction + end: true + - name: StartApplication + type: operation + actionMode: sequential + actions: + - name: greetingCustomFunction + functionRef: + refName: greetingCustomFunction + end: true + - name: HandleApprovedVisa + type: operation + actionMode: sequential + actions: + - name: greetingCustomFunction + functionRef: + refName: greetingCustomFunction + end: true + - name: 
HandleRejectedVisa + type: operation + actionMode: sequential + actions: + - name: greetingCustomFunction + functionRef: + refName: greetingCustomFunction + end: true diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml index c2a5808..6ceb1a1 100644 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ b/parser/testdata/workflows/patientonboarding.sw.yaml @@ -41,10 +41,12 @@ states: end: true end: true events: - - name: StorePatient + - name: NewPatientEvent type: new.patients.event source: newpatient/+ functions: + - name: StorePatient + operation: api/services.json#storePatient - name: StoreNewPatientInfo operation: api/services.json#addPatient - name: AssignDoctor diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json index 2bde03c..2596b04 100644 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ b/parser/testdata/workflows/purchaseorderworkflow.sw.json @@ -6,7 +6,7 @@ "start": "StartNewOrder", "timeouts": { "workflowExecTimeout": { - "duration": "PT30D", + "duration": "P30D", "runBefore": "CancelOrder" } }, diff --git a/parser/testdata/workflows/vitalscheck.json b/parser/testdata/workflows/vitalscheck.json index feb1c41..3a89b78 100644 --- a/parser/testdata/workflows/vitalscheck.json +++ b/parser/testdata/workflows/vitalscheck.json @@ -34,19 +34,19 @@ ], "functions": [ { - "name": "checkTirePressure", + "name": "Check Tire Pressure", "operation": "mycarservices.json#checktirepressure" }, { - "name": "checkOilPressure", + "name": "Check Oil Pressure", "operation": "mycarservices.json#checkoilpressure" }, { - "name": "checkCoolantLevel", + "name": "Check Coolant Level", "operation": "mycarservices.json#checkcoolantlevel" }, { - "name": "checkBattery", + "name": "Check Battery", "operation": "mycarservices.json#checkbattery" } ] diff --git a/model/util.go b/util/unmarshal.go similarity index 91% rename from model/util.go rename to util/unmarshal.go index 645b4f5..6c70f4a 100644 --- a/model/util.go +++ b/util/unmarshal.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package model +package util import ( "bytes" @@ -28,8 +28,9 @@ import ( "sync/atomic" "time" - "github.com/serverlessworkflow/sdk-go/v2/validator" "sigs.k8s.io/yaml" + + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) // Kind ... @@ -40,7 +41,7 @@ type Kind interface { } // TODO: Remove global variable -var httpClient = http.Client{Timeout: time.Duration(1) * time.Second} +var HttpClient = http.Client{Timeout: time.Duration(1) * time.Second} // UnmarshalError ... 
// +k8s:deepcopy-gen=false @@ -88,8 +89,8 @@ func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) str } else if err.Struct != "" && err.Field != "" { var primitiveTypeName string - val := reflect.New(err.Type) - if valKinds, ok := val.Elem().Interface().(validator.Kind); ok { + value := reflect.New(err.Type) + if valKinds, ok := value.Elem().Interface().(val.Kind); ok { values := valKinds.KindValues() if len(values) <= 2 { primitiveTypeName = strings.Join(values, " or ") @@ -174,7 +175,7 @@ func getBytesFromHttp(url string) ([]byte, error) { return nil, err } - resp, err := httpClient.Do(req) + resp, err := HttpClient.Do(req) if err != nil { return nil, err } @@ -188,9 +189,10 @@ func getBytesFromHttp(url string) ([]byte, error) { return buf.Bytes(), nil } -func unmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { +// +k8s:deepcopy-gen=false +func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { var valString string - err := unmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) + err := UnmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) if err != nil || valString == "" { return err } @@ -229,10 +231,10 @@ func unmarshalObjectOrFile[U any](parameterName string, data []byte, valObject * } } - return unmarshalObject(parameterName, data, valObject) + return UnmarshalObject(parameterName, data, valObject) } -func unmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { +func UnmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { data = bytes.TrimSpace(data) if len(data) == 0 { // TODO: Normalize error messages @@ -242,7 +244,7 @@ func unmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, da isObject := data[0] == '{' || data[0] == '[' var err error if isObject { - err = unmarshalObject(parameterName, data, valStruct) + err = UnmarshalObject(parameterName, data, valStruct) } else { err = unmarshalPrimitive(parameterName, data, valPrimitive) } @@ -273,7 +275,7 @@ func unmarshalPrimitive[T string | bool](parameterName string, data []byte, valu return nil } -func unmarshalObject[U any](parameterName string, data []byte, value *U) error { +func UnmarshalObject[U any](parameterName string, data []byte, value *U) error { if value == nil { return nil } diff --git a/model/util_benchmark_test.go b/util/unmarshal_benchmark_test.go similarity index 98% rename from model/util_benchmark_test.go rename to util/unmarshal_benchmark_test.go index 4048a6b..1a81b41 100644 --- a/model/util_benchmark_test.go +++ b/util/unmarshal_benchmark_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package model +package util import ( "fmt" diff --git a/model/util_test.go b/util/unmarshal_test.go similarity index 79% rename from model/util_test.go rename to util/unmarshal_test.go index b81b315..0227123 100644 --- a/model/util_test.go +++ b/util/unmarshal_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package model +package util import ( "encoding/json" @@ -22,8 +22,9 @@ import ( "path/filepath" "testing" - "github.com/serverlessworkflow/sdk-go/v2/test" "github.com/stretchr/testify/assert" + + "github.com/serverlessworkflow/sdk-go/v2/test" ) func TestIncludePaths(t *testing.T) { @@ -55,7 +56,7 @@ func Test_loadExternalResource(t *testing.T) { } })) defer server.Close() - httpClient = *server.Client() + HttpClient = *server.Client() data, err := loadExternalResource(server.URL + "/test.json") assert.NoError(t, err) @@ -94,40 +95,26 @@ func Test_unmarshalObjectOrFile(t *testing.T) { } })) defer server.Close() - httpClient = *server.Client() + HttpClient = *server.Client() structValue := &structString{} data := []byte(`"fieldValue": "value"`) - err := unmarshalObjectOrFile("structString", data, structValue) + err := UnmarshalObjectOrFile("structString", data, structValue) assert.Error(t, err) assert.Equal(t, &structString{}, structValue) listStructValue := &listStructString{} data = []byte(`[{"fieldValue": "value"}]`) - err = unmarshalObjectOrFile("listStructString", data, listStructValue) + err = UnmarshalObjectOrFile("listStructString", data, listStructValue) assert.NoError(t, err) assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) listStructValue = &listStructString{} data = []byte(fmt.Sprintf(`"%s/test.json"`, server.URL)) - err = unmarshalObjectOrFile("listStructString", data, listStructValue) + err = UnmarshalObjectOrFile("listStructString", data, listStructValue) assert.NoError(t, err) assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) }) - - t.Run("file://", func(t *testing.T) { - retries := &Retries{} - data := []byte(`"file://../parser/testdata/applicationrequestretries.json"`) - err := unmarshalObjectOrFile("retries", data, retries) - assert.NoError(t, err) - }) - - t.Run("external url", func(t *testing.T) { - retries := &Retries{} - data := []byte(`"https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json"`) - err := unmarshalObjectOrFile("retries", data, retries) - assert.NoError(t, err) - }) } func Test_primitiveOrMapType(t *testing.T) { @@ -137,31 +124,31 @@ func Test_primitiveOrMapType(t *testing.T) { var valBool bool valMap := &dataMap{} data := []byte(`"value":true`) - err := unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.Error(t, err) valBool = false valMap = &dataMap{} data = []byte(`{value":true}`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.Error(t, err) valBool = false valMap = &dataMap{} data = []byte(`value":true}`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.Error(t, err) valBool = false valMap = &dataMap{} data = []byte(`"true"`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.Error(t, err) valBool = false valMap = &dataMap{} data = []byte(`true`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.NoError(t, err) assert.Equal(t, &dataMap{}, valMap) assert.True(t, valBool) @@ -169,7 +156,7 @@ func Test_primitiveOrMapType(t *testing.T) { valString := "" valMap = 
&dataMap{} data = []byte(`"true"`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) assert.NoError(t, err) assert.Equal(t, &dataMap{}, valMap) assert.Equal(t, `true`, valString) @@ -177,7 +164,7 @@ func Test_primitiveOrMapType(t *testing.T) { valBool = false valMap = &dataMap{} data = []byte(`{"value":true}`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.NoError(t, err) assert.NotNil(t, valMap) assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) @@ -186,7 +173,7 @@ func Test_primitiveOrMapType(t *testing.T) { valBool = false valMap = &dataMap{} data = []byte(`{"value": "true"}`) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) assert.NoError(t, err) assert.NotNil(t, valMap) assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) @@ -201,12 +188,12 @@ func Test_primitiveOrMapType(t *testing.T) { var valString string valStruct := &structString{} data := []byte(`{"fieldValue": "value"`) - err := unmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) + err := UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) assert.Error(t, err) assert.Equal(t, "structBool has a syntax error \"unexpected end of JSON input\"", err.Error()) data = []byte(`{\n "fieldValue": value\n}`) - err = unmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) + err = UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) assert.Error(t, err) assert.Equal(t, "structBool has a syntax error \"invalid character '\\\\\\\\' looking for beginning of object key string\"", err.Error()) // assert.Equal(t, `structBool value '{"fieldValue": value}' is not supported, it has a syntax error "invalid character 'v' looking for beginning of value"`, err.Error()) @@ -222,14 +209,14 @@ func Test_primitiveOrMapType(t *testing.T) { data := []byte(`{ "fieldValue": "true" }`) - err := unmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) + err := UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) assert.Error(t, err) assert.Equal(t, "structBool.fieldValue must be bool", err.Error()) valBool = false valStruct = &structBool{} data = []byte(`"true"`) - err = unmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) + err = UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) assert.Error(t, err) assert.Equal(t, "structBool must be bool or object", err.Error()) }) @@ -238,19 +225,19 @@ func Test_primitiveOrMapType(t *testing.T) { var valBool bool valStruct := &dataMap{} data := []byte(` {"value": "true"} `) - err := unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) assert.NoError(t, err) valBool = false valStruct = &dataMap{} data = []byte(` true `) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) assert.NoError(t, err) valString := "" valStruct = &dataMap{} data = []byte(` "true" `) - err = unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) assert.NoError(t, err) }) @@ -258,13 +245,13 @@ func Test_primitiveOrMapType(t *testing.T) { valString := "" valStruct := 
&dataMap{} data := []byte(string('\t') + `"true"` + string('\t')) - err := unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) assert.NoError(t, err) valBool := false valStruct = &dataMap{} data = []byte(string('\t') + `true` + string('\t')) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) assert.NoError(t, err) }) @@ -272,13 +259,13 @@ func Test_primitiveOrMapType(t *testing.T) { valString := "" valStruct := &dataMap{} data := []byte(string('\n') + `"true"` + string('\n')) - err := unmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) + err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) assert.NoError(t, err) valBool := false valStruct = &dataMap{} data = []byte(string('\n') + `true` + string('\n')) - err = unmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) + err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) assert.NoError(t, err) }) @@ -299,5 +286,5 @@ type structBoolUnmarshal structBool func (s *structBool) UnmarshalJSON(data []byte) error { s.FieldValue = true - return unmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) + return UnmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) } diff --git a/validator/validator.go b/validator/validator.go index 846203d..1e77b36 100644 --- a/validator/validator.go +++ b/validator/validator.go @@ -16,9 +16,12 @@ package validator import ( "context" + "strconv" - validator "github.com/go-playground/validator/v10" "github.com/senseyeio/duration" + "k8s.io/apimachinery/pkg/util/intstr" + + validator "github.com/go-playground/validator/v10" ) // TODO: expose a better validation message. 
See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation @@ -42,7 +45,6 @@ func init() { if err != nil { panic(err) } - } // GetValidator gets the default validator.Validate reference @@ -72,3 +74,23 @@ func oneOfKind(fl validator.FieldLevel) bool { return false } + +func ValidateGt0IntStr(value *intstr.IntOrString) bool { + switch value.Type { + case intstr.Int: + if value.IntVal <= 0 { + return false + } + case intstr.String: + v, err := strconv.Atoi(value.StrVal) + if err != nil { + return false + } + + if v <= 0 { + return false + } + } + + return true +} diff --git a/validator/validator_test.go b/validator/validator_test.go index a0b273e..73ef555 100644 --- a/validator/validator_test.go +++ b/validator/validator_test.go @@ -18,6 +18,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "k8s.io/apimachinery/pkg/util/intstr" ) func TestValidateISO8601TimeDuration(t *testing.T) { @@ -115,3 +116,60 @@ func Test_oneOfKind(t *testing.T) { }) } + +func TestValidateIntStr(t *testing.T) { + + testCase := []struct { + Desp string + Test *intstr.IntOrString + Return bool + }{ + { + Desp: "success int", + Test: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 1, + }, + Return: true, + }, + { + Desp: "success string", + Test: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "1", + }, + Return: true, + }, + { + Desp: "fail int", + Test: &intstr.IntOrString{ + Type: intstr.Int, + IntVal: 0, + }, + Return: false, + }, + { + Desp: "fail string", + Test: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "0", + }, + Return: false, + }, + { + Desp: "fail invalid string", + Test: &intstr.IntOrString{ + Type: intstr.String, + StrVal: "aa", + }, + Return: false, + }, + } + + for _, c := range testCase { + t.Run(c.Desp, func(t *testing.T) { + valid := ValidateGt0IntStr(c.Test) + assert.Equal(t, c.Return, valid) + }) + } +} diff --git a/validator/workflow.go b/validator/workflow.go new file mode 100644 index 0000000..d5be7b5 --- /dev/null +++ b/validator/workflow.go @@ -0,0 +1,154 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package validator + +import ( + "errors" + "fmt" + "reflect" + "strings" + + validator "github.com/go-playground/validator/v10" +) + +const ( + TagExists string = "exists" + TagRequired string = "required" + TagExclusive string = "exclusive" + + TagRecursiveState string = "recursivestate" + + // States referenced by compensatedBy (as well as any other states that they transition to) must obey following rules: + TagTransitionMainWorkflow string = "transtionmainworkflow" // They should not have any incoming transitions (should not be part of the main workflow control-flow logic) + TagCompensatedbyEventState string = "compensatedbyeventstate" // They cannot be an event state + TagRecursiveCompensation string = "recursivecompensation" // They cannot themselves set their compensatedBy property to true (compensation is not recursive) + TagCompensatedby string = "compensatedby" // They must define the usedForCompensation property and set it to true + TagTransitionUseForCompensation string = "transitionusedforcompensation" // They can transition only to states which also have their usedForCompensation property and set to true +) + +type WorkflowErrors []error + +func (e WorkflowErrors) Error() string { + errors := []string{} + for _, err := range []error(e) { + errors = append(errors, err.Error()) + } + return strings.Join(errors, "\n") +} + +func WorkflowError(err error) error { + if err == nil { + return nil + } + + var invalidErr *validator.InvalidValidationError + if errors.As(err, &invalidErr) { + return err + } + + var validationErrors validator.ValidationErrors + if !errors.As(err, &validationErrors) { + return err + } + + removeNamespace := []string{ + "BaseWorkflow", + "BaseState", + "OperationState", + } + + workflowErrors := []error{} + for _, err := range validationErrors { + // normalize namespace + namespaceList := strings.Split(err.Namespace(), ".") + normalizedNamespaceList := []string{} + for i := range namespaceList { + part := namespaceList[i] + if !contains(removeNamespace, part) { + part := strings.ToLower(part[:1]) + part[1:] + normalizedNamespaceList = append(normalizedNamespaceList, part) + } + } + namespace := strings.Join(normalizedNamespaceList, ".") + + switch err.Tag() { + case "unique": + if err.Param() == "" { + workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate value", namespace)) + } else { + workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate %q", namespace, strings.ToLower(err.Param()))) + } + case "min": + workflowErrors = append(workflowErrors, fmt.Errorf("%s must have the minimum %s", namespace, err.Param())) + case "required_without": + if namespace == "workflow.iD" { + workflowErrors = append(workflowErrors, errors.New("workflow.id required when \"workflow.key\" is not defined")) + } else if namespace == "workflow.key" { + workflowErrors = append(workflowErrors, errors.New("workflow.key required when \"workflow.id\" is not defined")) + } else if err.StructField() == "FunctionRef" { + workflowErrors = append(workflowErrors, fmt.Errorf("%s required when \"eventRef\" or \"subFlowRef\" is not defined", namespace)) + } else { + workflowErrors = append(workflowErrors, err) + } + case "oneofkind": + value := reflect.New(err.Type()).Elem().Interface().(Kind) + workflowErrors = append(workflowErrors, fmt.Errorf("%s need by one of %s", namespace, value.KindValues())) + case "gt0": + workflowErrors = append(workflowErrors, fmt.Errorf("%s must be greater than 0", namespace)) + case TagExists: + workflowErrors = 
append(workflowErrors, fmt.Errorf("%s don't exist %q", namespace, err.Value())) + case TagRequired: + workflowErrors = append(workflowErrors, fmt.Errorf("%s is required", namespace)) + case TagExclusive: + if err.StructField() == "ErrorRef" { + workflowErrors = append(workflowErrors, fmt.Errorf("%s or %s are exclusive", namespace, replaceLastNamespace(namespace, "errorRefs"))) + } else { + workflowErrors = append(workflowErrors, fmt.Errorf("%s exclusive", namespace)) + } + case TagCompensatedby: + workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is not defined as usedForCompensation", namespace, err.Value())) + case TagCompensatedbyEventState: + workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation and cannot be an event state", namespace, err.Value())) + case TagRecursiveCompensation: + workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation (cannot themselves set their compensatedBy)", namespace, err.Value())) + case TagRecursiveState: + workflowErrors = append(workflowErrors, fmt.Errorf("%s can't no be recursive %q", namespace, strings.ToLower(err.Param()))) + case TagISO8601Duration: + workflowErrors = append(workflowErrors, fmt.Errorf("%s invalid iso8601 duration %q", namespace, err.Value())) + default: + workflowErrors = append(workflowErrors, err) + } + } + + return WorkflowErrors(workflowErrors) +} + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} + +func replaceLastNamespace(namespace, replace string) string { + index := strings.LastIndex(namespace, ".") + if index == -1 { + return namespace + } + + return fmt.Sprintf("%s.%s", namespace[:index], replace) +} From 926a85d0269b0d1729bf2827a951efc609e804b4 Mon Sep 17 00:00:00 2001 From: ilewin <109073868+ilewin@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:24:52 +0200 Subject: [PATCH 071/110] #181 Skipping END and TRANSITION validation for usedForCompensation States (#182) Signed-off-by: apidbere Co-authored-by: apidbere --- model/states_validator.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/states_validator.go b/model/states_validator.go index 0ce87dc..1bb58e5 100644 --- a/model/states_validator.go +++ b/model/states_validator.go @@ -26,7 +26,7 @@ func init() { func baseStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { baseState := structLevel.Current().Interface().(BaseState) - if baseState.Type != StateTypeSwitch { + if baseState.Type != StateTypeSwitch && !baseState.UsedForCompensation { validTransitionAndEnd(structLevel, baseState, baseState.Transition, baseState.End) } From 796270cb1806a6e3a04135c96c8f2a31233dbd0e Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Thu, 24 Aug 2023 17:48:19 -0300 Subject: [PATCH 072/110] relax state action validation (#183) The purpose of this change is to align the workflow validationb between the Java and the GO SDK. Spec allows the action to be a empty list while in the GO SDK, if defined, must have at least one item. 
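Illustrative sketch only, not part of this patch: it reuses the helper names
from the validator tests in this series (buildWorkflow, buildOperationState,
buildEndByState, buildActionByOperationState, buildFunctionRef,
NewValidatorContext) to show the relaxed rule, i.e. an empty action list is
expected to pass validation once the tag changes from min=1 to min=0.

    baseWorkflow := buildWorkflow()
    state := buildOperationState(baseWorkflow, "start state")
    buildEndByState(state, true, false)
    action := buildActionByOperationState(state, "action 1")
    buildFunctionRef(baseWorkflow, action, "function 1")

    model := baseWorkflow.DeepCopy()
    // an empty list was rejected by the old min=1 rule; accepted with min=0
    model.States[0].OperationState.Actions = []Action{}

    err := val.GetValidator().StructCtx(NewValidatorContext(model), *model)
    // err is expected to be nil for the empty action list
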
Signed-off-by: Spolti --- hack/deepcopy-gen.sh | 3 +-- model/foreach_state.go | 4 ++-- model/foreach_state_validator_test.go | 2 +- model/operation_state.go | 4 ++-- model/operation_state_validator_test.go | 2 +- 5 files changed, 7 insertions(+), 8 deletions(-) diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh index 8069d7e..f8d30f3 100755 --- a/hack/deepcopy-gen.sh +++ b/hack/deepcopy-gen.sh @@ -43,7 +43,6 @@ if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 "${GOPATH}/bin/deepcopy-gen" -v 1 \ --input-dirs ./model -O zz_generated.deepcopy \ - --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" \ - --output-base ./ + --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" "$@" fi diff --git a/model/foreach_state.go b/model/foreach_state.go index 7202614..3edb891 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -60,8 +60,8 @@ type ForEachState struct { // +optional BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` // Actions to be executed for each of the elements of inputCollection. - // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions,omitempty" validate:"required,min=1,dive"` + // +kubebuilder:validation:MinItems=0 + Actions []Action `json:"actions,omitempty" validate:"required,min=0,dive"` // State specific timeout. // +optional Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go index bc48a6c..8fb49d0 100644 --- a/model/foreach_state_validator_test.go +++ b/model/foreach_state_validator_test.go @@ -108,7 +108,7 @@ workflow.states[0].forEachState.mode is required`, model.States[0].ForEachState.Actions = []Action{} return *model }, - Err: `workflow.states[0].forEachState.actions must have the minimum 1`, + Err: ``, }, } diff --git a/model/operation_state.go b/model/operation_state.go index da523ea..8a88e3b 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -27,8 +27,8 @@ type OperationState struct { // +kubebuilder:default=sequential ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` // Actions to be performed - // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions" validate:"min=1,dive"` + // +kubebuilder:validation:MinItems=0 + Actions []Action `json:"actions" validate:"min=0,dive"` // State specific timeouts // +optional Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` diff --git a/model/operation_state_validator_test.go b/model/operation_state_validator_test.go index ead04a8..5da6dba 100644 --- a/model/operation_state_validator_test.go +++ b/model/operation_state_validator_test.go @@ -63,7 +63,7 @@ func TestOperationStateStructLevelValidation(t *testing.T) { model.States[0].OperationState.Actions = []Action{} return *model }, - Err: `workflow.states[0].actions must have the minimum 1`, + Err: ``, }, { Desp: "oneofkind", From cc4ca42ff97eeda88ef80bc6d16596fe3297cde0 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Fri, 25 Aug 2023 12:04:42 -0300 Subject: [PATCH 073/110] Adds small check to make sure that Action.FunctionRef is unmarshalled into struct (#184) Signed-off-by: Spolti --- parser/parser_test.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/parser/parser_test.go b/parser/parser_test.go index b52840e..c5cf0f0 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -992,6 +992,9 @@ states: workflow = nil err = 
json.Unmarshal(b, &workflow) + // Make sure that the Action FunctionRef is unmarshalled correctly + assert.Equal(t, model.FromString("${ .singlemessage }"), workflow.States[5].ForEachState.Actions[0].FunctionRef.Arguments["message"]) + assert.Equal(t, "sendTextFunction", workflow.States[5].ForEachState.Actions[0].FunctionRef.RefName) assert.Nil(t, err) }) From cd9930387bc4ac3a3302c086979a9094713680ce Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 28 Aug 2023 10:13:41 -0300 Subject: [PATCH 074/110] Fix spec and SDK version alignment (2.2.4) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 64ead0c..0bcad4f 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.2) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.2.4](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.4) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 6d5de0687e8457d587960e65b8f9f48edfe86490 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Fri, 1 Sep 2023 16:48:40 -0300 Subject: [PATCH 075/110] Add new types `float64, map, slice, nil` in object (#186) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add new types `float64, map, slice, nil` in object Signed-off-by: AndrΓ© R. de Miranda * Add function ToInterface convert from object to any Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- model/object.go | 203 ++++++++++++++++++++++++--------- model/object_test.go | 181 +++++++++++++++++++++++++++++ model/zz_generated.deepcopy.go | 17 ++- parser/parser_test.go | 28 +++-- 4 files changed, 367 insertions(+), 62 deletions(-) create mode 100644 model/object_test.go diff --git a/model/object.go b/model/object.go index 10f4395..b8360a7 100644 --- a/model/object.go +++ b/model/object.go @@ -15,9 +15,23 @@ package model import ( + "bytes" "encoding/json" "fmt" "math" + "strconv" +) + +type Type int8 + +const ( + Null Type = iota + String + Int + Float + Map + Slice + Bool ) // Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type. @@ -29,80 +43,167 @@ import ( // - String - holds string values // - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is // parsed to int32 -// - raw - holds any not typed value, replaces the interface{} behavior. 
// // +kubebuilder:validation:Type=object type Object struct { - Type Type `json:"type,inline"` - IntVal int32 `json:"intVal,inline"` - StrVal string `json:"strVal,inline"` - RawValue json.RawMessage `json:"rawValue,inline"` - BoolValue bool `json:"boolValue,inline"` + Type Type `json:"type,inline"` + StringValue string `json:"strVal,inline"` + IntValue int32 `json:"intVal,inline"` + FloatValue float64 + MapValue map[string]Object + SliceValue []Object + BoolValue bool `json:"boolValue,inline"` } -type Type int64 +// UnmarshalJSON implements json.Unmarshaler +func (obj *Object) UnmarshalJSON(data []byte) error { + data = bytes.TrimSpace(data) -const ( - Integer Type = iota - String - Raw - Boolean -) + if data[0] == '"' { + obj.Type = String + return json.Unmarshal(data, &obj.StringValue) + } else if data[0] == 't' || data[0] == 'f' { + obj.Type = Bool + return json.Unmarshal(data, &obj.BoolValue) + } else if data[0] == 'n' { + obj.Type = Null + return nil + } else if data[0] == '{' { + obj.Type = Map + return json.Unmarshal(data, &obj.MapValue) + } else if data[0] == '[' { + obj.Type = Slice + return json.Unmarshal(data, &obj.SliceValue) + } + + number := string(data) + intValue, err := strconv.ParseInt(number, 10, 32) + if err == nil { + obj.Type = Int + obj.IntValue = int32(intValue) + return nil + } + + floatValue, err := strconv.ParseFloat(number, 64) + if err == nil { + obj.Type = Float + obj.FloatValue = floatValue + return nil + } + + return fmt.Errorf("json invalid number %q", number) +} + +// MarshalJSON marshal the given json object into the respective Object subtype. +func (obj Object) MarshalJSON() ([]byte, error) { + switch obj.Type { + case String: + return []byte(fmt.Sprintf(`%q`, obj.StringValue)), nil + case Int: + return []byte(fmt.Sprintf(`%d`, obj.IntValue)), nil + case Float: + return []byte(fmt.Sprintf(`%f`, obj.FloatValue)), nil + case Map: + return json.Marshal(obj.MapValue) + case Slice: + return json.Marshal(obj.SliceValue) + case Bool: + return []byte(fmt.Sprintf(`%t`, obj.BoolValue)), nil + case Null: + return []byte("null"), nil + default: + panic("object invalid type") + } +} + +func FromString(val string) Object { + return Object{Type: String, StringValue: val} +} func FromInt(val int) Object { if val > math.MaxInt32 || val < math.MinInt32 { fmt.Println(fmt.Errorf("value: %d overflows int32", val)) } - return Object{Type: Integer, IntVal: int32(val)} + return Object{Type: Int, IntValue: int32(val)} } -func FromString(val string) Object { - return Object{Type: String, StrVal: val} +func FromFloat(val float64) Object { + if val > math.MaxFloat64 || val < -math.MaxFloat64 { + fmt.Println(fmt.Errorf("value: %f overflows float64", val)) + } + return Object{Type: Float, FloatValue: float64(val)} } -func FromBool(val bool) Object { - return Object{Type: Boolean, BoolValue: val} +func FromMap(mapValue map[string]any) Object { + mapValueObject := make(map[string]Object, len(mapValue)) + for key, value := range mapValue { + mapValueObject[key] = FromInterface(value) + } + return Object{Type: Map, MapValue: mapValueObject} } -func FromRaw(val interface{}) Object { - custom, err := json.Marshal(val) - if err != nil { - er := fmt.Errorf("failed to parse value to Raw: %w", err) - fmt.Println(er.Error()) - return Object{} +func FromSlice(sliceValue []any) Object { + sliceValueObject := make([]Object, len(sliceValue)) + for key, value := range sliceValue { + sliceValueObject[key] = FromInterface(value) } - return Object{Type: Raw, RawValue: custom} + return Object{Type: 
Slice, SliceValue: sliceValueObject} } -// UnmarshalJSON implements json.Unmarshaler -func (obj *Object) UnmarshalJSON(data []byte) error { - if data[0] == '"' { - obj.Type = String - return json.Unmarshal(data, &obj.StrVal) - } else if data[0] == 't' || data[0] == 'f' { - obj.Type = Boolean - return json.Unmarshal(data, &obj.BoolValue) - } else if data[0] == '{' { - obj.Type = Raw - return json.Unmarshal(data, &obj.RawValue) +func FromBool(val bool) Object { + return Object{Type: Bool, BoolValue: val} +} + +func FromNull() Object { + return Object{Type: Null} +} + +func FromInterface(value any) Object { + switch v := value.(type) { + case string: + return FromString(v) + case int: + return FromInt(v) + case int32: + return FromInt(int(v)) + case float64: + return FromFloat(v) + case map[string]any: + return FromMap(v) + case []any: + return FromSlice(v) + case bool: + return FromBool(v) + case nil: + return FromNull() } - obj.Type = Integer - return json.Unmarshal(data, &obj.IntVal) + panic("invalid type") } -// MarshalJSON marshal the given json object into the respective Object subtype. -func (obj Object) MarshalJSON() ([]byte, error) { - switch obj.Type { +func ToInterface(object Object) any { + switch object.Type { case String: - return []byte(fmt.Sprintf(`%q`, obj.StrVal)), nil - case Boolean: - return []byte(fmt.Sprintf(`%t`, obj.BoolValue)), nil - case Integer: - return []byte(fmt.Sprintf(`%d`, obj.IntVal)), nil - case Raw: - val, _ := json.Marshal(obj.RawValue) - return val, nil - default: - return []byte(fmt.Sprintf("%+v", obj)), nil + return object.StringValue + case Int: + return object.IntValue + case Float: + return object.FloatValue + case Map: + mapInterface := make(map[string]any, len(object.MapValue)) + for key, value := range object.MapValue { + mapInterface[key] = ToInterface(value) + } + return mapInterface + case Slice: + sliceInterface := make([]any, len(object.SliceValue)) + for key, value := range object.SliceValue { + sliceInterface[key] = ToInterface(value) + } + return sliceInterface + case Bool: + return object.BoolValue + case Null: + return nil } + panic("invalid type") } diff --git a/model/object_test.go b/model/object_test.go new file mode 100644 index 0000000..0cf928f --- /dev/null +++ b/model/object_test.go @@ -0,0 +1,181 @@ +// Copyright 2022 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_unmarshal(t *testing.T) { + testCases := []struct { + name string + json string + object Object + any any + err string + }{ + { + name: "string", + json: "\"value\"", + object: FromString("value"), + any: any("value"), + }, + { + name: "int", + json: "123", + object: FromInt(123), + any: any(int32(123)), + }, + { + name: "float", + json: "123.123", + object: FromFloat(123.123), + any: any(123.123), + }, + { + name: "map", + json: "{\"key\": \"value\", \"key2\": 123}", + object: FromMap(map[string]any{"key": "value", "key2": 123}), + any: any(map[string]any{"key": "value", "key2": int32(123)}), + }, + { + name: "slice", + json: "[\"key\", 123]", + object: FromSlice([]any{"key", 123}), + any: any([]any{"key", int32(123)}), + }, + { + name: "bool true", + json: "true", + object: FromBool(true), + any: any(true), + }, + { + name: "bool false", + json: "false", + object: FromBool(false), + any: any(false), + }, + { + name: "null", + json: "null", + object: FromNull(), + any: nil, + }, + { + name: "string invalid", + json: "\"invalid", + err: "unexpected end of JSON input", + }, + { + name: "number invalid", + json: "123a", + err: "invalid character 'a' after top-level value", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + o := Object{} + err := json.Unmarshal([]byte(tc.json), &o) + if tc.err == "" { + assert.NoError(t, err) + assert.Equal(t, tc.object, o) + assert.Equal(t, ToInterface(tc.object), tc.any) + } else { + assert.Equal(t, tc.err, err.Error()) + } + }) + } +} + +func Test_marshal(t *testing.T) { + testCases := []struct { + name string + json string + object Object + err string + }{ + { + name: "string", + json: "\"value\"", + object: FromString("value"), + }, + { + name: "int", + json: "123", + object: FromInt(123), + }, + { + name: "float", + json: "123.123000", + object: FromFloat(123.123), + }, + { + name: "map", + json: "{\"key\":\"value\",\"key2\":123}", + object: FromMap(map[string]any{"key": "value", "key2": 123}), + }, + { + name: "slice", + json: "[\"key\",123]", + object: FromSlice([]any{"key", 123}), + }, + { + name: "bool true", + json: "true", + object: FromBool(true), + }, + { + name: "bool false", + json: "false", + object: FromBool(false), + }, + { + name: "null", + json: "null", + object: FromNull(), + }, + { + name: "interface", + json: "[\"value\",123,123.123000,[1],{\"key\":1.100000},true,false,null]", + object: FromInterface([]any{ + "value", + 123, + 123.123, + []any{1}, + map[string]any{"key": 1.1}, + true, + false, + nil, + }), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + json, err := json.Marshal(tc.object) + if tc.err == "" { + assert.NoError(t, err) + assert.Equal(t, tc.json, string(json)) + } else { + assert.Equal(t, tc.err, err.Error()) + } + }) + } +} diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 804706f..3e76ab1 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -1101,10 +1101,19 @@ func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *Object) DeepCopyInto(out *Object) { *out = *in - if in.RawValue != nil { - in, out := &in.RawValue, &out.RawValue - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) + if in.MapValue != nil { + in, out := &in.MapValue, &out.MapValue + *out = make(map[string]Object, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.SliceValue != nil { + in, out := &in.SliceValue, &out.SliceValue + *out = make([]Object, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } } return } diff --git a/parser/parser_test.go b/parser/parser_test.go index c5cf0f0..fdf70d8 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -455,13 +455,21 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "visaApprovedEvent", w.States[3].EventConditions[0].Name) assert.Equal(t, "visaApprovedEventRef", w.States[3].EventConditions[0].EventRef) assert.Equal(t, "HandleApprovedVisa", w.States[3].EventConditions[0].Transition.NextState) - assert.Equal(t, model.Metadata{"mastercard": model.Object{Type: 1, IntVal: 0, StrVal: "disallowed", RawValue: json.RawMessage(nil)}, - "visa": model.Object{Type: 1, IntVal: 0, StrVal: "allowed", RawValue: json.RawMessage(nil)}}, - w.States[3].EventConditions[0].Metadata) + assert.Equal(t, + model.Metadata{ + "mastercard": model.FromString("disallowed"), + "visa": model.FromString("allowed"), + }, + w.States[3].EventConditions[0].Metadata, + ) assert.Equal(t, "visaRejectedEvent", w.States[3].EventConditions[1].EventRef) assert.Equal(t, "HandleRejectedVisa", w.States[3].EventConditions[1].Transition.NextState) - assert.Equal(t, model.Metadata{"test": model.Object{Type: 1, IntVal: 0, StrVal: "tested", RawValue: json.RawMessage(nil)}}, - w.States[3].EventConditions[1].Metadata) + assert.Equal(t, + model.Metadata{ + "test": model.FromString("tested"), + }, + w.States[3].EventConditions[1].Metadata, + ) assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) @@ -534,8 +542,14 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "CheckCreditCallback", w.States[8].Name) assert.Equal(t, model.StateTypeCallback, w.States[8].Type) assert.Equal(t, "callCreditCheckMicroservice", w.States[8].CallbackState.Action.FunctionRef.RefName) - assert.Equal(t, map[string]model.Object{"argsObj": model.FromRaw(map[string]interface{}{"age": 10, "name": "hi"}), "customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, - w.States[8].CallbackState.Action.FunctionRef.Arguments) + assert.Equal(t, + map[string]model.Object{ + "argsObj": model.FromMap(map[string]interface{}{"age": 10, "name": "hi"}), + "customer": model.FromString("${ .customer }"), + "time": model.FromInt(48), + }, + w.States[8].CallbackState.Action.FunctionRef.Arguments, + ) assert.Equal(t, "PT10S", w.States[8].CallbackState.Action.Sleep.Before) assert.Equal(t, "PT20S", w.States[8].CallbackState.Action.Sleep.After) assert.Equal(t, "PT150M", w.States[8].CallbackState.Timeouts.ActionExecTimeout) From 5a23a17425136ef6d85cf255582226f6f8f91453 Mon Sep 17 00:00:00 2001 From: Venera <31911811+venera-program@users.noreply.github.com> Date: Wed, 13 Sep 2023 07:56:59 -0700 Subject: [PATCH 076/110] Fixed Cron.ValidUntil using incorrect ISO8601 parsing format (#187) Signed-off-by: Venera <31911811+venera-program@users.noreply.github.com> --- go.mod | 1 + go.sum | 2 ++ model/workflow.go | 2 +- 
validator/validator.go | 17 +++++++++++++ validator/validator_test.go | 48 +++++++++++++++++++++++++++++++++++++ 5 files changed, 69 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index ea25056..fcbcf95 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.19 require ( github.com/go-playground/validator/v10 v10.11.1 github.com/pkg/errors v0.9.1 + github.com/relvacode/iso8601 v1.3.0 github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 github.com/stretchr/testify v1.8.0 gopkg.in/yaml.v3 v3.0.1 diff --git a/go.sum b/go.sum index e2bb434..84f4c23 100644 --- a/go.sum +++ b/go.sum @@ -45,6 +45,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/relvacode/iso8601 v1.3.0 h1:HguUjsGpIMh/zsTczGN3DVJFxTU/GX+MMmzcKoMO7ko= +github.com/relvacode/iso8601 v1.3.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= diff --git a/model/workflow.go b/model/workflow.go index 58b382a..6777895 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -372,7 +372,7 @@ type Cron struct { Expression string `json:"expression" validate:"required"` // Specific date and time (ISO 8601 format) when the cron expression is no longer valid. // +optional - ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601duration"` + ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601datetime"` } type cronUnmarshal Cron diff --git a/validator/validator.go b/validator/validator.go index 1e77b36..c2ae024 100644 --- a/validator/validator.go +++ b/validator/validator.go @@ -18,6 +18,7 @@ import ( "context" "strconv" + "github.com/relvacode/iso8601" "github.com/senseyeio/duration" "k8s.io/apimachinery/pkg/util/intstr" @@ -41,6 +42,11 @@ func init() { panic(err) } + err = validate.RegisterValidationCtx("iso8601datetime", validateISO8601DatetimeFunc) + if err != nil { + panic(err) + } + err = validate.RegisterValidation("oneofkind", oneOfKind) if err != nil { panic(err) @@ -63,6 +69,17 @@ func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) return err == nil } +// ValidateISO8601Datetime validate the string is iso8601 Datetime format +func ValidateISO8601Datetime(s string) error { + _, err := iso8601.ParseString(s) + return err +} + +func validateISO8601DatetimeFunc(_ context.Context, fl validator.FieldLevel) bool { + err := ValidateISO8601Datetime(fl.Field().String()) + return err == nil +} + func oneOfKind(fl validator.FieldLevel) bool { if val, ok := fl.Field().Interface().(Kind); ok { for _, value := range val.KindValues() { diff --git a/validator/validator_test.go b/validator/validator_test.go index 73ef555..8dd6c9c 100644 --- a/validator/validator_test.go +++ b/validator/validator_test.go @@ -59,6 +59,54 @@ func TestValidateISO8601TimeDuration(t *testing.T) { } } +func TestValidateISO8601Timestamp(t *testing.T) { + type testCase struct { + desp string + s string + err string + } + testCases := []testCase{ + { + desp: "workflow_spec_example", + s: "2021-11-05T08:15:30-05:00", 
+ err: ``, + }, + { + desp: "datetime", + s: "2023-09-08T20:15:46+00:00", + err: ``, + }, + { + desp: "date", + s: "2023-09-08", + err: ``, + }, + { + desp: "time", + s: "13:15:33.074-07:00", + err: "iso8601: Unexpected character `:`", + }, + { + desp: "empty value", + s: "", + err: `iso8601: Cannot parse "": month 0 is not in range 1-12`, + }, + } + for _, tc := range testCases { + t.Run(tc.desp, func(t *testing.T) { + err := ValidateISO8601Datetime(tc.s) + + if tc.err != "" { + assert.Error(t, err) + assert.Regexp(t, tc.err, err) + return + } + + assert.NoError(t, err) + }) + } +} + type testKind string func (k testKind) KindValues() []string { From f50885cae862ee60e3b0ae6ae5b417e92742f58c Mon Sep 17 00:00:00 2001 From: Venera <31911811+venera-program@users.noreply.github.com> Date: Tue, 19 Sep 2023 07:06:16 -0700 Subject: [PATCH 077/110] Added CEL to list of valid ExpressionLangs (#188) Signed-off-by: Venera <31911811+venera-program@users.noreply.github.com> --- model/workflow.go | 6 +++++- model/workflow_validator_test.go | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/model/workflow.go b/model/workflow.go index 6777895..6b6ac9d 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -75,6 +75,7 @@ func (i ExpressionLangType) KindValues() []string { return []string{ string(JqExpressionLang), string(JsonPathExpressionLang), + string(CELExpressionLang), } } @@ -88,6 +89,9 @@ const ( // JsonPathExpressionLang ... JsonPathExpressionLang ExpressionLangType = "jsonpath" + + // CELExpressionLang + CELExpressionLang ExpressionLangType = "cel" ) // BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces @@ -132,7 +136,7 @@ type BaseWorkflow struct { // +optional Constants *Constants `json:"constants,omitempty"` // Identifies the expression language used for workflow expressions. Default is 'jq'. 
- // +kubebuilder:validation:Enum=jq;jsonpath + // +kubebuilder:validation:Enum=jq;jsonpath;cel // +kubebuilder:default=jq // +optional ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"required,oneofkind"` diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go index 10e935a..eff6154 100644 --- a/model/workflow_validator_test.go +++ b/model/workflow_validator_test.go @@ -162,7 +162,7 @@ workflow.key required when "workflow.id" is not defined`, model.BaseWorkflow.ExpressionLang = JqExpressionLang + "invalid" return *model }, - Err: `workflow.expressionLang need by one of [jq jsonpath]`, + Err: `workflow.expressionLang need by one of [jq jsonpath cel]`, }, } From 78b1c062e37ac580e35fb4f3c387fc9e637177e0 Mon Sep 17 00:00:00 2001 From: Venera <31911811+venera-program@users.noreply.github.com> Date: Wed, 4 Oct 2023 05:08:55 -0700 Subject: [PATCH 078/110] Removed "required" tag from failOnValidationErrors (#192) Signed-off-by: Venera <31911811+venera-program@users.noreply.github.com> --- model/workflow.go | 3 +- model/workflow_validator_test.go | 33 +++++++++++++++++++ parser/parser_test.go | 7 ++++ .../workflows/dataInputSchemaValidation.yaml | 28 ++++++++++++++++ 4 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 parser/testdata/workflows/dataInputSchemaValidation.yaml diff --git a/model/workflow.go b/model/workflow.go index 6b6ac9d..24b41a7 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -507,7 +507,8 @@ type DataInputSchema struct { // +kubebuilder:validation:Required Schema string `json:"schema" validate:"required"` // +kubebuilder:validation:Required - FailOnValidationErrors bool `json:"failOnValidationErrors" validate:"required"` + FailOnValidationErrors bool `json:"failOnValidationErrors"` + // FailOnValidationErrors bool `json:"failOnValidationErrors" validate:"required"` } type dataInputSchemaUnmarshal DataInputSchema diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go index eff6154..9cdb77e 100644 --- a/model/workflow_validator_test.go +++ b/model/workflow_validator_test.go @@ -417,6 +417,39 @@ Key: 'Workflow.States[3].BaseState.Transition.NextState' Error:Field validation StructLevelValidationCtx(t, testCases) } +func TestDataInputSchemaStructLevelValidation(t *testing.T) { + baseWorkflow := buildWorkflow() + + operationState := buildOperationState(baseWorkflow, "start state") + buildEndByState(operationState, true, false) + action1 := buildActionByOperationState(operationState, "action 1") + buildFunctionRef(baseWorkflow, action1, "function 1") + + testCases := []ValidationCase{ + { + Desp: "empty DataInputSchema", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.DataInputSchema = &DataInputSchema{} + return *model + }, + Err: `workflow.dataInputSchema.schema is required`, + }, + { + Desp: "filled Schema, default failOnValidationErrors", + Model: func() Workflow { + model := baseWorkflow.DeepCopy() + model.DataInputSchema = &DataInputSchema{ + Schema: "sample schema", + } + return *model + }, + }, + } + + StructLevelValidationCtx(t, testCases) +} + func TestSecretsStructLevelValidation(t *testing.T) { baseWorkflow := buildWorkflow() diff --git a/parser/parser_test.go b/parser/parser_test.go index fdf70d8..91dc273 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -579,6 +579,13 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) assert.Equal(t, true, 
w.States[10].End.Terminate) }, + }, { + "./testdata/workflows/dataInputSchemaValidation.yaml", func(t *testing.T, w *model.Workflow) { + assert.NotNil(t, w.DataInputSchema) + + assert.Equal(t, "sample schema", w.DataInputSchema.Schema) + assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) + }, }, } for _, file := range files { diff --git a/parser/testdata/workflows/dataInputSchemaValidation.yaml b/parser/testdata/workflows/dataInputSchemaValidation.yaml new file mode 100644 index 0000000..ed685a6 --- /dev/null +++ b/parser/testdata/workflows/dataInputSchemaValidation.yaml @@ -0,0 +1,28 @@ +# Copyright 2023 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +id: Valid DataInputSchema +version: '1.0' +specVersion: '0.8' +start: Start +dataInputSchema: + failOnValidationErrors: false + schema: "sample schema" +states: +- name: Start + type: inject + data: + done: true + end: + terminate: true \ No newline at end of file From da1af042b03b27f57ff47aa69376db6659bae3d0 Mon Sep 17 00:00:00 2001 From: Venera <31911811+venera-program@users.noreply.github.com> Date: Mon, 9 Oct 2023 07:36:06 -0700 Subject: [PATCH 079/110] Remove bad comment (#193) Signed-off-by: Venera <31911811+venera-program@users.noreply.github.com> --- model/workflow.go | 1 - 1 file changed, 1 deletion(-) diff --git a/model/workflow.go b/model/workflow.go index 24b41a7..3fddfb8 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -508,7 +508,6 @@ type DataInputSchema struct { Schema string `json:"schema" validate:"required"` // +kubebuilder:validation:Required FailOnValidationErrors bool `json:"failOnValidationErrors"` - // FailOnValidationErrors bool `json:"failOnValidationErrors" validate:"required"` } type dataInputSchemaUnmarshal DataInputSchema From 9eb522ffc96c8fb9c935c040dcfc11f3bf9b5696 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:12:24 -0300 Subject: [PATCH 080/110] Update Code of Conduct to follow the new standard (#191) Signed-off-by: Ricardo Zanini --- code-of-conduct.md | 65 +++++++--------------------------------------- 1 file changed, 9 insertions(+), 56 deletions(-) diff --git a/code-of-conduct.md b/code-of-conduct.md index ddd14b6..97a8526 100644 --- a/code-of-conduct.md +++ b/code-of-conduct.md @@ -1,58 +1,11 @@ -## CNCF Community Code of Conduct v1.0 +# Code of Conduct -Other languages available: -- [Chinese/δΈ­ζ–‡](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/zh.md) -- [German/Deutsch](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/de.md) -- [Spanish/EspaΓ±ol](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/es.md) -- [French/FranΓ§ais](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/fr.md) -- [Italian/Italiano](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/it.md) -- 
[Japanese/ζ—₯本θͺž](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/jp.md) -- [Korean/ν•œκ΅­μ–΄](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ko.md) -- [Ukrainian/Π£ΠΊΡ€Π°Ρ—Π½ΡΡŒΠΊΠ°](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/uk.md) -- [Russian/Русский](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ru.md) -- [Portuguese/PortuguΓͺs](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pt.md) -- [Arabic/Ψ§Ω„ΨΉΨ±Ψ¨ΩŠΨ©](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ar.md) -- [Polish/Polski](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pl.md) +We follow the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/main/code-of-conduct.md). -### Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering -an open and welcoming community, we pledge to respect all people who contribute -through reporting issues, posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for -everyone, regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, body size, race, ethnicity, age, -religion, or nationality. - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing others' private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct. - -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are not -aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers -commit themselves to fairly and consistently applying these principles to every aspect -of managing this project. Project maintainers who do not follow or enforce the Code of -Conduct may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior in Kubernetes may be reported by contacting the [Kubernetes Code of Conduct Committee](https://git.k8s.io/community/committee-code-of-conduct) via conduct@kubernetes.io. For other projects, please contact a CNCF project maintainer or our mediator, Mishi Choudhary via mishi@linux.com. - -This Code of Conduct is adapted from the Contributor Covenant -(), version 1.2.0, available at - - -### CNCF Events Code of Conduct - -CNCF events are governed by the Linux Foundation [Code of Conduct](https://events.linuxfoundation.org/code-of-conduct/) available on the event page. -This is designed to be compatible with the above policy and also includes more details on responding to incidents. \ No newline at end of file + +Please contact the [CNCF Code of Conduct Committee](mailto:conduct@cncf.io) +in order to report violations of the Code of Conduct. 
From d19b014b0dcb2dbc27e53edebe713d7100d9cba7 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 30 Oct 2023 11:01:18 -0300 Subject: [PATCH 081/110] Update README to reflect the new release --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0bcad4f..26491b0 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.4](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.4) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.2.5](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.5) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 48c5d8e868d287b610b3a0d261a906322408a86a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Mon, 18 Dec 2023 12:17:09 -0300 Subject: [PATCH 082/110] Programmatically build workflow (#190) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Programmatically build workflow Signed-off-by: AndrΓ© R. de Miranda * Add builder generated by builder-gen Signed-off-by: AndrΓ© R. de Miranda * Bump version golang.org/x/net, and add file LICENSE Signed-off-by: AndrΓ© R. de Miranda * Builder ApplyDefault Signed-off-by: AndrΓ© R. de Miranda * Add builder suuport to map, exec validator, and improve tests Signed-off-by: AndrΓ© R. de Miranda * Improve builder embedded, add method to remove element Signed-off-by: AndrΓ© R. de Miranda * Improve return builder embedded Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- Makefile | 7 +- builder/builder.go | 53 + builder/builder_test.go | 94 + go.mod | 8 +- go.sum | 12 +- hack/builder-gen.sh | 48 + model/action.go | 2 + model/action_data_filter.go | 3 +- model/event.go | 3 + model/event_data_filter.go | 1 + model/event_state.go | 2 + model/foreach_state.go | 1 + model/function.go | 1 + model/operation_state.go | 1 + model/parallel_state.go | 1 + model/retry.go | 1 + model/states.go | 1 + model/workflow.go | 15 +- model/workflow_ref.go | 1 + model/workflow_test.go | 4 +- model/workflow_validator.go | 1 + model/zz_generated.buildergen.go | 3108 ++++++++++++++++++++++++++++++ 22 files changed, 3348 insertions(+), 20 deletions(-) create mode 100644 builder/builder.go create mode 100644 builder/builder_test.go create mode 100755 hack/builder-gen.sh create mode 100644 model/zz_generated.buildergen.go diff --git a/Makefile b/Makefile index ff78471..0b0833f 100644 --- a/Makefile +++ b/Makefile @@ -14,14 +14,17 @@ lint: .PHONY: test coverage="false" -test: deepcopy +test: deepcopy buildergen make lint @go test ./... -.PHONY: deepcopy +.PHONY: deepcopy buildergen deepcopy: $(DEEPCOPY_GEN) ## Download deepcopy-gen locally if necessary. ./hack/deepcopy-gen.sh deepcopy +buildergen: $(BUILDER_GEN) ## Download builder-gen locally if necessary. 
+ ./hack/builder-gen.sh buildergen + .PHONY: kube-integration kube-integration: controller-gen $(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./..." output:crd:artifacts:config=config/crd/bases diff --git a/builder/builder.go b/builder/builder.go new file mode 100644 index 0000000..1bb4089 --- /dev/null +++ b/builder/builder.go @@ -0,0 +1,53 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "encoding/json" + + "sigs.k8s.io/yaml" + + "github.com/serverlessworkflow/sdk-go/v2/model" + val "github.com/serverlessworkflow/sdk-go/v2/validator" +) + +func New() *model.WorkflowBuilder { + return model.NewWorkflowBuilder() +} + +func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { + workflow := builder.Build() + ctx := model.NewValidatorContext(&workflow) + if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { + return nil, err + } + return &workflow, nil +} + +func Json(builder *model.WorkflowBuilder) ([]byte, error) { + workflow, err := Object(builder) + if err != nil { + return nil, err + } + return json.Marshal(workflow) +} + +func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { + data, err := Json(builder) + if err != nil { + return nil, err + } + return yaml.JSONToYAML(data) +} diff --git a/builder/builder_test.go b/builder/builder_test.go new file mode 100644 index 0000000..5aa661f --- /dev/null +++ b/builder/builder_test.go @@ -0,0 +1,94 @@ +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "testing" + + "github.com/serverlessworkflow/sdk-go/v2/model" + "github.com/stretchr/testify/assert" +) + +func prepareBuilder() *model.WorkflowBuilder { + builder := New().Key("key test").ID("id test") + + builder.AddFunctions().Name("function name").Operation("http://test") + builder.AddFunctions().Name("function name2").Operation("http://test") + + function3 := builder.AddFunctions().Name("function name2").Operation("http://test") + builder.RemoveFunctions(function3) + + state1 := builder.AddStates(). + Name("state"). 
+ Type(model.StateTypeInject) + state1.End().Terminate(true) + + inject := state1.InjectState() + inject.Data(map[string]model.Object{ + "test": model.FromMap(map[string]any{}), + }) + + return builder +} + +func TestObject(t *testing.T) { + workflow, err := Object(prepareBuilder()) + if assert.NoError(t, err) { + assert.Equal(t, "key test", workflow.Key) + assert.Equal(t, "id test", workflow.ID) + assert.Equal(t, "0.8", workflow.SpecVersion) + assert.Equal(t, "jq", workflow.ExpressionLang.String()) + assert.Equal(t, 2, len(workflow.Functions)) + + assert.Equal(t, "function name", workflow.Functions[0].Name) + assert.Equal(t, "function name2", workflow.Functions[1].Name) + } +} + +func TestJson(t *testing.T) { + data, err := Json(prepareBuilder()) + if assert.NoError(t, err) { + d := `{"id":"id test","key":"key test","version":"","specVersion":"0.8","expressionLang":"jq","states":[{"name":"state","type":"inject","end":{"terminate":true},"data":{"test":{}}}],"functions":[{"name":"function name","operation":"http://test","type":"rest"},{"name":"function name2","operation":"http://test","type":"rest"}]}` + assert.Equal(t, d, string(data)) + } +} + +func TestYaml(t *testing.T) { + data, err := Yaml(prepareBuilder()) + if assert.NoError(t, err) { + d := `expressionLang: jq +functions: +- name: function name + operation: http://test + type: rest +- name: function name2 + operation: http://test + type: rest +id: id test +key: key test +specVersion: "0.8" +states: +- data: + test: {} + end: + terminate: true + name: state + type: inject +version: "" +` + + assert.Equal(t, d, string(data)) + } +} diff --git a/go.mod b/go.mod index fcbcf95..bbb30d4 100644 --- a/go.mod +++ b/go.mod @@ -26,10 +26,10 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d // indirect - golang.org/x/net v0.7.0 // indirect - golang.org/x/sys v0.5.0 // indirect - golang.org/x/text v0.7.0 // indirect + golang.org/x/crypto v0.15.0 // indirect + golang.org/x/net v0.18.0 // indirect + golang.org/x/sys v0.14.0 // indirect + golang.org/x/text v0.14.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 // indirect diff --git a/go.sum b/go.sum index 84f4c23..b705b2d 100644 --- a/go.sum +++ b/go.sum @@ -69,8 +69,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d h1:3qF+Z8Hkrw9sOhrFHti9TlB1Hkac1x+DNRkv0XQiFjo= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= +golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ 
-81,8 +81,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= +golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -96,8 +96,8 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= +golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= diff --git a/hack/builder-gen.sh b/hack/builder-gen.sh new file mode 100755 index 0000000..083b187 --- /dev/null +++ b/hack/builder-gen.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh +# and adapted to only install and run the deepcopy-gen + +set -o errexit +set -o nounset +set -o pipefail + +SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. +echo "Script root is $SCRIPT_ROOT" + +GENS="$1" +shift 1 + +( + # To support running this script from anywhere, first cd into this directory, + # and then install with forced module mode on and fully qualified name. + # make sure your GOPATH env is properly set. 
+ # it will go under $GOPATH/bin + cd "$(dirname "${0}")" + GO111MODULE=on go install github.com/galgotech/builder-gen@latest +) + +function codegen::join() { local IFS="$1"; shift; echo "$*"; } + +if [ "${GENS}" = "all" ] || grep -qw "buildergen" <<<"${GENS}"; then + echo "Generating buildergen funcs" + export GO111MODULE=on + # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 + "${GOPATH}/bin/builder-gen" -v 1 \ + --input-dirs ./model -O zz_generated.buildergen \ + --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" + "$@" +fi diff --git a/model/action.go b/model/action.go index a8d5705..7bc4fba 100644 --- a/model/action.go +++ b/model/action.go @@ -17,6 +17,7 @@ package model import "github.com/serverlessworkflow/sdk-go/v2/util" // Action specify invocations of services or other workflows during workflow execution. +// +builder-gen:new-call=ApplyDefault type Action struct { // Defines Unique action identifier. // +optional @@ -72,6 +73,7 @@ func (a *Action) ApplyDefault() { } // FunctionRef defines the reference to a reusable function definition +// +builder-gen:new-call=ApplyDefault type FunctionRef struct { // Name of the referenced function. // +kubebuilder:validation:Required diff --git a/model/action_data_filter.go b/model/action_data_filter.go index 060f12f..e929f6b 100644 --- a/model/action_data_filter.go +++ b/model/action_data_filter.go @@ -18,11 +18,10 @@ import "github.com/serverlessworkflow/sdk-go/v2/util" // ActionDataFilter used to filter action data results. // +optional -// +optional +// +builder-gen:new-call=ApplyDefault type ActionDataFilter struct { // Workflow expression that filters state data that can be used by the action. // +optional - // +optional FromStateData string `json:"fromStateData,omitempty"` // If set to false, action data results are not added/merged to state data. In this case 'results' // and 'toStateData' should be ignored. Default is true. diff --git a/model/event.go b/model/event.go index a9c5a69..96069bf 100644 --- a/model/event.go +++ b/model/event.go @@ -39,6 +39,7 @@ const ( ) // Event used to define events and their correlations +// +builder-gen:new-call=ApplyDefault type Event struct { Common `json:",inline"` // Unique event name. @@ -56,6 +57,7 @@ type Event struct { Kind EventKind `json:"kind,omitempty" validate:"required,oneofkind"` // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload // and context attributes should be accessible. Defaults to true. + // +kubebuilder:default=true // +optional DataOnly bool `json:"dataOnly,omitempty"` // Define event correlation rules for this event. Only used for consumed events. @@ -88,6 +90,7 @@ type Correlation struct { } // EventRef defining invocation of a function via event +// +builder-gen:new-call=ApplyDefault type EventRef struct { // Reference to the unique name of a 'produced' event definition, // +kubebuilder:validation:Required diff --git a/model/event_data_filter.go b/model/event_data_filter.go index a725a1b..1db5bbf 100644 --- a/model/event_data_filter.go +++ b/model/event_data_filter.go @@ -17,6 +17,7 @@ package model import "github.com/serverlessworkflow/sdk-go/v2/util" // EventDataFilter used to filter consumed event payloads. +// +builder-gen:new-call=ApplyDefault type EventDataFilter struct { // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' // should be ignored. Default is true. 
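[Editor's note: illustration only, not part of this patch series] The `+builder-gen:new-call=ApplyDefault` markers added above instruct the builder generator to call the type's `ApplyDefault()` method inside the generated `New<Type>Builder()` constructor, so programmatically built objects start from the same defaults as unmarshalled ones (the full generated output appears in `zz_generated.buildergen.go` at the end of this patch). A hand-written sketch of that generated shape, using a hypothetical `ExampleFilter` type:

```go
package example

// ExampleFilter is a hypothetical type used only to illustrate the marker;
// it is not part of the SDK model.
// +builder-gen:new-call=ApplyDefault
type ExampleFilter struct {
	Enabled bool
}

// ApplyDefault sets the type's default values.
func (f *ExampleFilter) ApplyDefault() { f.Enabled = true }

// ExampleFilterBuilder is roughly what builder-gen emits for a marked type.
type ExampleFilterBuilder struct {
	model ExampleFilter
}

func NewExampleFilterBuilder() *ExampleFilterBuilder {
	builder := &ExampleFilterBuilder{}
	builder.model = ExampleFilter{}
	builder.model.ApplyDefault() // inserted because of the new-call marker
	return builder
}

func (b *ExampleFilterBuilder) Enabled(input bool) *ExampleFilterBuilder {
	b.model.Enabled = input
	return b
}

func (b *ExampleFilterBuilder) Build() ExampleFilter {
	return b.model
}
```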
diff --git a/model/event_state.go b/model/event_state.go index 37d3840..39bd590 100644 --- a/model/event_state.go +++ b/model/event_state.go @@ -22,6 +22,7 @@ import ( // EventState await one or more events and perform actions when they are received. If defined as the // workflow starting state, the event state definition controls when the workflow instances should be created. +// +builder-gen:new-call=ApplyDefault type EventState struct { // TODO: EventState doesn't have usedForCompensation field. @@ -64,6 +65,7 @@ func (e *EventState) ApplyDefault() { } // OnEvents define which actions are be performed for the one or more events. +// +builder-gen:new-call=ApplyDefault type OnEvents struct { // References one or more unique event names in the defined workflow events. // +kubebuilder:validation:MinItems=1 diff --git a/model/foreach_state.go b/model/foreach_state.go index 3edb891..aa19f4e 100644 --- a/model/foreach_state.go +++ b/model/foreach_state.go @@ -44,6 +44,7 @@ const ( ) // ForEachState used to execute actions for each element of a data set. +// +builder-gen:new-call=ApplyDefault type ForEachState struct { // Workflow expression selecting an array element of the states' data. // +kubebuilder:validation:Required diff --git a/model/function.go b/model/function.go index 07e6f77..7cf4197 100644 --- a/model/function.go +++ b/model/function.go @@ -59,6 +59,7 @@ func (i FunctionType) String() string { } // Function ... +// +builder-gen:new-call=ApplyDefault type Function struct { Common `json:",inline"` // Unique function name diff --git a/model/operation_state.go b/model/operation_state.go index 8a88e3b..c530ad8 100644 --- a/model/operation_state.go +++ b/model/operation_state.go @@ -21,6 +21,7 @@ import ( ) // OperationState defines a set of actions to be performed in sequence or in parallel. +// +builder-gen:new-call=ApplyDefault type OperationState struct { // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. // +kubebuilder:validation:Enum=sequential;parallel diff --git a/model/parallel_state.go b/model/parallel_state.go index 96edd7a..f65b7a1 100644 --- a/model/parallel_state.go +++ b/model/parallel_state.go @@ -45,6 +45,7 @@ const ( ) // ParallelState Consists of a number of states that are executed in parallel +// +builder-gen:new-call=ApplyDefault type ParallelState struct { // List of branches for this parallel state. // +kubebuilder:validation:MinItems=1 diff --git a/model/retry.go b/model/retry.go index e3c7e10..83e2333 100644 --- a/model/retry.go +++ b/model/retry.go @@ -22,6 +22,7 @@ import ( ) // Retry ... +// +builder-gen:new-call=ApplyDefault type Retry struct { // Unique retry strategy name // +kubebuilder:validation:Required diff --git a/model/states.go b/model/states.go index 5842d9a..fa834f7 100644 --- a/model/states.go +++ b/model/states.go @@ -116,6 +116,7 @@ func (b *BaseState) MarshalJSON() ([]byte, error) { return cus, err } +// +builder-gen:embedded-ignore-method=BaseState type State struct { BaseState `json:",inline"` // delayState Causes the workflow execution to delay for a specified duration. diff --git a/model/workflow.go b/model/workflow.go index 3fddfb8..8f7f032 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -96,6 +96,7 @@ const ( // BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces // to make it easy for custom unmarshalers implementations to unmarshal the common data structure. 
+// +builder-gen:new-call=ApplyDefault type BaseWorkflow struct { // Workflow unique identifier // +optional @@ -167,6 +168,12 @@ type BaseWorkflow struct { Auth Auths `json:"auth,omitempty" validate:"unique=Name,dive"` } +// ApplyDefault set the default values for Workflow +func (w *BaseWorkflow) ApplyDefault() { + w.SpecVersion = "0.8" + w.ExpressionLang = JqExpressionLang +} + type Auths []Auth type authsUnmarshal Auths @@ -186,6 +193,7 @@ func (e *Errors) UnmarshalJSON(data []byte) error { } // Workflow base definition +// +builder-gen:embedded-ignore-method=BaseWorkflow type Workflow struct { BaseWorkflow `json:",inline"` // +kubebuilder:pruning:PreserveUnknownFields @@ -217,11 +225,6 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { return nil } -// ApplyDefault set the default values for Workflow -func (w *Workflow) ApplyDefault() { - w.ExpressionLang = JqExpressionLang -} - // +kubebuilder:validation:MinItems=1 type States []State @@ -288,6 +291,7 @@ func (t *Timeouts) UnmarshalJSON(data []byte) error { // WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration // format. If not defined, the workflow execution should be given "unlimited" amount of time to complete. +// +builder-gen:new-call=ApplyDefault type WorkflowExecTimeout struct { // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. // +kubebuilder:default=unlimited @@ -503,6 +507,7 @@ type StateDataFilter struct { } // DataInputSchema Used to validate the workflow data input against a defined JSON Schema +// +builder-gen:new-call=ApplyDefault type DataInputSchema struct { // +kubebuilder:validation:Required Schema string `json:"schema" validate:"required"` diff --git a/model/workflow_ref.go b/model/workflow_ref.go index 4c558cc..c1fd1ce 100644 --- a/model/workflow_ref.go +++ b/model/workflow_ref.go @@ -36,6 +36,7 @@ const ( ) // WorkflowRef holds a reference for a workflow definition +// +builder-gen:new-call=ApplyDefault type WorkflowRef struct { // Sub-workflow unique id // +kubebuilder:validation:Required diff --git a/model/workflow_test.go b/model/workflow_test.go index 29a3720..352a751 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -52,6 +52,7 @@ func TestWorkflowStartUnmarshalJSON(t *testing.T) { data: `{"states": [{"name": "start state name", "type": "operation"}]}`, expect: Workflow{ BaseWorkflow: BaseWorkflow{ + SpecVersion: "0.8", ExpressionLang: "jq", Start: &Start{ StateName: "start state name", @@ -72,10 +73,11 @@ func TestWorkflowStartUnmarshalJSON(t *testing.T) { err: ``, }, { - desp: "start empty, and states empty", + desp: "start empty and states empty", data: `{"states": []}`, expect: Workflow{ BaseWorkflow: BaseWorkflow{ + SpecVersion: "0.8", ExpressionLang: "jq", }, States: []State{}, diff --git a/model/workflow_validator.go b/model/workflow_validator.go index 7d94d1f..ad72717 100644 --- a/model/workflow_validator.go +++ b/model/workflow_validator.go @@ -38,6 +38,7 @@ func ValidationWrap(fnCtx WorkflowValidator) validator.StructLevelFuncCtx { } } +// +builder-gen:ignore=true type ValidatorContext struct { States map[string]State Functions map[string]Function diff --git a/model/zz_generated.buildergen.go b/model/zz_generated.buildergen.go new file mode 100644 index 0000000..9ab7058 --- /dev/null +++ b/model/zz_generated.buildergen.go @@ -0,0 +1,3108 @@ +//go:build !ignore_autogenerated +// +build !ignore_autogenerated + +// Code generated by main. DO NOT EDIT. 
+ +package model + +import ( + intstr "k8s.io/apimachinery/pkg/util/intstr" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewActionBuilder() *ActionBuilder { + builder := &ActionBuilder{} + builder.model = Action{} + builder.model.ApplyDefault() + builder.actiondatafilter = NewActionDataFilterBuilder() + return builder +} + +type ActionBuilder struct { + model Action + functionref *FunctionRefBuilder + eventref *EventRefBuilder + subflowref *WorkflowRefBuilder + sleep *SleepBuilder + actiondatafilter *ActionDataFilterBuilder +} + +func (b *ActionBuilder) ID(input string) *ActionBuilder { + b.model.ID = input + return b +} + +func (b *ActionBuilder) Name(input string) *ActionBuilder { + b.model.Name = input + return b +} + +func (b *ActionBuilder) FunctionRef() *FunctionRefBuilder { + if b.functionref == nil { + b.functionref = NewFunctionRefBuilder() + } + return b.functionref +} + +func (b *ActionBuilder) EventRef() *EventRefBuilder { + if b.eventref == nil { + b.eventref = NewEventRefBuilder() + } + return b.eventref +} + +func (b *ActionBuilder) SubFlowRef() *WorkflowRefBuilder { + if b.subflowref == nil { + b.subflowref = NewWorkflowRefBuilder() + } + return b.subflowref +} + +func (b *ActionBuilder) Sleep() *SleepBuilder { + if b.sleep == nil { + b.sleep = NewSleepBuilder() + } + return b.sleep +} + +func (b *ActionBuilder) RetryRef(input string) *ActionBuilder { + b.model.RetryRef = input + return b +} + +func (b *ActionBuilder) NonRetryableErrors(input []string) *ActionBuilder { + b.model.NonRetryableErrors = input + return b +} + +func (b *ActionBuilder) RetryableErrors(input []string) *ActionBuilder { + b.model.RetryableErrors = input + return b +} + +func (b *ActionBuilder) ActionDataFilter() *ActionDataFilterBuilder { + return b.actiondatafilter +} + +func (b *ActionBuilder) Condition(input string) *ActionBuilder { + b.model.Condition = input + return b +} + +func (b *ActionBuilder) Build() Action { + if b.functionref != nil { + functionref := b.functionref.Build() + b.model.FunctionRef = &functionref + } + if b.eventref != nil { + eventref := b.eventref.Build() + b.model.EventRef = &eventref + } + if b.subflowref != nil { + subflowref := b.subflowref.Build() + b.model.SubFlowRef = &subflowref + } + if b.sleep != nil { + sleep := b.sleep.Build() + b.model.Sleep = &sleep + } + b.model.ActionDataFilter = b.actiondatafilter.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewActionDataFilterBuilder() *ActionDataFilterBuilder { + builder := &ActionDataFilterBuilder{} + builder.model = ActionDataFilter{} + builder.model.ApplyDefault() + return builder +} + +type ActionDataFilterBuilder struct { + model ActionDataFilter +} + +func (b *ActionDataFilterBuilder) FromStateData(input string) *ActionDataFilterBuilder { + b.model.FromStateData = input + return b +} + +func (b *ActionDataFilterBuilder) UseResults(input bool) *ActionDataFilterBuilder { + b.model.UseResults = input + return b +} + +func (b *ActionDataFilterBuilder) Results(input string) *ActionDataFilterBuilder { + b.model.Results = input + return b +} + +func (b *ActionDataFilterBuilder) ToStateData(input string) *ActionDataFilterBuilder { + b.model.ToStateData = input + return b +} + +func (b *ActionDataFilterBuilder) Build() ActionDataFilter { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewAuthBuilder() *AuthBuilder { + builder := &AuthBuilder{} + builder.model = Auth{} + builder.properties = NewAuthPropertiesBuilder() + return builder +} + +type AuthBuilder struct { + model Auth + properties *AuthPropertiesBuilder +} + +func (b *AuthBuilder) Name(input string) *AuthBuilder { + b.model.Name = input + return b +} + +func (b *AuthBuilder) Scheme(input AuthType) *AuthBuilder { + b.model.Scheme = input + return b +} + +func (b *AuthBuilder) Properties() *AuthPropertiesBuilder { + return b.properties +} + +func (b *AuthBuilder) Build() Auth { + b.model.Properties = b.properties.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewAuthPropertiesBuilder() *AuthPropertiesBuilder { + builder := &AuthPropertiesBuilder{} + builder.model = AuthProperties{} + return builder +} + +type AuthPropertiesBuilder struct { + model AuthProperties + basic *BasicAuthPropertiesBuilder + bearer *BearerAuthPropertiesBuilder + oauth2 *OAuth2AuthPropertiesBuilder +} + +func (b *AuthPropertiesBuilder) Basic() *BasicAuthPropertiesBuilder { + if b.basic == nil { + b.basic = NewBasicAuthPropertiesBuilder() + } + return b.basic +} + +func (b *AuthPropertiesBuilder) Bearer() *BearerAuthPropertiesBuilder { + if b.bearer == nil { + b.bearer = NewBearerAuthPropertiesBuilder() + } + return b.bearer +} + +func (b *AuthPropertiesBuilder) OAuth2() *OAuth2AuthPropertiesBuilder { + if b.oauth2 == nil { + b.oauth2 = NewOAuth2AuthPropertiesBuilder() + } + return b.oauth2 +} + +func (b *AuthPropertiesBuilder) Build() AuthProperties { + if b.basic != nil { + basic := b.basic.Build() + b.model.Basic = &basic + } + if b.bearer != nil { + bearer := b.bearer.Build() + b.model.Bearer = &bearer + } + if b.oauth2 != nil { + oauth2 := b.oauth2.Build() + b.model.OAuth2 = &oauth2 + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewAuthsBuilder() *AuthsBuilder { + builder := &AuthsBuilder{} + builder.model = Auths{} + return builder +} + +type AuthsBuilder struct { + model Auths +} + +func (b *AuthsBuilder) Build() Auths { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewBaseStateBuilder() *BaseStateBuilder { + builder := &BaseStateBuilder{} + builder.model = BaseState{} + builder.onerrors = []*OnErrorBuilder{} + return builder +} + +type BaseStateBuilder struct { + model BaseState + onerrors []*OnErrorBuilder + transition *TransitionBuilder + statedatafilter *StateDataFilterBuilder + end *EndBuilder +} + +func (b *BaseStateBuilder) ID(input string) *BaseStateBuilder { + b.model.ID = input + return b +} + +func (b *BaseStateBuilder) Name(input string) *BaseStateBuilder { + b.model.Name = input + return b +} + +func (b *BaseStateBuilder) Type(input StateType) *BaseStateBuilder { + b.model.Type = input + return b +} + +func (b *BaseStateBuilder) AddOnErrors() *OnErrorBuilder { + builder := NewOnErrorBuilder() + b.onerrors = append(b.onerrors, builder) + return builder +} + +func (b *BaseStateBuilder) RemoveOnErrors(remove *OnErrorBuilder) { + for i, val := range b.onerrors { + if val == remove { + b.onerrors[i] = b.onerrors[len(b.onerrors)-1] + b.onerrors = b.onerrors[:len(b.onerrors)-1] + } + } +} +func (b *BaseStateBuilder) Transition() *TransitionBuilder { + if b.transition == nil { + b.transition = NewTransitionBuilder() + } + return b.transition +} + +func (b *BaseStateBuilder) StateDataFilter() *StateDataFilterBuilder { + if b.statedatafilter == nil { + b.statedatafilter = NewStateDataFilterBuilder() + } + return b.statedatafilter +} + +func (b *BaseStateBuilder) CompensatedBy(input string) *BaseStateBuilder { + b.model.CompensatedBy = input + return b +} + +func (b *BaseStateBuilder) UsedForCompensation(input bool) *BaseStateBuilder { + b.model.UsedForCompensation = input + return b +} + +func (b *BaseStateBuilder) End() *EndBuilder { + if b.end == nil { + b.end = NewEndBuilder() + } + return b.end +} + +func (b *BaseStateBuilder) Build() BaseState { + b.model.OnErrors = []OnError{} + for _, v := range b.onerrors { + b.model.OnErrors = append(b.model.OnErrors, v.Build()) + } + if b.transition != nil { + transition := b.transition.Build() + b.model.Transition = &transition + } + if b.statedatafilter != nil { + statedatafilter := b.statedatafilter.Build() + b.model.StateDataFilter = &statedatafilter + } + if b.end != nil { + end := b.end.Build() + b.model.End = &end + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewBaseWorkflowBuilder() *BaseWorkflowBuilder { + builder := &BaseWorkflowBuilder{} + builder.model = BaseWorkflow{} + builder.model.ApplyDefault() + builder.errors = []*ErrorBuilder{} + builder.auth = []*AuthBuilder{} + return builder +} + +type BaseWorkflowBuilder struct { + model BaseWorkflow + start *StartBuilder + datainputschema *DataInputSchemaBuilder + constants *ConstantsBuilder + timeouts *TimeoutsBuilder + errors []*ErrorBuilder + auth []*AuthBuilder +} + +func (b *BaseWorkflowBuilder) ID(input string) *BaseWorkflowBuilder { + b.model.ID = input + return b +} + +func (b *BaseWorkflowBuilder) Key(input string) *BaseWorkflowBuilder { + b.model.Key = input + return b +} + +func (b *BaseWorkflowBuilder) Name(input string) *BaseWorkflowBuilder { + b.model.Name = input + return b +} + +func (b *BaseWorkflowBuilder) Description(input string) *BaseWorkflowBuilder { + b.model.Description = input + return b +} + +func (b *BaseWorkflowBuilder) Version(input string) *BaseWorkflowBuilder { + b.model.Version = input + return b +} + +func (b *BaseWorkflowBuilder) Start() *StartBuilder { + if b.start == nil { + b.start = NewStartBuilder() + } + return b.start +} + +func (b *BaseWorkflowBuilder) Annotations(input []string) *BaseWorkflowBuilder { + b.model.Annotations = input + return b +} + +func (b *BaseWorkflowBuilder) DataInputSchema() *DataInputSchemaBuilder { + if b.datainputschema == nil { + b.datainputschema = NewDataInputSchemaBuilder() + } + return b.datainputschema +} + +func (b *BaseWorkflowBuilder) SpecVersion(input string) *BaseWorkflowBuilder { + b.model.SpecVersion = input + return b +} + +func (b *BaseWorkflowBuilder) Secrets(input Secrets) *BaseWorkflowBuilder { + b.model.Secrets = input + return b +} + +func (b *BaseWorkflowBuilder) Constants() *ConstantsBuilder { + if b.constants == nil { + b.constants = NewConstantsBuilder() + } + return b.constants +} + +func (b *BaseWorkflowBuilder) ExpressionLang(input ExpressionLangType) *BaseWorkflowBuilder { + b.model.ExpressionLang = input + return b +} + +func (b *BaseWorkflowBuilder) Timeouts() *TimeoutsBuilder { + if b.timeouts == nil { + b.timeouts = NewTimeoutsBuilder() + } + return b.timeouts +} + +func (b *BaseWorkflowBuilder) AddErrors() *ErrorBuilder { + builder := NewErrorBuilder() + b.errors = append(b.errors, builder) + return builder +} + +func (b *BaseWorkflowBuilder) RemoveErrors(remove *ErrorBuilder) { + for i, val := range b.errors { + if val == remove { + b.errors[i] = b.errors[len(b.errors)-1] + b.errors = b.errors[:len(b.errors)-1] + } + } +} +func (b *BaseWorkflowBuilder) KeepActive(input bool) *BaseWorkflowBuilder { + b.model.KeepActive = input + return b +} + +func (b *BaseWorkflowBuilder) Metadata(input Metadata) *BaseWorkflowBuilder { + b.model.Metadata = input + return b +} + +func (b *BaseWorkflowBuilder) AutoRetries(input bool) *BaseWorkflowBuilder { + b.model.AutoRetries = input + return b +} + +func (b *BaseWorkflowBuilder) AddAuth() *AuthBuilder { + builder := NewAuthBuilder() + b.auth = append(b.auth, builder) + return builder +} + +func (b *BaseWorkflowBuilder) RemoveAuth(remove *AuthBuilder) { + for i, val := range b.auth { + if val == remove { + b.auth[i] = b.auth[len(b.auth)-1] + b.auth = b.auth[:len(b.auth)-1] + } + } +} +func (b *BaseWorkflowBuilder) Build() BaseWorkflow { + if b.start != nil { + start := b.start.Build() + b.model.Start = &start + } + if b.datainputschema != nil { + datainputschema := b.datainputschema.Build() + b.model.DataInputSchema = &datainputschema + } + if b.constants 
!= nil { + constants := b.constants.Build() + b.model.Constants = &constants + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + b.model.Errors = []Error{} + for _, v := range b.errors { + b.model.Errors = append(b.model.Errors, v.Build()) + } + b.model.Auth = []Auth{} + for _, v := range b.auth { + b.model.Auth = append(b.model.Auth, v.Build()) + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewBasicAuthPropertiesBuilder() *BasicAuthPropertiesBuilder { + builder := &BasicAuthPropertiesBuilder{} + builder.model = BasicAuthProperties{} + builder.CommonBuilder = *NewCommonBuilder() + return builder +} + +type BasicAuthPropertiesBuilder struct { + model BasicAuthProperties + CommonBuilder +} + +func (b *BasicAuthPropertiesBuilder) Common() *CommonBuilder { + return &b.CommonBuilder +} + +func (b *BasicAuthPropertiesBuilder) Secret(input string) *BasicAuthPropertiesBuilder { + b.model.Secret = input + return b +} + +func (b *BasicAuthPropertiesBuilder) Username(input string) *BasicAuthPropertiesBuilder { + b.model.Username = input + return b +} + +func (b *BasicAuthPropertiesBuilder) Password(input string) *BasicAuthPropertiesBuilder { + b.model.Password = input + return b +} + +func (b *BasicAuthPropertiesBuilder) Build() BasicAuthProperties { + b.model.Common = b.CommonBuilder.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewBearerAuthPropertiesBuilder() *BearerAuthPropertiesBuilder { + builder := &BearerAuthPropertiesBuilder{} + builder.model = BearerAuthProperties{} + builder.CommonBuilder = *NewCommonBuilder() + return builder +} + +type BearerAuthPropertiesBuilder struct { + model BearerAuthProperties + CommonBuilder +} + +func (b *BearerAuthPropertiesBuilder) Common() *CommonBuilder { + return &b.CommonBuilder +} + +func (b *BearerAuthPropertiesBuilder) Secret(input string) *BearerAuthPropertiesBuilder { + b.model.Secret = input + return b +} + +func (b *BearerAuthPropertiesBuilder) Token(input string) *BearerAuthPropertiesBuilder { + b.model.Token = input + return b +} + +func (b *BearerAuthPropertiesBuilder) Build() BearerAuthProperties { + b.model.Common = b.CommonBuilder.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
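Collection-valued fields follow an `Add*`/`Remove*` pattern: `Add*` appends a child builder and returns it, `Remove*` drops it again, and `Build()` flattens whatever remains into the model slice. A small sketch against the `BaseWorkflowBuilder` above, assuming the generated code in `package model`; the `"0.8"` spec version and `jq` expression language come from the `ApplyDefault` call wired into the constructor by this patch.

```go
// Sketch only: the Add*/Remove* pattern on the generated BaseWorkflowBuilder.
// Assumes the generated builders from this patch in package model.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	wb := model.NewBaseWorkflowBuilder() // ApplyDefault sets SpecVersion "0.8" and jq
	wb.ID("orders").Name("order-workflow")

	e := wb.AddErrors() // returns an ErrorBuilder appended to the builder's slice
	e.Name("not-found").Code("404")

	wb.RemoveErrors(e) // drops that element again before Build

	base := wb.Build()
	fmt.Println(base.SpecVersion, base.ExpressionLang, len(base.Errors)) // 0.8 jq 0
}
```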
+func NewBranchBuilder() *BranchBuilder { + builder := &BranchBuilder{} + builder.model = Branch{} + builder.actions = []*ActionBuilder{} + return builder +} + +type BranchBuilder struct { + model Branch + actions []*ActionBuilder + timeouts *BranchTimeoutsBuilder +} + +func (b *BranchBuilder) Name(input string) *BranchBuilder { + b.model.Name = input + return b +} + +func (b *BranchBuilder) AddActions() *ActionBuilder { + builder := NewActionBuilder() + b.actions = append(b.actions, builder) + return builder +} + +func (b *BranchBuilder) RemoveActions(remove *ActionBuilder) { + for i, val := range b.actions { + if val == remove { + b.actions[i] = b.actions[len(b.actions)-1] + b.actions = b.actions[:len(b.actions)-1] + } + } +} +func (b *BranchBuilder) Timeouts() *BranchTimeoutsBuilder { + if b.timeouts == nil { + b.timeouts = NewBranchTimeoutsBuilder() + } + return b.timeouts +} + +func (b *BranchBuilder) Build() Branch { + b.model.Actions = []Action{} + for _, v := range b.actions { + b.model.Actions = append(b.model.Actions, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewBranchTimeoutsBuilder() *BranchTimeoutsBuilder { + builder := &BranchTimeoutsBuilder{} + builder.model = BranchTimeouts{} + return builder +} + +type BranchTimeoutsBuilder struct { + model BranchTimeouts +} + +func (b *BranchTimeoutsBuilder) ActionExecTimeout(input string) *BranchTimeoutsBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *BranchTimeoutsBuilder) BranchExecTimeout(input string) *BranchTimeoutsBuilder { + b.model.BranchExecTimeout = input + return b +} + +func (b *BranchTimeoutsBuilder) Build() BranchTimeouts { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewCallbackStateBuilder() *CallbackStateBuilder { + builder := &CallbackStateBuilder{} + builder.model = CallbackState{} + builder.action = NewActionBuilder() + return builder +} + +type CallbackStateBuilder struct { + model CallbackState + action *ActionBuilder + timeouts *CallbackStateTimeoutBuilder + eventdatafilter *EventDataFilterBuilder +} + +func (b *CallbackStateBuilder) Action() *ActionBuilder { + return b.action +} + +func (b *CallbackStateBuilder) EventRef(input string) *CallbackStateBuilder { + b.model.EventRef = input + return b +} + +func (b *CallbackStateBuilder) Timeouts() *CallbackStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewCallbackStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *CallbackStateBuilder) EventDataFilter() *EventDataFilterBuilder { + if b.eventdatafilter == nil { + b.eventdatafilter = NewEventDataFilterBuilder() + } + return b.eventdatafilter +} + +func (b *CallbackStateBuilder) Build() CallbackState { + b.model.Action = b.action.Build() + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + if b.eventdatafilter != nil { + eventdatafilter := b.eventdatafilter.Build() + b.model.EventDataFilter = &eventdatafilter + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewCallbackStateTimeoutBuilder() *CallbackStateTimeoutBuilder { + builder := &CallbackStateTimeoutBuilder{} + builder.model = CallbackStateTimeout{} + return builder +} + +type CallbackStateTimeoutBuilder struct { + model CallbackStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *CallbackStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *CallbackStateTimeoutBuilder) ActionExecTimeout(input string) *CallbackStateTimeoutBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *CallbackStateTimeoutBuilder) EventTimeout(input string) *CallbackStateTimeoutBuilder { + b.model.EventTimeout = input + return b +} + +func (b *CallbackStateTimeoutBuilder) Build() CallbackStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewCommonBuilder() *CommonBuilder { + builder := &CommonBuilder{} + builder.model = Common{} + return builder +} + +type CommonBuilder struct { + model Common +} + +func (b *CommonBuilder) Metadata(input Metadata) *CommonBuilder { + b.model.Metadata = input + return b +} + +func (b *CommonBuilder) Build() Common { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewConstantsBuilder() *ConstantsBuilder { + builder := &ConstantsBuilder{} + builder.model = Constants{} + return builder +} + +type ConstantsBuilder struct { + model Constants +} + +func (b *ConstantsBuilder) Data(input ConstantsData) *ConstantsBuilder { + b.model.Data = input + return b +} + +func (b *ConstantsBuilder) Build() Constants { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewConstantsDataBuilder() *ConstantsDataBuilder { + builder := &ConstantsDataBuilder{} + builder.model = ConstantsData{} + return builder +} + +type ConstantsDataBuilder struct { + model ConstantsData +} + +func (b *ConstantsDataBuilder) Build() ConstantsData { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewContinueAsBuilder() *ContinueAsBuilder { + builder := &ContinueAsBuilder{} + builder.model = ContinueAs{} + builder.data = NewObjectBuilder() + builder.workflowexectimeout = NewWorkflowExecTimeoutBuilder() + return builder +} + +type ContinueAsBuilder struct { + model ContinueAs + data *ObjectBuilder + workflowexectimeout *WorkflowExecTimeoutBuilder +} + +func (b *ContinueAsBuilder) WorkflowID(input string) *ContinueAsBuilder { + b.model.WorkflowID = input + return b +} + +func (b *ContinueAsBuilder) Version(input string) *ContinueAsBuilder { + b.model.Version = input + return b +} + +func (b *ContinueAsBuilder) Data() *ObjectBuilder { + return b.data +} + +func (b *ContinueAsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { + return b.workflowexectimeout +} + +func (b *ContinueAsBuilder) Build() ContinueAs { + b.model.Data = b.data.Build() + b.model.WorkflowExecTimeout = b.workflowexectimeout.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func NewCorrelationBuilder() *CorrelationBuilder { + builder := &CorrelationBuilder{} + builder.model = Correlation{} + return builder +} + +type CorrelationBuilder struct { + model Correlation +} + +func (b *CorrelationBuilder) ContextAttributeName(input string) *CorrelationBuilder { + b.model.ContextAttributeName = input + return b +} + +func (b *CorrelationBuilder) ContextAttributeValue(input string) *CorrelationBuilder { + b.model.ContextAttributeValue = input + return b +} + +func (b *CorrelationBuilder) Build() Correlation { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewCronBuilder() *CronBuilder { + builder := &CronBuilder{} + builder.model = Cron{} + return builder +} + +type CronBuilder struct { + model Cron +} + +func (b *CronBuilder) Expression(input string) *CronBuilder { + b.model.Expression = input + return b +} + +func (b *CronBuilder) ValidUntil(input string) *CronBuilder { + b.model.ValidUntil = input + return b +} + +func (b *CronBuilder) Build() Cron { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewDataConditionBuilder() *DataConditionBuilder { + builder := &DataConditionBuilder{} + builder.model = DataCondition{} + return builder +} + +type DataConditionBuilder struct { + model DataCondition + end *EndBuilder + transition *TransitionBuilder +} + +func (b *DataConditionBuilder) Name(input string) *DataConditionBuilder { + b.model.Name = input + return b +} + +func (b *DataConditionBuilder) Condition(input string) *DataConditionBuilder { + b.model.Condition = input + return b +} + +func (b *DataConditionBuilder) Metadata(input Metadata) *DataConditionBuilder { + b.model.Metadata = input + return b +} + +func (b *DataConditionBuilder) End() *EndBuilder { + if b.end == nil { + b.end = NewEndBuilder() + } + return b.end +} + +func (b *DataConditionBuilder) Transition() *TransitionBuilder { + if b.transition == nil { + b.transition = NewTransitionBuilder() + } + return b.transition +} + +func (b *DataConditionBuilder) Build() DataCondition { + if b.end != nil { + end := b.end.Build() + b.model.End = &end + } + if b.transition != nil { + transition := b.transition.Build() + b.model.Transition = &transition + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewDataInputSchemaBuilder() *DataInputSchemaBuilder { + builder := &DataInputSchemaBuilder{} + builder.model = DataInputSchema{} + builder.model.ApplyDefault() + return builder +} + +type DataInputSchemaBuilder struct { + model DataInputSchema +} + +func (b *DataInputSchemaBuilder) Schema(input string) *DataInputSchemaBuilder { + b.model.Schema = input + return b +} + +func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSchemaBuilder { + b.model.FailOnValidationErrors = input + return b +} + +func (b *DataInputSchemaBuilder) Build() DataInputSchema { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewDefaultConditionBuilder() *DefaultConditionBuilder { + builder := &DefaultConditionBuilder{} + builder.model = DefaultCondition{} + return builder +} + +type DefaultConditionBuilder struct { + model DefaultCondition + transition *TransitionBuilder + end *EndBuilder +} + +func (b *DefaultConditionBuilder) Transition() *TransitionBuilder { + if b.transition == nil { + b.transition = NewTransitionBuilder() + } + return b.transition +} + +func (b *DefaultConditionBuilder) End() *EndBuilder { + if b.end == nil { + b.end = NewEndBuilder() + } + return b.end +} + +func (b *DefaultConditionBuilder) Build() DefaultCondition { + if b.transition != nil { + transition := b.transition.Build() + b.model.Transition = &transition + } + if b.end != nil { + end := b.end.Build() + b.model.End = &end + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewDelayStateBuilder() *DelayStateBuilder { + builder := &DelayStateBuilder{} + builder.model = DelayState{} + return builder +} + +type DelayStateBuilder struct { + model DelayState +} + +func (b *DelayStateBuilder) TimeDelay(input string) *DelayStateBuilder { + b.model.TimeDelay = input + return b +} + +func (b *DelayStateBuilder) Build() DelayState { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEndBuilder() *EndBuilder { + builder := &EndBuilder{} + builder.model = End{} + builder.produceevents = []*ProduceEventBuilder{} + return builder +} + +type EndBuilder struct { + model End + produceevents []*ProduceEventBuilder + continueas *ContinueAsBuilder +} + +func (b *EndBuilder) Terminate(input bool) *EndBuilder { + b.model.Terminate = input + return b +} + +func (b *EndBuilder) AddProduceEvents() *ProduceEventBuilder { + builder := NewProduceEventBuilder() + b.produceevents = append(b.produceevents, builder) + return builder +} + +func (b *EndBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { + for i, val := range b.produceevents { + if val == remove { + b.produceevents[i] = b.produceevents[len(b.produceevents)-1] + b.produceevents = b.produceevents[:len(b.produceevents)-1] + } + } +} +func (b *EndBuilder) Compensate(input bool) *EndBuilder { + b.model.Compensate = input + return b +} + +func (b *EndBuilder) ContinueAs() *ContinueAsBuilder { + if b.continueas == nil { + b.continueas = NewContinueAsBuilder() + } + return b.continueas +} + +func (b *EndBuilder) Build() End { + b.model.ProduceEvents = []ProduceEvent{} + for _, v := range b.produceevents { + b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) + } + if b.continueas != nil { + continueas := b.continueas.Build() + b.model.ContinueAs = &continueas + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewErrorBuilder() *ErrorBuilder { + builder := &ErrorBuilder{} + builder.model = Error{} + return builder +} + +type ErrorBuilder struct { + model Error +} + +func (b *ErrorBuilder) Name(input string) *ErrorBuilder { + b.model.Name = input + return b +} + +func (b *ErrorBuilder) Code(input string) *ErrorBuilder { + b.model.Code = input + return b +} + +func (b *ErrorBuilder) Description(input string) *ErrorBuilder { + b.model.Description = input + return b +} + +func (b *ErrorBuilder) Build() Error { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewErrorsBuilder() *ErrorsBuilder { + builder := &ErrorsBuilder{} + builder.model = Errors{} + return builder +} + +type ErrorsBuilder struct { + model Errors +} + +func (b *ErrorsBuilder) Build() Errors { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEventBuilder() *EventBuilder { + builder := &EventBuilder{} + builder.model = Event{} + builder.model.ApplyDefault() + builder.CommonBuilder = *NewCommonBuilder() + builder.correlation = []*CorrelationBuilder{} + return builder +} + +type EventBuilder struct { + model Event + CommonBuilder + correlation []*CorrelationBuilder +} + +func (b *EventBuilder) Common() *CommonBuilder { + return &b.CommonBuilder +} + +func (b *EventBuilder) Name(input string) *EventBuilder { + b.model.Name = input + return b +} + +func (b *EventBuilder) Source(input string) *EventBuilder { + b.model.Source = input + return b +} + +func (b *EventBuilder) Type(input string) *EventBuilder { + b.model.Type = input + return b +} + +func (b *EventBuilder) Kind(input EventKind) *EventBuilder { + b.model.Kind = input + return b +} + +func (b *EventBuilder) DataOnly(input bool) *EventBuilder { + b.model.DataOnly = input + return b +} + +func (b *EventBuilder) AddCorrelation() *CorrelationBuilder { + builder := NewCorrelationBuilder() + b.correlation = append(b.correlation, builder) + return builder +} + +func (b *EventBuilder) RemoveCorrelation(remove *CorrelationBuilder) { + for i, val := range b.correlation { + if val == remove { + b.correlation[i] = b.correlation[len(b.correlation)-1] + b.correlation = b.correlation[:len(b.correlation)-1] + } + } +} +func (b *EventBuilder) Build() Event { + b.model.Common = b.CommonBuilder.Build() + b.model.Correlation = []Correlation{} + for _, v := range b.correlation { + b.model.Correlation = append(b.model.Correlation, v.Build()) + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewEventConditionBuilder() *EventConditionBuilder { + builder := &EventConditionBuilder{} + builder.model = EventCondition{} + return builder +} + +type EventConditionBuilder struct { + model EventCondition + eventdatafilter *EventDataFilterBuilder + end *EndBuilder + transition *TransitionBuilder +} + +func (b *EventConditionBuilder) Name(input string) *EventConditionBuilder { + b.model.Name = input + return b +} + +func (b *EventConditionBuilder) EventRef(input string) *EventConditionBuilder { + b.model.EventRef = input + return b +} + +func (b *EventConditionBuilder) EventDataFilter() *EventDataFilterBuilder { + if b.eventdatafilter == nil { + b.eventdatafilter = NewEventDataFilterBuilder() + } + return b.eventdatafilter +} + +func (b *EventConditionBuilder) Metadata(input Metadata) *EventConditionBuilder { + b.model.Metadata = input + return b +} + +func (b *EventConditionBuilder) End() *EndBuilder { + if b.end == nil { + b.end = NewEndBuilder() + } + return b.end +} + +func (b *EventConditionBuilder) Transition() *TransitionBuilder { + if b.transition == nil { + b.transition = NewTransitionBuilder() + } + return b.transition +} + +func (b *EventConditionBuilder) Build() EventCondition { + if b.eventdatafilter != nil { + eventdatafilter := b.eventdatafilter.Build() + b.model.EventDataFilter = &eventdatafilter + } + if b.end != nil { + end := b.end.Build() + b.model.End = &end + } + if b.transition != nil { + transition := b.transition.Build() + b.model.Transition = &transition + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEventConditionsBuilder() *EventConditionsBuilder { + builder := &EventConditionsBuilder{} + builder.model = EventConditions{} + return builder +} + +type EventConditionsBuilder struct { + model EventConditions +} + +func (b *EventConditionsBuilder) Build() EventConditions { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEventDataFilterBuilder() *EventDataFilterBuilder { + builder := &EventDataFilterBuilder{} + builder.model = EventDataFilter{} + builder.model.ApplyDefault() + return builder +} + +type EventDataFilterBuilder struct { + model EventDataFilter +} + +func (b *EventDataFilterBuilder) UseData(input bool) *EventDataFilterBuilder { + b.model.UseData = input + return b +} + +func (b *EventDataFilterBuilder) Data(input string) *EventDataFilterBuilder { + b.model.Data = input + return b +} + +func (b *EventDataFilterBuilder) ToStateData(input string) *EventDataFilterBuilder { + b.model.ToStateData = input + return b +} + +func (b *EventDataFilterBuilder) Build() EventDataFilter { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewEventRefBuilder() *EventRefBuilder { + builder := &EventRefBuilder{} + builder.model = EventRef{} + builder.model.ApplyDefault() + return builder +} + +type EventRefBuilder struct { + model EventRef + data *ObjectBuilder +} + +func (b *EventRefBuilder) TriggerEventRef(input string) *EventRefBuilder { + b.model.TriggerEventRef = input + return b +} + +func (b *EventRefBuilder) ResultEventRef(input string) *EventRefBuilder { + b.model.ResultEventRef = input + return b +} + +func (b *EventRefBuilder) ResultEventTimeout(input string) *EventRefBuilder { + b.model.ResultEventTimeout = input + return b +} + +func (b *EventRefBuilder) Data() *ObjectBuilder { + if b.data == nil { + b.data = NewObjectBuilder() + } + return b.data +} + +func (b *EventRefBuilder) ContextAttributes(input map[string]Object) *EventRefBuilder { + b.model.ContextAttributes = input + return b +} + +func (b *EventRefBuilder) Invoke(input InvokeKind) *EventRefBuilder { + b.model.Invoke = input + return b +} + +func (b *EventRefBuilder) Build() EventRef { + if b.data != nil { + data := b.data.Build() + b.model.Data = &data + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEventStateBuilder() *EventStateBuilder { + builder := &EventStateBuilder{} + builder.model = EventState{} + builder.model.ApplyDefault() + builder.onevents = []*OnEventsBuilder{} + return builder +} + +type EventStateBuilder struct { + model EventState + onevents []*OnEventsBuilder + timeouts *EventStateTimeoutBuilder +} + +func (b *EventStateBuilder) Exclusive(input bool) *EventStateBuilder { + b.model.Exclusive = input + return b +} + +func (b *EventStateBuilder) AddOnEvents() *OnEventsBuilder { + builder := NewOnEventsBuilder() + b.onevents = append(b.onevents, builder) + return builder +} + +func (b *EventStateBuilder) RemoveOnEvents(remove *OnEventsBuilder) { + for i, val := range b.onevents { + if val == remove { + b.onevents[i] = b.onevents[len(b.onevents)-1] + b.onevents = b.onevents[:len(b.onevents)-1] + } + } +} +func (b *EventStateBuilder) Timeouts() *EventStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewEventStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *EventStateBuilder) Build() EventState { + b.model.OnEvents = []OnEvents{} + for _, v := range b.onevents { + b.model.OnEvents = append(b.model.OnEvents, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewEventStateTimeoutBuilder() *EventStateTimeoutBuilder { + builder := &EventStateTimeoutBuilder{} + builder.model = EventStateTimeout{} + return builder +} + +type EventStateTimeoutBuilder struct { + model EventStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *EventStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *EventStateTimeoutBuilder) ActionExecTimeout(input string) *EventStateTimeoutBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *EventStateTimeoutBuilder) EventTimeout(input string) *EventStateTimeoutBuilder { + b.model.EventTimeout = input + return b +} + +func (b *EventStateTimeoutBuilder) Build() EventStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewEventsBuilder() *EventsBuilder { + builder := &EventsBuilder{} + builder.model = Events{} + return builder +} + +type EventsBuilder struct { + model Events +} + +func (b *EventsBuilder) Build() Events { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewForEachStateBuilder() *ForEachStateBuilder { + builder := &ForEachStateBuilder{} + builder.model = ForEachState{} + builder.model.ApplyDefault() + builder.actions = []*ActionBuilder{} + return builder +} + +type ForEachStateBuilder struct { + model ForEachState + actions []*ActionBuilder + timeouts *ForEachStateTimeoutBuilder +} + +func (b *ForEachStateBuilder) InputCollection(input string) *ForEachStateBuilder { + b.model.InputCollection = input + return b +} + +func (b *ForEachStateBuilder) OutputCollection(input string) *ForEachStateBuilder { + b.model.OutputCollection = input + return b +} + +func (b *ForEachStateBuilder) IterationParam(input string) *ForEachStateBuilder { + b.model.IterationParam = input + return b +} + +func (b *ForEachStateBuilder) BatchSize(input *intstr.IntOrString) *ForEachStateBuilder { + b.model.BatchSize = input + return b +} + +func (b *ForEachStateBuilder) AddActions() *ActionBuilder { + builder := NewActionBuilder() + b.actions = append(b.actions, builder) + return builder +} + +func (b *ForEachStateBuilder) RemoveActions(remove *ActionBuilder) { + for i, val := range b.actions { + if val == remove { + b.actions[i] = b.actions[len(b.actions)-1] + b.actions = b.actions[:len(b.actions)-1] + } + } +} +func (b *ForEachStateBuilder) Timeouts() *ForEachStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewForEachStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *ForEachStateBuilder) Mode(input ForEachModeType) *ForEachStateBuilder { + b.model.Mode = input + return b +} + +func (b *ForEachStateBuilder) Build() ForEachState { + b.model.Actions = []Action{} + for _, v := range b.actions { + b.model.Actions = append(b.model.Actions, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewForEachStateTimeoutBuilder() *ForEachStateTimeoutBuilder { + builder := &ForEachStateTimeoutBuilder{} + builder.model = ForEachStateTimeout{} + return builder +} + +type ForEachStateTimeoutBuilder struct { + model ForEachStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *ForEachStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *ForEachStateTimeoutBuilder) ActionExecTimeout(input string) *ForEachStateTimeoutBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *ForEachStateTimeoutBuilder) Build() ForEachStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewFunctionBuilder() *FunctionBuilder { + builder := &FunctionBuilder{} + builder.model = Function{} + builder.model.ApplyDefault() + builder.CommonBuilder = *NewCommonBuilder() + return builder +} + +type FunctionBuilder struct { + model Function + CommonBuilder +} + +func (b *FunctionBuilder) Common() *CommonBuilder { + return &b.CommonBuilder +} + +func (b *FunctionBuilder) Name(input string) *FunctionBuilder { + b.model.Name = input + return b +} + +func (b *FunctionBuilder) Operation(input string) *FunctionBuilder { + b.model.Operation = input + return b +} + +func (b *FunctionBuilder) Type(input FunctionType) *FunctionBuilder { + b.model.Type = input + return b +} + +func (b *FunctionBuilder) AuthRef(input string) *FunctionBuilder { + b.model.AuthRef = input + return b +} + +func (b *FunctionBuilder) Build() Function { + b.model.Common = b.CommonBuilder.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewFunctionRefBuilder() *FunctionRefBuilder { + builder := &FunctionRefBuilder{} + builder.model = FunctionRef{} + builder.model.ApplyDefault() + return builder +} + +type FunctionRefBuilder struct { + model FunctionRef +} + +func (b *FunctionRefBuilder) RefName(input string) *FunctionRefBuilder { + b.model.RefName = input + return b +} + +func (b *FunctionRefBuilder) Arguments(input map[string]Object) *FunctionRefBuilder { + b.model.Arguments = input + return b +} + +func (b *FunctionRefBuilder) SelectionSet(input string) *FunctionRefBuilder { + b.model.SelectionSet = input + return b +} + +func (b *FunctionRefBuilder) Invoke(input InvokeKind) *FunctionRefBuilder { + b.model.Invoke = input + return b +} + +func (b *FunctionRefBuilder) Build() FunctionRef { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewFunctionsBuilder() *FunctionsBuilder { + builder := &FunctionsBuilder{} + builder.model = Functions{} + return builder +} + +type FunctionsBuilder struct { + model Functions +} + +func (b *FunctionsBuilder) Build() Functions { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewInjectStateBuilder() *InjectStateBuilder { + builder := &InjectStateBuilder{} + builder.model = InjectState{} + return builder +} + +type InjectStateBuilder struct { + model InjectState + timeouts *InjectStateTimeoutBuilder +} + +func (b *InjectStateBuilder) Data(input map[string]Object) *InjectStateBuilder { + b.model.Data = input + return b +} + +func (b *InjectStateBuilder) Timeouts() *InjectStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewInjectStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *InjectStateBuilder) Build() InjectState { + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewInjectStateTimeoutBuilder() *InjectStateTimeoutBuilder { + builder := &InjectStateTimeoutBuilder{} + builder.model = InjectStateTimeout{} + return builder +} + +type InjectStateTimeoutBuilder struct { + model InjectStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *InjectStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *InjectStateTimeoutBuilder) Build() InjectStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewMetadataBuilder() *MetadataBuilder { + builder := &MetadataBuilder{} + builder.model = Metadata{} + return builder +} + +type MetadataBuilder struct { + model Metadata +} + +func (b *MetadataBuilder) Build() Metadata { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewOAuth2AuthPropertiesBuilder() *OAuth2AuthPropertiesBuilder { + builder := &OAuth2AuthPropertiesBuilder{} + builder.model = OAuth2AuthProperties{} + builder.CommonBuilder = *NewCommonBuilder() + return builder +} + +type OAuth2AuthPropertiesBuilder struct { + model OAuth2AuthProperties + CommonBuilder +} + +func (b *OAuth2AuthPropertiesBuilder) Common() *CommonBuilder { + return &b.CommonBuilder +} + +func (b *OAuth2AuthPropertiesBuilder) Secret(input string) *OAuth2AuthPropertiesBuilder { + b.model.Secret = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Authority(input string) *OAuth2AuthPropertiesBuilder { + b.model.Authority = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) GrantType(input GrantType) *OAuth2AuthPropertiesBuilder { + b.model.GrantType = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) ClientID(input string) *OAuth2AuthPropertiesBuilder { + b.model.ClientID = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) ClientSecret(input string) *OAuth2AuthPropertiesBuilder { + b.model.ClientSecret = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Scopes(input []string) *OAuth2AuthPropertiesBuilder { + b.model.Scopes = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Username(input string) *OAuth2AuthPropertiesBuilder { + b.model.Username = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Password(input string) *OAuth2AuthPropertiesBuilder { + b.model.Password = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Audiences(input []string) *OAuth2AuthPropertiesBuilder { + b.model.Audiences = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) SubjectToken(input string) *OAuth2AuthPropertiesBuilder { + b.model.SubjectToken = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) RequestedSubject(input string) *OAuth2AuthPropertiesBuilder { + b.model.RequestedSubject = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) RequestedIssuer(input string) *OAuth2AuthPropertiesBuilder { + b.model.RequestedIssuer = input + return b +} + +func (b *OAuth2AuthPropertiesBuilder) Build() OAuth2AuthProperties { + b.model.Common = b.CommonBuilder.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewObjectBuilder() *ObjectBuilder { + builder := &ObjectBuilder{} + builder.model = Object{} + builder.slicevalue = []*ObjectBuilder{} + return builder +} + +type ObjectBuilder struct { + model Object + slicevalue []*ObjectBuilder +} + +func (b *ObjectBuilder) Type(input Type) *ObjectBuilder { + b.model.Type = input + return b +} + +func (b *ObjectBuilder) StringValue(input string) *ObjectBuilder { + b.model.StringValue = input + return b +} + +func (b *ObjectBuilder) IntValue(input int32) *ObjectBuilder { + b.model.IntValue = input + return b +} + +func (b *ObjectBuilder) FloatValue(input float64) *ObjectBuilder { + b.model.FloatValue = input + return b +} + +func (b *ObjectBuilder) MapValue(input map[string]Object) *ObjectBuilder { + b.model.MapValue = input + return b +} + +func (b *ObjectBuilder) AddSliceValue() *ObjectBuilder { + builder := NewObjectBuilder() + b.slicevalue = append(b.slicevalue, builder) + return builder +} + +func (b *ObjectBuilder) RemoveSliceValue(remove *ObjectBuilder) { + for i, val := range b.slicevalue { + if val == remove { + b.slicevalue[i] = b.slicevalue[len(b.slicevalue)-1] + b.slicevalue = b.slicevalue[:len(b.slicevalue)-1] + } + } +} +func (b *ObjectBuilder) BoolValue(input bool) *ObjectBuilder { + b.model.BoolValue = input + return b +} + +func (b *ObjectBuilder) Build() Object { + b.model.SliceValue = []Object{} + for _, v := range b.slicevalue { + b.model.SliceValue = append(b.model.SliceValue, v.Build()) + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewOnErrorBuilder() *OnErrorBuilder { + builder := &OnErrorBuilder{} + builder.model = OnError{} + return builder +} + +type OnErrorBuilder struct { + model OnError + transition *TransitionBuilder + end *EndBuilder +} + +func (b *OnErrorBuilder) ErrorRef(input string) *OnErrorBuilder { + b.model.ErrorRef = input + return b +} + +func (b *OnErrorBuilder) ErrorRefs(input []string) *OnErrorBuilder { + b.model.ErrorRefs = input + return b +} + +func (b *OnErrorBuilder) Transition() *TransitionBuilder { + if b.transition == nil { + b.transition = NewTransitionBuilder() + } + return b.transition +} + +func (b *OnErrorBuilder) End() *EndBuilder { + if b.end == nil { + b.end = NewEndBuilder() + } + return b.end +} + +func (b *OnErrorBuilder) Build() OnError { + if b.transition != nil { + transition := b.transition.Build() + b.model.Transition = &transition + } + if b.end != nil { + end := b.end.Build() + b.model.End = &end + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewOnEventsBuilder() *OnEventsBuilder { + builder := &OnEventsBuilder{} + builder.model = OnEvents{} + builder.model.ApplyDefault() + builder.actions = []*ActionBuilder{} + builder.eventdatafilter = NewEventDataFilterBuilder() + return builder +} + +type OnEventsBuilder struct { + model OnEvents + actions []*ActionBuilder + eventdatafilter *EventDataFilterBuilder +} + +func (b *OnEventsBuilder) EventRefs(input []string) *OnEventsBuilder { + b.model.EventRefs = input + return b +} + +func (b *OnEventsBuilder) ActionMode(input ActionMode) *OnEventsBuilder { + b.model.ActionMode = input + return b +} + +func (b *OnEventsBuilder) AddActions() *ActionBuilder { + builder := NewActionBuilder() + b.actions = append(b.actions, builder) + return builder +} + +func (b *OnEventsBuilder) RemoveActions(remove *ActionBuilder) { + for i, val := range b.actions { + if val == remove { + b.actions[i] = b.actions[len(b.actions)-1] + b.actions = b.actions[:len(b.actions)-1] + } + } +} +func (b *OnEventsBuilder) EventDataFilter() *EventDataFilterBuilder { + return b.eventdatafilter +} + +func (b *OnEventsBuilder) Build() OnEvents { + b.model.Actions = []Action{} + for _, v := range b.actions { + b.model.Actions = append(b.model.Actions, v.Build()) + } + b.model.EventDataFilter = b.eventdatafilter.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewOperationStateBuilder() *OperationStateBuilder { + builder := &OperationStateBuilder{} + builder.model = OperationState{} + builder.model.ApplyDefault() + builder.actions = []*ActionBuilder{} + return builder +} + +type OperationStateBuilder struct { + model OperationState + actions []*ActionBuilder + timeouts *OperationStateTimeoutBuilder +} + +func (b *OperationStateBuilder) ActionMode(input ActionMode) *OperationStateBuilder { + b.model.ActionMode = input + return b +} + +func (b *OperationStateBuilder) AddActions() *ActionBuilder { + builder := NewActionBuilder() + b.actions = append(b.actions, builder) + return builder +} + +func (b *OperationStateBuilder) RemoveActions(remove *ActionBuilder) { + for i, val := range b.actions { + if val == remove { + b.actions[i] = b.actions[len(b.actions)-1] + b.actions = b.actions[:len(b.actions)-1] + } + } +} +func (b *OperationStateBuilder) Timeouts() *OperationStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewOperationStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *OperationStateBuilder) Build() OperationState { + b.model.Actions = []Action{} + for _, v := range b.actions { + b.model.Actions = append(b.model.Actions, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewOperationStateTimeoutBuilder() *OperationStateTimeoutBuilder { + builder := &OperationStateTimeoutBuilder{} + builder.model = OperationStateTimeout{} + return builder +} + +type OperationStateTimeoutBuilder struct { + model OperationStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *OperationStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *OperationStateTimeoutBuilder) ActionExecTimeout(input string) *OperationStateTimeoutBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *OperationStateTimeoutBuilder) Build() OperationStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewParallelStateBuilder() *ParallelStateBuilder { + builder := &ParallelStateBuilder{} + builder.model = ParallelState{} + builder.model.ApplyDefault() + builder.branches = []*BranchBuilder{} + return builder +} + +type ParallelStateBuilder struct { + model ParallelState + branches []*BranchBuilder + timeouts *ParallelStateTimeoutBuilder +} + +func (b *ParallelStateBuilder) AddBranches() *BranchBuilder { + builder := NewBranchBuilder() + b.branches = append(b.branches, builder) + return builder +} + +func (b *ParallelStateBuilder) RemoveBranches(remove *BranchBuilder) { + for i, val := range b.branches { + if val == remove { + b.branches[i] = b.branches[len(b.branches)-1] + b.branches = b.branches[:len(b.branches)-1] + } + } +} +func (b *ParallelStateBuilder) CompletionType(input CompletionType) *ParallelStateBuilder { + b.model.CompletionType = input + return b +} + +func (b *ParallelStateBuilder) NumCompleted(input intstr.IntOrString) *ParallelStateBuilder { + b.model.NumCompleted = input + return b +} + +func (b *ParallelStateBuilder) Timeouts() *ParallelStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewParallelStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *ParallelStateBuilder) Build() ParallelState { + b.model.Branches = []Branch{} + for _, v := range b.branches { + b.model.Branches = append(b.model.Branches, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewParallelStateTimeoutBuilder() *ParallelStateTimeoutBuilder { + builder := &ParallelStateTimeoutBuilder{} + builder.model = ParallelStateTimeout{} + return builder +} + +type ParallelStateTimeoutBuilder struct { + model ParallelStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *ParallelStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *ParallelStateTimeoutBuilder) BranchExecTimeout(input string) *ParallelStateTimeoutBuilder { + b.model.BranchExecTimeout = input + return b +} + +func (b *ParallelStateTimeoutBuilder) Build() ParallelStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewProduceEventBuilder() *ProduceEventBuilder { + builder := &ProduceEventBuilder{} + builder.model = ProduceEvent{} + builder.data = NewObjectBuilder() + return builder +} + +type ProduceEventBuilder struct { + model ProduceEvent + data *ObjectBuilder +} + +func (b *ProduceEventBuilder) EventRef(input string) *ProduceEventBuilder { + b.model.EventRef = input + return b +} + +func (b *ProduceEventBuilder) Data() *ObjectBuilder { + return b.data +} + +func (b *ProduceEventBuilder) ContextAttributes(input map[string]string) *ProduceEventBuilder { + b.model.ContextAttributes = input + return b +} + +func (b *ProduceEventBuilder) Build() ProduceEvent { + b.model.Data = b.data.Build() + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewRetriesBuilder() *RetriesBuilder { + builder := &RetriesBuilder{} + builder.model = Retries{} + return builder +} + +type RetriesBuilder struct { + model Retries +} + +func (b *RetriesBuilder) Build() Retries { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewRetryBuilder() *RetryBuilder { + builder := &RetryBuilder{} + builder.model = Retry{} + builder.model.ApplyDefault() + return builder +} + +type RetryBuilder struct { + model Retry +} + +func (b *RetryBuilder) Name(input string) *RetryBuilder { + b.model.Name = input + return b +} + +func (b *RetryBuilder) Delay(input string) *RetryBuilder { + b.model.Delay = input + return b +} + +func (b *RetryBuilder) MaxDelay(input string) *RetryBuilder { + b.model.MaxDelay = input + return b +} + +func (b *RetryBuilder) Increment(input string) *RetryBuilder { + b.model.Increment = input + return b +} + +func (b *RetryBuilder) MaxAttempts(input intstr.IntOrString) *RetryBuilder { + b.model.MaxAttempts = input + return b +} + +func (b *RetryBuilder) Build() Retry { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewScheduleBuilder() *ScheduleBuilder { + builder := &ScheduleBuilder{} + builder.model = Schedule{} + return builder +} + +type ScheduleBuilder struct { + model Schedule + cron *CronBuilder +} + +func (b *ScheduleBuilder) Interval(input string) *ScheduleBuilder { + b.model.Interval = input + return b +} + +func (b *ScheduleBuilder) Cron() *CronBuilder { + if b.cron == nil { + b.cron = NewCronBuilder() + } + return b.cron +} + +func (b *ScheduleBuilder) Timezone(input string) *ScheduleBuilder { + b.model.Timezone = input + return b +} + +func (b *ScheduleBuilder) Build() Schedule { + if b.cron != nil { + cron := b.cron.Build() + b.model.Cron = &cron + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewSecretsBuilder() *SecretsBuilder { + builder := &SecretsBuilder{} + builder.model = Secrets{} + return builder +} + +type SecretsBuilder struct { + model Secrets +} + +func (b *SecretsBuilder) Build() Secrets { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewSleepBuilder() *SleepBuilder { + builder := &SleepBuilder{} + builder.model = Sleep{} + return builder +} + +type SleepBuilder struct { + model Sleep +} + +func (b *SleepBuilder) Before(input string) *SleepBuilder { + b.model.Before = input + return b +} + +func (b *SleepBuilder) After(input string) *SleepBuilder { + b.model.After = input + return b +} + +func (b *SleepBuilder) Build() Sleep { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewSleepStateBuilder() *SleepStateBuilder { + builder := &SleepStateBuilder{} + builder.model = SleepState{} + return builder +} + +type SleepStateBuilder struct { + model SleepState + timeouts *SleepStateTimeoutBuilder +} + +func (b *SleepStateBuilder) Duration(input string) *SleepStateBuilder { + b.model.Duration = input + return b +} + +func (b *SleepStateBuilder) Timeouts() *SleepStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewSleepStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *SleepStateBuilder) Build() SleepState { + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewSleepStateTimeoutBuilder() *SleepStateTimeoutBuilder { + builder := &SleepStateTimeoutBuilder{} + builder.model = SleepStateTimeout{} + return builder +} + +type SleepStateTimeoutBuilder struct { + model SleepStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *SleepStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *SleepStateTimeoutBuilder) Build() SleepStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewStartBuilder() *StartBuilder { + builder := &StartBuilder{} + builder.model = Start{} + return builder +} + +type StartBuilder struct { + model Start + schedule *ScheduleBuilder +} + +func (b *StartBuilder) StateName(input string) *StartBuilder { + b.model.StateName = input + return b +} + +func (b *StartBuilder) Schedule() *ScheduleBuilder { + if b.schedule == nil { + b.schedule = NewScheduleBuilder() + } + return b.schedule +} + +func (b *StartBuilder) Build() Start { + if b.schedule != nil { + schedule := b.schedule.Build() + b.model.Schedule = &schedule + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewStateBuilder() *StateBuilder { + builder := &StateBuilder{} + builder.model = State{} + builder.BaseStateBuilder = *NewBaseStateBuilder() + return builder +} + +type StateBuilder struct { + model State + BaseStateBuilder + *DelayStateBuilder + *EventStateBuilder + *OperationStateBuilder + *ParallelStateBuilder + *SwitchStateBuilder + *ForEachStateBuilder + *InjectStateBuilder + *CallbackStateBuilder + *SleepStateBuilder +} + +func (b *StateBuilder) ID(input string) *StateBuilder { + b.BaseStateBuilder.ID(input) + return b +} + +func (b *StateBuilder) Name(input string) *StateBuilder { + b.BaseStateBuilder.Name(input) + return b +} + +func (b *StateBuilder) Type(input StateType) *StateBuilder { + b.BaseStateBuilder.Type(input) + return b +} + +func (b *StateBuilder) CompensatedBy(input string) *StateBuilder { + b.BaseStateBuilder.CompensatedBy(input) + return b +} + +func (b *StateBuilder) UsedForCompensation(input bool) *StateBuilder { + b.BaseStateBuilder.UsedForCompensation(input) + return b +} + +func (b *StateBuilder) DelayState() *DelayStateBuilder { + if b.DelayStateBuilder == nil { + b.DelayStateBuilder = NewDelayStateBuilder() + } + return b.DelayStateBuilder +} + +func (b *StateBuilder) TimeDelay(input string) *StateBuilder { + b.DelayStateBuilder.TimeDelay(input) + return b +} + +func (b *StateBuilder) EventState() *EventStateBuilder { + if b.EventStateBuilder == nil { + b.EventStateBuilder = NewEventStateBuilder() + } + return b.EventStateBuilder +} + +func (b *StateBuilder) Exclusive(input bool) *StateBuilder { + b.EventStateBuilder.Exclusive(input) + return b +} + +func (b *StateBuilder) OperationState() *OperationStateBuilder { + if b.OperationStateBuilder == nil { + b.OperationStateBuilder = NewOperationStateBuilder() + } + return b.OperationStateBuilder +} + +func (b *StateBuilder) ActionMode(input ActionMode) *StateBuilder { + b.OperationStateBuilder.ActionMode(input) + return b +} + +func (b *StateBuilder) ParallelState() *ParallelStateBuilder { + if b.ParallelStateBuilder == nil { + b.ParallelStateBuilder = NewParallelStateBuilder() + } + return b.ParallelStateBuilder +} + +func (b *StateBuilder) CompletionType(input CompletionType) *StateBuilder { + b.ParallelStateBuilder.CompletionType(input) + return b +} + +func (b *StateBuilder) SwitchState() *SwitchStateBuilder { + if b.SwitchStateBuilder == nil { + b.SwitchStateBuilder = NewSwitchStateBuilder() + } + return b.SwitchStateBuilder +} + +func (b *StateBuilder) ForEachState() *ForEachStateBuilder { + if b.ForEachStateBuilder == nil { + b.ForEachStateBuilder = NewForEachStateBuilder() + } + return b.ForEachStateBuilder +} + +func (b *StateBuilder) InputCollection(input string) *StateBuilder { + b.ForEachStateBuilder.InputCollection(input) + return b +} + +func (b *StateBuilder) OutputCollection(input string) 
*StateBuilder {
+	b.ForEachStateBuilder.OutputCollection(input)
+	return b
+}
+
+func (b *StateBuilder) IterationParam(input string) *StateBuilder {
+	b.ForEachStateBuilder.IterationParam(input)
+	return b
+}
+
+func (b *StateBuilder) Mode(input ForEachModeType) *StateBuilder {
+	b.ForEachStateBuilder.Mode(input)
+	return b
+}
+
+func (b *StateBuilder) InjectState() *InjectStateBuilder {
+	if b.InjectStateBuilder == nil {
+		b.InjectStateBuilder = NewInjectStateBuilder()
+	}
+	return b.InjectStateBuilder
+}
+
+func (b *StateBuilder) CallbackState() *CallbackStateBuilder {
+	if b.CallbackStateBuilder == nil {
+		b.CallbackStateBuilder = NewCallbackStateBuilder()
+	}
+	return b.CallbackStateBuilder
+}
+
+func (b *StateBuilder) EventRef(input string) *StateBuilder {
+	b.CallbackStateBuilder.EventRef(input)
+	return b
+}
+
+func (b *StateBuilder) SleepState() *SleepStateBuilder {
+	if b.SleepStateBuilder == nil {
+		b.SleepStateBuilder = NewSleepStateBuilder()
+	}
+	return b.SleepStateBuilder
+}
+
+func (b *StateBuilder) Duration(input string) *StateBuilder {
+	b.SleepStateBuilder.Duration(input)
+	return b
+}
+
+func (b *StateBuilder) Build() State {
+	b.model.BaseState = b.BaseStateBuilder.Build()
+	if b.DelayStateBuilder != nil {
+		delaystate := b.DelayStateBuilder.Build()
+		b.model.DelayState = &delaystate
+	}
+	if b.EventStateBuilder != nil {
+		eventstate := b.EventStateBuilder.Build()
+		b.model.EventState = &eventstate
+	}
+	if b.OperationStateBuilder != nil {
+		operationstate := b.OperationStateBuilder.Build()
+		b.model.OperationState = &operationstate
+	}
+	if b.ParallelStateBuilder != nil {
+		parallelstate := b.ParallelStateBuilder.Build()
+		b.model.ParallelState = &parallelstate
+	}
+	if b.SwitchStateBuilder != nil {
+		switchstate := b.SwitchStateBuilder.Build()
+		b.model.SwitchState = &switchstate
+	}
+	if b.ForEachStateBuilder != nil {
+		foreachstate := b.ForEachStateBuilder.Build()
+		b.model.ForEachState = &foreachstate
+	}
+	if b.InjectStateBuilder != nil {
+		injectstate := b.InjectStateBuilder.Build()
+		b.model.InjectState = &injectstate
+	}
+	if b.CallbackStateBuilder != nil {
+		callbackstate := b.CallbackStateBuilder.Build()
+		b.model.CallbackState = &callbackstate
+	}
+	if b.SleepStateBuilder != nil {
+		sleepstate := b.SleepStateBuilder.Build()
+		b.model.SleepState = &sleepstate
+	}
+	return b.model
+}
+
+// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
+func NewStateDataFilterBuilder() *StateDataFilterBuilder {
+	builder := &StateDataFilterBuilder{}
+	builder.model = StateDataFilter{}
+	return builder
+}
+
+type StateDataFilterBuilder struct {
+	model StateDataFilter
+}
+
+func (b *StateDataFilterBuilder) Input(input string) *StateDataFilterBuilder {
+	b.model.Input = input
+	return b
+}
+
+func (b *StateDataFilterBuilder) Output(input string) *StateDataFilterBuilder {
+	b.model.Output = input
+	return b
+}
+
+func (b *StateDataFilterBuilder) Build() StateDataFilter {
+	return b.model
+}
+
+// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
+func NewStateExecTimeoutBuilder() *StateExecTimeoutBuilder { + builder := &StateExecTimeoutBuilder{} + builder.model = StateExecTimeout{} + return builder +} + +type StateExecTimeoutBuilder struct { + model StateExecTimeout +} + +func (b *StateExecTimeoutBuilder) Single(input string) *StateExecTimeoutBuilder { + b.model.Single = input + return b +} + +func (b *StateExecTimeoutBuilder) Total(input string) *StateExecTimeoutBuilder { + b.model.Total = input + return b +} + +func (b *StateExecTimeoutBuilder) Build() StateExecTimeout { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewStatesBuilder() *StatesBuilder { + builder := &StatesBuilder{} + builder.model = States{} + return builder +} + +type StatesBuilder struct { + model States +} + +func (b *StatesBuilder) Build() States { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewSwitchStateBuilder() *SwitchStateBuilder { + builder := &SwitchStateBuilder{} + builder.model = SwitchState{} + builder.defaultcondition = NewDefaultConditionBuilder() + builder.eventconditions = []*EventConditionBuilder{} + builder.dataconditions = []*DataConditionBuilder{} + return builder +} + +type SwitchStateBuilder struct { + model SwitchState + defaultcondition *DefaultConditionBuilder + eventconditions []*EventConditionBuilder + dataconditions []*DataConditionBuilder + timeouts *SwitchStateTimeoutBuilder +} + +func (b *SwitchStateBuilder) DefaultCondition() *DefaultConditionBuilder { + return b.defaultcondition +} + +func (b *SwitchStateBuilder) AddEventConditions() *EventConditionBuilder { + builder := NewEventConditionBuilder() + b.eventconditions = append(b.eventconditions, builder) + return builder +} + +func (b *SwitchStateBuilder) RemoveEventConditions(remove *EventConditionBuilder) { + for i, val := range b.eventconditions { + if val == remove { + b.eventconditions[i] = b.eventconditions[len(b.eventconditions)-1] + b.eventconditions = b.eventconditions[:len(b.eventconditions)-1] + } + } +} +func (b *SwitchStateBuilder) AddDataConditions() *DataConditionBuilder { + builder := NewDataConditionBuilder() + b.dataconditions = append(b.dataconditions, builder) + return builder +} + +func (b *SwitchStateBuilder) RemoveDataConditions(remove *DataConditionBuilder) { + for i, val := range b.dataconditions { + if val == remove { + b.dataconditions[i] = b.dataconditions[len(b.dataconditions)-1] + b.dataconditions = b.dataconditions[:len(b.dataconditions)-1] + } + } +} +func (b *SwitchStateBuilder) Timeouts() *SwitchStateTimeoutBuilder { + if b.timeouts == nil { + b.timeouts = NewSwitchStateTimeoutBuilder() + } + return b.timeouts +} + +func (b *SwitchStateBuilder) Build() SwitchState { + b.model.DefaultCondition = b.defaultcondition.Build() + b.model.EventConditions = []EventCondition{} + for _, v := range b.eventconditions { + b.model.EventConditions = append(b.model.EventConditions, v.Build()) + } + b.model.DataConditions = []DataCondition{} + for _, v := range b.dataconditions { + b.model.DataConditions = append(b.model.DataConditions, v.Build()) + } + if b.timeouts != nil { + timeouts := b.timeouts.Build() + b.model.Timeouts = &timeouts + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
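The switch-state builder introduces the Add*/Remove* pair generated for collection fields: Add appends a child builder and returns it for further configuration, Remove drops it again with a swap-with-last delete (so ordering is not preserved), and Build() flattens whatever remains into the model slices. A sketch of that lifecycle; configuring the returned condition builders is omitted since their setters are defined elsewhere in this file:

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	sw := model.NewSwitchStateBuilder()

	// Add two event conditions, then remove the first one again.
	first := sw.AddEventConditions()
	sw.AddEventConditions()
	sw.RemoveEventConditions(first)

	state := sw.Build()
	fmt.Println(len(state.EventConditions)) // 1: only the second condition remains
}
```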
+func NewSwitchStateTimeoutBuilder() *SwitchStateTimeoutBuilder { + builder := &SwitchStateTimeoutBuilder{} + builder.model = SwitchStateTimeout{} + return builder +} + +type SwitchStateTimeoutBuilder struct { + model SwitchStateTimeout + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *SwitchStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *SwitchStateTimeoutBuilder) EventTimeout(input string) *SwitchStateTimeoutBuilder { + b.model.EventTimeout = input + return b +} + +func (b *SwitchStateTimeoutBuilder) Build() SwitchStateTimeout { + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewTimeoutsBuilder() *TimeoutsBuilder { + builder := &TimeoutsBuilder{} + builder.model = Timeouts{} + return builder +} + +type TimeoutsBuilder struct { + model Timeouts + workflowexectimeout *WorkflowExecTimeoutBuilder + stateexectimeout *StateExecTimeoutBuilder +} + +func (b *TimeoutsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { + if b.workflowexectimeout == nil { + b.workflowexectimeout = NewWorkflowExecTimeoutBuilder() + } + return b.workflowexectimeout +} + +func (b *TimeoutsBuilder) StateExecTimeout() *StateExecTimeoutBuilder { + if b.stateexectimeout == nil { + b.stateexectimeout = NewStateExecTimeoutBuilder() + } + return b.stateexectimeout +} + +func (b *TimeoutsBuilder) ActionExecTimeout(input string) *TimeoutsBuilder { + b.model.ActionExecTimeout = input + return b +} + +func (b *TimeoutsBuilder) BranchExecTimeout(input string) *TimeoutsBuilder { + b.model.BranchExecTimeout = input + return b +} + +func (b *TimeoutsBuilder) EventTimeout(input string) *TimeoutsBuilder { + b.model.EventTimeout = input + return b +} + +func (b *TimeoutsBuilder) Build() Timeouts { + if b.workflowexectimeout != nil { + workflowexectimeout := b.workflowexectimeout.Build() + b.model.WorkflowExecTimeout = &workflowexectimeout + } + if b.stateexectimeout != nil { + stateexectimeout := b.stateexectimeout.Build() + b.model.StateExecTimeout = &stateexectimeout + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func NewTransitionBuilder() *TransitionBuilder { + builder := &TransitionBuilder{} + builder.model = Transition{} + builder.produceevents = []*ProduceEventBuilder{} + return builder +} + +type TransitionBuilder struct { + model Transition + stateparent *StateBuilder + produceevents []*ProduceEventBuilder +} + +func (b *TransitionBuilder) stateParent() *StateBuilder { + if b.stateparent == nil { + b.stateparent = NewStateBuilder() + } + return b.stateparent +} + +func (b *TransitionBuilder) NextState(input string) *TransitionBuilder { + b.model.NextState = input + return b +} + +func (b *TransitionBuilder) AddProduceEvents() *ProduceEventBuilder { + builder := NewProduceEventBuilder() + b.produceevents = append(b.produceevents, builder) + return builder +} + +func (b *TransitionBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { + for i, val := range b.produceevents { + if val == remove { + b.produceevents[i] = b.produceevents[len(b.produceevents)-1] + b.produceevents = b.produceevents[:len(b.produceevents)-1] + } + } +} +func (b *TransitionBuilder) Compensate(input bool) *TransitionBuilder { + b.model.Compensate = input + return b +} + +func (b *TransitionBuilder) Build() Transition { + if b.stateparent != nil { + stateparent := b.stateparent.Build() + b.model.stateParent = &stateparent + } + b.model.ProduceEvents = []ProduceEvent{} + for _, v := range b.produceevents { + b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewWorkflowBuilder() *WorkflowBuilder { + builder := &WorkflowBuilder{} + builder.model = Workflow{} + builder.BaseWorkflowBuilder = *NewBaseWorkflowBuilder() + builder.states = []*StateBuilder{} + builder.events = []*EventBuilder{} + builder.functions = []*FunctionBuilder{} + builder.retries = []*RetryBuilder{} + return builder +} + +type WorkflowBuilder struct { + model Workflow + BaseWorkflowBuilder + states []*StateBuilder + events []*EventBuilder + functions []*FunctionBuilder + retries []*RetryBuilder +} + +func (b *WorkflowBuilder) ID(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.ID(input) + return b +} + +func (b *WorkflowBuilder) Key(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.Key(input) + return b +} + +func (b *WorkflowBuilder) Name(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.Name(input) + return b +} + +func (b *WorkflowBuilder) Description(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.Description(input) + return b +} + +func (b *WorkflowBuilder) Version(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.Version(input) + return b +} + +func (b *WorkflowBuilder) SpecVersion(input string) *WorkflowBuilder { + b.BaseWorkflowBuilder.SpecVersion(input) + return b +} + +func (b *WorkflowBuilder) ExpressionLang(input ExpressionLangType) *WorkflowBuilder { + b.BaseWorkflowBuilder.ExpressionLang(input) + return b +} + +func (b *WorkflowBuilder) KeepActive(input bool) *WorkflowBuilder { + b.BaseWorkflowBuilder.KeepActive(input) + return b +} + +func (b *WorkflowBuilder) AutoRetries(input bool) *WorkflowBuilder { + b.BaseWorkflowBuilder.AutoRetries(input) + return b +} + +func (b *WorkflowBuilder) AddStates() *StateBuilder { + builder := NewStateBuilder() + b.states = append(b.states, builder) + return builder +} + +func (b *WorkflowBuilder) RemoveStates(remove *StateBuilder) { + for i, val := range b.states { + if val == remove { + b.states[i] = 
b.states[len(b.states)-1] + b.states = b.states[:len(b.states)-1] + } + } +} +func (b *WorkflowBuilder) AddEvents() *EventBuilder { + builder := NewEventBuilder() + b.events = append(b.events, builder) + return builder +} + +func (b *WorkflowBuilder) RemoveEvents(remove *EventBuilder) { + for i, val := range b.events { + if val == remove { + b.events[i] = b.events[len(b.events)-1] + b.events = b.events[:len(b.events)-1] + } + } +} +func (b *WorkflowBuilder) AddFunctions() *FunctionBuilder { + builder := NewFunctionBuilder() + b.functions = append(b.functions, builder) + return builder +} + +func (b *WorkflowBuilder) RemoveFunctions(remove *FunctionBuilder) { + for i, val := range b.functions { + if val == remove { + b.functions[i] = b.functions[len(b.functions)-1] + b.functions = b.functions[:len(b.functions)-1] + } + } +} +func (b *WorkflowBuilder) AddRetries() *RetryBuilder { + builder := NewRetryBuilder() + b.retries = append(b.retries, builder) + return builder +} + +func (b *WorkflowBuilder) RemoveRetries(remove *RetryBuilder) { + for i, val := range b.retries { + if val == remove { + b.retries[i] = b.retries[len(b.retries)-1] + b.retries = b.retries[:len(b.retries)-1] + } + } +} +func (b *WorkflowBuilder) Build() Workflow { + b.model.BaseWorkflow = b.BaseWorkflowBuilder.Build() + b.model.States = []State{} + for _, v := range b.states { + b.model.States = append(b.model.States, v.Build()) + } + b.model.Events = []Event{} + for _, v := range b.events { + b.model.Events = append(b.model.Events, v.Build()) + } + b.model.Functions = []Function{} + for _, v := range b.functions { + b.model.Functions = append(b.model.Functions, v.Build()) + } + b.model.Retries = []Retry{} + for _, v := range b.retries { + b.model.Retries = append(b.model.Retries, v.Build()) + } + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func NewWorkflowExecTimeoutBuilder() *WorkflowExecTimeoutBuilder { + builder := &WorkflowExecTimeoutBuilder{} + builder.model = WorkflowExecTimeout{} + builder.model.ApplyDefault() + return builder +} + +type WorkflowExecTimeoutBuilder struct { + model WorkflowExecTimeout +} + +func (b *WorkflowExecTimeoutBuilder) Duration(input string) *WorkflowExecTimeoutBuilder { + b.model.Duration = input + return b +} + +func (b *WorkflowExecTimeoutBuilder) Interrupt(input bool) *WorkflowExecTimeoutBuilder { + b.model.Interrupt = input + return b +} + +func (b *WorkflowExecTimeoutBuilder) RunBefore(input string) *WorkflowExecTimeoutBuilder { + b.model.RunBefore = input + return b +} + +func (b *WorkflowExecTimeoutBuilder) Build() WorkflowExecTimeout { + return b.model +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
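At the top level, the workflow builder above composes the base-workflow fields with the states, events, functions and retries collections. A minimal end-to-end sketch that mirrors the builder tests later in this series; the End()/Terminate() chain assumes the base-state builder exercised by those tests, and all literal values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	wf := model.NewWorkflowBuilder().
		ID("greeting").
		Name("Greeting Workflow").
		Version("1.0.0").
		SpecVersion("0.8")

	// AddStates returns a StateBuilder that is flattened into the workflow
	// when wf.Build() runs.
	start := wf.AddStates().
		Name("Start").
		Type(model.StateTypeInject)
	start.End().Terminate(true)

	workflow := wf.Build()
	fmt.Println(workflow.ID, len(workflow.States))
}
```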
+func NewWorkflowRefBuilder() *WorkflowRefBuilder { + builder := &WorkflowRefBuilder{} + builder.model = WorkflowRef{} + builder.model.ApplyDefault() + return builder +} + +type WorkflowRefBuilder struct { + model WorkflowRef +} + +func (b *WorkflowRefBuilder) WorkflowID(input string) *WorkflowRefBuilder { + b.model.WorkflowID = input + return b +} + +func (b *WorkflowRefBuilder) Version(input string) *WorkflowRefBuilder { + b.model.Version = input + return b +} + +func (b *WorkflowRefBuilder) Invoke(input InvokeKind) *WorkflowRefBuilder { + b.model.Invoke = input + return b +} + +func (b *WorkflowRefBuilder) OnParentComplete(input OnParentCompleteType) *WorkflowRefBuilder { + b.model.OnParentComplete = input + return b +} + +func (b *WorkflowRefBuilder) Build() WorkflowRef { + return b.model +} From b1933e7bb601f33e0206518130891058e58c6a28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Wed, 3 Apr 2024 09:38:50 -0300 Subject: [PATCH 083/110] Support for validating structs created by builder (#198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Support to validate any struct Signed-off-by: AndrΓ© R. de Miranda * Use custom messages in validation Signed-off-by: AndrΓ© R. de Miranda --------- Signed-off-by: AndrΓ© R. de Miranda --- builder/builder.go | 26 +++++++++------ builder/builder_test.go | 21 +++++++++++- model/workflow_validator.go | 64 +++++++++++++++++++++++-------------- parser/parser.go | 2 +- parser/parser_test.go | 7 ++-- 5 files changed, 82 insertions(+), 38 deletions(-) diff --git a/builder/builder.go b/builder/builder.go index 1bb4089..97ef3b2 100644 --- a/builder/builder.go +++ b/builder/builder.go @@ -27,13 +27,12 @@ func New() *model.WorkflowBuilder { return model.NewWorkflowBuilder() } -func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { - workflow := builder.Build() - ctx := model.NewValidatorContext(&workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { +func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { + data, err := Json(builder) + if err != nil { return nil, err } - return &workflow, nil + return yaml.JSONToYAML(data) } func Json(builder *model.WorkflowBuilder) ([]byte, error) { @@ -44,10 +43,19 @@ func Json(builder *model.WorkflowBuilder) ([]byte, error) { return json.Marshal(workflow) } -func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { - data, err := Json(builder) - if err != nil { +func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { + workflow := builder.Build() + ctx := model.NewValidatorContext(&workflow) + if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { return nil, err } - return yaml.JSONToYAML(data) + return &workflow, nil +} + +func Validate(object interface{}) error { + ctx := model.NewValidatorContext(object) + if err := val.GetValidator().StructCtx(ctx, object); err != nil { + return val.WorkflowError(err) + } + return nil } diff --git a/builder/builder_test.go b/builder/builder_test.go index 5aa661f..47a13d2 100644 --- a/builder/builder_test.go +++ b/builder/builder_test.go @@ -17,8 +17,10 @@ package builder import ( "testing" - "github.com/serverlessworkflow/sdk-go/v2/model" "github.com/stretchr/testify/assert" + + "github.com/serverlessworkflow/sdk-go/v2/model" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) func prepareBuilder() *model.WorkflowBuilder { @@ -43,6 +45,23 @@ func prepareBuilder() *model.WorkflowBuilder { return builder } +func TestValidate(t 
*testing.T) { + state1 := model.NewStateBuilder(). + Name("state"). + Type(model.StateTypeInject) + state1.End().Terminate(true) + err := Validate(state1) + assert.NoError(t, err) + + state2 := model.NewStateBuilder(). + Type(model.StateTypeInject) + state2.End().Terminate(true) + err = Validate(state2.Build()) + if assert.Error(t, err) { + assert.Equal(t, "state.name is required", err.(val.WorkflowErrors)[0].Error()) + } +} + func TestObject(t *testing.T) { workflow, err := Object(prepareBuilder()) if assert.NoError(t, err) { diff --git a/model/workflow_validator.go b/model/workflow_validator.go index ad72717..fd3d7bb 100644 --- a/model/workflow_validator.go +++ b/model/workflow_validator.go @@ -75,68 +75,84 @@ func (c *ValidatorContext) init(workflow *Workflow) { } func (c *ValidatorContext) ExistState(name string) bool { + if c.States == nil { + return true + } _, ok := c.States[name] return ok } func (c *ValidatorContext) ExistFunction(name string) bool { + if c.Functions == nil { + return true + } _, ok := c.Functions[name] return ok } func (c *ValidatorContext) ExistEvent(name string) bool { + if c.Events == nil { + return true + } _, ok := c.Events[name] return ok } func (c *ValidatorContext) ExistRetry(name string) bool { + if c.Retries == nil { + return true + } _, ok := c.Retries[name] return ok } func (c *ValidatorContext) ExistError(name string) bool { + if c.Errors == nil { + return true + } _, ok := c.Errors[name] return ok } -func NewValidatorContext(workflow *Workflow) context.Context { - for i := range workflow.States { - s := &workflow.States[i] - if s.BaseState.Transition != nil { - s.BaseState.Transition.stateParent = s - } - for _, onError := range s.BaseState.OnErrors { - if onError.Transition != nil { - onError.Transition.stateParent = s - } - } - if s.Type == StateTypeSwitch { - if s.SwitchState.DefaultCondition.Transition != nil { - s.SwitchState.DefaultCondition.Transition.stateParent = s +func NewValidatorContext(object any) context.Context { + contextValue := ValidatorContext{} + + if workflow, ok := object.(*Workflow); ok { + for i := range workflow.States { + s := &workflow.States[i] + if s.BaseState.Transition != nil { + s.BaseState.Transition.stateParent = s } - for _, e := range s.SwitchState.EventConditions { - if e.Transition != nil { - e.Transition.stateParent = s + for _, onError := range s.BaseState.OnErrors { + if onError.Transition != nil { + onError.Transition.stateParent = s } } - for _, d := range s.SwitchState.DataConditions { - if d.Transition != nil { - d.Transition.stateParent = s + if s.Type == StateTypeSwitch { + if s.SwitchState.DefaultCondition.Transition != nil { + s.SwitchState.DefaultCondition.Transition.stateParent = s + } + for _, e := range s.SwitchState.EventConditions { + if e.Transition != nil { + e.Transition.stateParent = s + } + } + for _, d := range s.SwitchState.DataConditions { + if d.Transition != nil { + d.Transition.stateParent = s + } } } } + contextValue.init(workflow) } - contextValue := ValidatorContext{} - contextValue.init(workflow) - return context.WithValue(context.Background(), ValidatorContextValue, contextValue) } func init() { // TODO: create states graph to complex check - // val.GetValidator().RegisterStructValidationCtx(val.ValidationWrap(nil, workflowStructLevelValidation), Workflow{}) val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onErrorStructLevelValidationCtx), OnError{}) val.GetValidator().RegisterStructValidationCtx(ValidationWrap(transitionStructLevelValidationCtx), Transition{}) 
val.GetValidator().RegisterStructValidationCtx(ValidationWrap(startStructLevelValidationCtx), Start{}) diff --git a/parser/parser.go b/parser/parser.go index fc50692..7b7ad93 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -53,7 +53,7 @@ func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { ctx := model.NewValidatorContext(workflow) if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, err + return nil, val.WorkflowError(err) } return workflow, nil } diff --git a/parser/parser_test.go b/parser/parser_test.go index 91dc273..faa28b8 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -1016,7 +1016,7 @@ states: // Make sure that the Action FunctionRef is unmarshalled correctly assert.Equal(t, model.FromString("${ .singlemessage }"), workflow.States[5].ForEachState.Actions[0].FunctionRef.Arguments["message"]) assert.Equal(t, "sendTextFunction", workflow.States[5].ForEachState.Actions[0].FunctionRef.RefName) - assert.Nil(t, err) + assert.NoError(t, err) }) @@ -1063,8 +1063,9 @@ states: end: terminate: true `)) - assert.Error(t, err) - assert.Regexp(t, `validation for \'DataConditions\' failed on the \'required\' tag`, err) + if assert.Error(t, err) { + assert.Equal(t, `workflow.states[0].switchState.dataConditions is required`, err.Error()) + } assert.Nil(t, workflow) }) From 66cd3e5bf3cffb7d8725f9a00d5723ab64d17d78 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Wed, 8 May 2024 10:14:41 -0300 Subject: [PATCH 084/110] Pin deepcopy version. (#200) chore: Fix go: k8s.io/code-generator/cmd/deepcopy-gen@latest (in k8s.io/code-generator@v0.30.0): go.mod:5: invalid go version '1.22.0': must match format 1.23 Signed-off-by: Spolti --- hack/deepcopy-gen.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh index f8d30f3..9c7fe0f 100755 --- a/hack/deepcopy-gen.sh +++ b/hack/deepcopy-gen.sh @@ -32,7 +32,8 @@ shift 1 # make sure your GOPATH env is properly set. # it will go under $GOPATH/bin cd "$(dirname "${0}")" - GO111MODULE=on go install k8s.io/code-generator/cmd/deepcopy-gen@latest + DEEPCOPY_VERSION="v0.29.4" + GO111MODULE=on go install k8s.io/code-generator/cmd/deepcopy-gen@${DEEPCOPY_VERSION} ) function codegen::join() { local IFS="$1"; shift; echo "$*"; } From bb89ed03b57c519eb6b7e2387aadd60ec6fe2521 Mon Sep 17 00:00:00 2001 From: Filippe Spolti Date: Wed, 8 May 2024 13:43:22 -0300 Subject: [PATCH 085/110] DataInputSchema can only be a string and not an object (#196) --- Makefile | 1 + builder/builder_test.go | 9 ++- model/workflow.go | 43 +++++++++++++- model/workflow_test.go | 57 ++++++++++++++----- model/workflow_validator_test.go | 5 +- model/zz_generated.buildergen.go | 41 +++++++++++-- model/zz_generated.deepcopy.go | 7 ++- parser/parser_test.go | 22 ++++++- parser/testdata/datainputschema.json | 16 ++++++ .../workflows/dataInputSchemaObject.json | 56 ++++++++++++++++++ .../workflows/dataInputSchemaValidation.yaml | 2 +- util/unmarshal.go | 13 ++++- util/unmarshal_test.go | 10 ++-- 13 files changed, 245 insertions(+), 37 deletions(-) create mode 100644 parser/testdata/datainputschema.json create mode 100644 parser/testdata/workflows/dataInputSchemaObject.json diff --git a/Makefile b/Makefile index 0b0833f..06fde64 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,7 @@ lint: .PHONY: test coverage="false" + test: deepcopy buildergen make lint @go test ./... 
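The point of this patch is that dataInputSchema.schema now accepts either an external URI (a plain string) or an inline JSON Schema object, as the model/workflow.go and test hunks below spell out. A self-contained sketch of the inline form, assuming the unmarshalling behaviour exercised by the new tests; the schema content is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Inline object form. The URI form would instead look like
	// {"schema": "file://testdata/datainputschema.json"}.
	data := []byte(`{"schema": {"title": "Hello World Schema"}}`)

	var dis model.DataInputSchema
	if err := json.Unmarshal(data, &dis); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	// The inner object lands in Schema; failOnValidationErrors keeps its
	// default (true) because it was not set here.
	fmt.Println(dis.Schema != nil, dis.FailOnValidationErrors)
}
```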
diff --git a/builder/builder_test.go b/builder/builder_test.go index 47a13d2..97b8c84 100644 --- a/builder/builder_test.go +++ b/builder/builder_test.go @@ -17,6 +17,7 @@ package builder import ( "testing" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/serverlessworkflow/sdk-go/v2/model" @@ -58,7 +59,13 @@ func TestValidate(t *testing.T) { state2.End().Terminate(true) err = Validate(state2.Build()) if assert.Error(t, err) { - assert.Equal(t, "state.name is required", err.(val.WorkflowErrors)[0].Error()) + var workflowErrors val.WorkflowErrors + if errors.As(err, &workflowErrors) { + assert.Equal(t, "state.name is required", workflowErrors[0].Error()) + } else { + // Handle other error types if necessary + t.Errorf("Unexpected error: %v", err) + } } } diff --git a/model/workflow.go b/model/workflow.go index 8f7f032..aa72d1f 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -15,7 +15,9 @@ package model import ( + "bytes" "encoding/json" + "errors" "github.com/serverlessworkflow/sdk-go/v2/util" ) @@ -121,7 +123,7 @@ type BaseWorkflow struct { // qualities. // +optional Annotations []string `json:"annotations,omitempty"` - // DataInputSchema URI of the JSON Schema used to validate the workflow data input + // DataInputSchema URI or Object of the JSON Schema used to validate the workflow data input // +optional DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` // Serverless Workflow schema version @@ -225,6 +227,7 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { return nil } +// States ... // +kubebuilder:validation:MinItems=1 type States []State @@ -510,7 +513,7 @@ type StateDataFilter struct { // +builder-gen:new-call=ApplyDefault type DataInputSchema struct { // +kubebuilder:validation:Required - Schema string `json:"schema" validate:"required"` + Schema *Object `json:"schema" validate:"required"` // +kubebuilder:validation:Required FailOnValidationErrors bool `json:"failOnValidationErrors"` } @@ -520,7 +523,41 @@ type dataInputSchemaUnmarshal DataInputSchema // UnmarshalJSON implements json.Unmarshaler func (d *DataInputSchema) UnmarshalJSON(data []byte) error { d.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &d.Schema, (*dataInputSchemaUnmarshal)(d)) + + // expected: data = "{\"key\": \"value\"}" + // data = {"key": "value"} + // data = "file://..." 
+ // data = { "schema": "{\"key\": \"value\"}", "failOnValidationErrors": true } + // data = { "schema": {"key": "value"}, "failOnValidationErrors": true } + // data = { "schema": "file://...", "failOnValidationErrors": true } + + schemaString := "" + err := util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &schemaString, (*dataInputSchemaUnmarshal)(d)) + if err != nil { + return err + } + + if d.Schema != nil { + if d.Schema.Type == Map { + return nil + + } else if d.Schema.Type == String { + schemaString = d.Schema.StringValue + + } else { + return errors.New("invalid dataInputSchema must be a string or object") + } + } + + if schemaString != "" { + data = []byte(schemaString) + if bytes.TrimSpace(data)[0] != '{' { + data = []byte("\"" + schemaString + "\"") + } + } + + d.Schema = new(Object) + return util.UnmarshalObjectOrFile("schema", data, &d.Schema) } // ApplyDefault set the default values for Data Input Schema diff --git a/model/workflow_test.go b/model/workflow_test.go index 352a751..a5aa42a 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -498,6 +498,13 @@ func TestTransitionUnmarshalJSON(t *testing.T) { } func TestDataInputSchemaUnmarshalJSON(t *testing.T) { + + var schemaName Object + err := json.Unmarshal([]byte("{\"key\": \"value\"}"), &schemaName) + if !assert.NoError(t, err) { + return + } + type testCase struct { desp string data string @@ -508,39 +515,58 @@ func TestDataInputSchemaUnmarshalJSON(t *testing.T) { testCases := []testCase{ { desp: "string success", - data: `"schema name"`, + data: "{\"key\": \"value\"}", expect: DataInputSchema{ - Schema: "schema name", + Schema: &schemaName, FailOnValidationErrors: true, }, err: ``, }, { - desp: `object success`, - data: `{"schema": "schema name"}`, + desp: "string fail", + data: "{\"key\": }", expect: DataInputSchema{ - Schema: "schema name", + Schema: &schemaName, + FailOnValidationErrors: true, + }, + err: `invalid character '}' looking for beginning of value`, + }, + { + desp: `object success (without quotes)`, + data: `{"key": "value"}`, + expect: DataInputSchema{ + Schema: &schemaName, FailOnValidationErrors: true, }, err: ``, }, { - desp: `object fail`, - data: `{"schema": "schema name}`, + desp: `schema object success`, + data: `{"schema": "{\"key\": \"value\"}"}`, expect: DataInputSchema{ - Schema: "schema name", + Schema: &schemaName, FailOnValidationErrors: true, }, - err: `unexpected end of JSON input`, + err: ``, }, { - desp: `object key invalid`, - data: `{"schema_invalid": "schema name"}`, + desp: `schema object success (without quotes)`, + data: `{"schema": {"key": "value"}}`, expect: DataInputSchema{ + Schema: &schemaName, FailOnValidationErrors: true, }, err: ``, }, + { + desp: `schema object fail`, + data: `{"schema": "schema name}`, + expect: DataInputSchema{ + Schema: &schemaName, + FailOnValidationErrors: true, + }, + err: `unexpected end of JSON input`, + }, } for _, tc := range testCases { t.Run(tc.desp, func(t *testing.T) { @@ -548,13 +574,14 @@ func TestDataInputSchemaUnmarshalJSON(t *testing.T) { err := json.Unmarshal([]byte(tc.data), &v) if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) + assert.Error(t, err, tc.desp) + assert.Regexp(t, tc.err, err, tc.desp) return } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + assert.NoError(t, err, tc.desp) + assert.Equal(t, tc.expect.Schema, v.Schema, tc.desp) + assert.Equal(t, tc.expect.FailOnValidationErrors, v.FailOnValidationErrors, tc.desp) }) } } diff --git a/model/workflow_validator_test.go 
b/model/workflow_validator_test.go index 9cdb77e..2a6b5a0 100644 --- a/model/workflow_validator_test.go +++ b/model/workflow_validator_test.go @@ -425,6 +425,8 @@ func TestDataInputSchemaStructLevelValidation(t *testing.T) { action1 := buildActionByOperationState(operationState, "action 1") buildFunctionRef(baseWorkflow, action1, "function 1") + sampleSchema := FromString("sample schema") + testCases := []ValidationCase{ { Desp: "empty DataInputSchema", @@ -440,13 +442,14 @@ func TestDataInputSchemaStructLevelValidation(t *testing.T) { Model: func() Workflow { model := baseWorkflow.DeepCopy() model.DataInputSchema = &DataInputSchema{ - Schema: "sample schema", + Schema: &sampleSchema, } return *model }, }, } + //fmt.Printf("%+v", testCases[0].Model) StructLevelValidationCtx(t, testCases) } diff --git a/model/zz_generated.buildergen.go b/model/zz_generated.buildergen.go index 9ab7058..42564fe 100644 --- a/model/zz_generated.buildergen.go +++ b/model/zz_generated.buildergen.go @@ -1,11 +1,25 @@ //go:build !ignore_autogenerated // +build !ignore_autogenerated -// Code generated by main. DO NOT EDIT. +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Code generated by builder-gen. DO NOT EDIT. 
package model import ( + floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" intstr "k8s.io/apimachinery/pkg/util/intstr" ) @@ -945,12 +959,15 @@ func NewDataInputSchemaBuilder() *DataInputSchemaBuilder { } type DataInputSchemaBuilder struct { - model DataInputSchema + model DataInputSchema + schema *ObjectBuilder } -func (b *DataInputSchemaBuilder) Schema(input string) *DataInputSchemaBuilder { - b.model.Schema = input - return b +func (b *DataInputSchemaBuilder) Schema() *ObjectBuilder { + if b.schema == nil { + b.schema = NewObjectBuilder() + } + return b.schema } func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSchemaBuilder { @@ -959,6 +976,10 @@ func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSc } func (b *DataInputSchemaBuilder) Build() DataInputSchema { + if b.schema != nil { + schema := b.schema.Build() + b.model.Schema = &schema + } return b.model } @@ -2237,11 +2258,21 @@ func (b *RetryBuilder) Increment(input string) *RetryBuilder { return b } +func (b *RetryBuilder) Multiplier(input *floatstr.Float32OrString) *RetryBuilder { + b.model.Multiplier = input + return b +} + func (b *RetryBuilder) MaxAttempts(input intstr.IntOrString) *RetryBuilder { b.model.MaxAttempts = input return b } +func (b *RetryBuilder) Jitter(input floatstr.Float32OrString) *RetryBuilder { + b.model.Jitter = input + return b +} + func (b *RetryBuilder) Build() Retry { return b.model } diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go index 3e76ab1..0fb2566 100644 --- a/model/zz_generated.deepcopy.go +++ b/model/zz_generated.deepcopy.go @@ -223,7 +223,7 @@ func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { if in.DataInputSchema != nil { in, out := &in.DataInputSchema, &out.DataInputSchema *out = new(DataInputSchema) - **out = **in + (*in).DeepCopyInto(*out) } if in.Secrets != nil { in, out := &in.Secrets, &out.Secrets @@ -568,6 +568,11 @@ func (in *DataCondition) DeepCopy() *DataCondition { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { *out = *in + if in.Schema != nil { + in, out := &in.Schema, &out.Schema + *out = new(Object) + (*in).DeepCopyInto(*out) + } return } diff --git a/parser/parser_test.go b/parser/parser_test.go index faa28b8..8cc3de1 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -16,6 +16,7 @@ package parser import ( "encoding/json" + "fmt" "os" "path/filepath" "strings" @@ -582,8 +583,25 @@ func TestFromFile(t *testing.T) { }, { "./testdata/workflows/dataInputSchemaValidation.yaml", func(t *testing.T, w *model.Workflow) { assert.NotNil(t, w.DataInputSchema) - - assert.Equal(t, "sample schema", w.DataInputSchema.Schema) + expected := model.DataInputSchema{} + data, err := util.LoadExternalResource("file://testdata/datainputschema.json") + err1 := util.UnmarshalObject("schema", data, &expected.Schema) + assert.Nil(t, err) + assert.Nil(t, err1) + assert.Equal(t, expected.Schema, w.DataInputSchema.Schema) + assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) + }, + }, { + "./testdata/workflows/dataInputSchemaObject.json", func(t *testing.T, w *model.Workflow) { + assert.NotNil(t, w.DataInputSchema) + expected := model.Object{} + err := json.Unmarshal([]byte("{\"title\": \"Hello World Schema\", \"properties\": {\"person\": "+ + "{\"type\": \"object\",\"properties\": {\"name\": {\"type\": \"string\"}},\"required\": "+ + "[\"name\"]}}, \"required\": [\"person\"]}"), + &expected) + fmt.Printf("err: %s\n", err) + fmt.Printf("schema: %+v\n", expected) + assert.Equal(t, &expected, w.DataInputSchema.Schema) assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) }, }, diff --git a/parser/testdata/datainputschema.json b/parser/testdata/datainputschema.json new file mode 100644 index 0000000..bace233 --- /dev/null +++ b/parser/testdata/datainputschema.json @@ -0,0 +1,16 @@ +{ + "title": "Hello World Schema", + "properties": { + "person": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ] + } + } +} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaObject.json b/parser/testdata/workflows/dataInputSchemaObject.json new file mode 100644 index 0000000..7b50c0d --- /dev/null +++ b/parser/testdata/workflows/dataInputSchemaObject.json @@ -0,0 +1,56 @@ +{ + "id": "greeting", + "version": "1.0.0", + "specVersion": "0.8", + "name": "Greeting Workflow", + "description": "Greet Someone", + "start": "Greet", + "dataInputSchema": { + "failOnValidationErrors": false, + "schema": { + "title": "Hello World Schema", + "properties": { + "person": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ] + } + }, + "required": [ + "person" + ] + } + }, + "functions": [ + { + "name": "greetingFunction", + "operation": "file://myapis/greetingapis.json#greeting" + } + ], + "states": [ + { + "name": "Greet", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "greetingFunction", + "arguments": { + "name": "${ .person.name }" + } + }, + "actionDataFilter": { + "results": "${ {greeting: .greeting} }" + } + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaValidation.yaml b/parser/testdata/workflows/dataInputSchemaValidation.yaml index ed685a6..4bc1e11 100644 --- a/parser/testdata/workflows/dataInputSchemaValidation.yaml +++ b/parser/testdata/workflows/dataInputSchemaValidation.yaml @@ -18,7 +18,7 @@ specVersion: '0.8' 
start: Start dataInputSchema: failOnValidationErrors: false - schema: "sample schema" + schema: "file://testdata/datainputschema.json" states: - name: Start type: inject diff --git a/util/unmarshal.go b/util/unmarshal.go index 6c70f4a..d00e9d2 100644 --- a/util/unmarshal.go +++ b/util/unmarshal.go @@ -72,6 +72,13 @@ func (e *UnmarshalError) Error() string { func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) string { if err.Struct == "" && err.Field == "" { primitiveTypeName := e.primitiveType.String() + + // in some cases the e.primitiveType might be invalid, one of the reasons is because it is nil + // default to string in that case + if e.primitiveType == reflect.Invalid { + primitiveTypeName = "string" + } + var objectTypeName string if e.objectType != reflect.Invalid { switch e.objectType { @@ -107,7 +114,7 @@ func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) str return err.Error() } -func loadExternalResource(url string) (b []byte, err error) { +func LoadExternalResource(url string) (b []byte, err error) { index := strings.Index(url, "://") if index == -1 { b, err = getBytesFromFile(url) @@ -199,7 +206,7 @@ func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject * // Assumes that the value inside `data` is a path to a known location. // Returns the content of the file or a not nil error reference. - data, err = loadExternalResource(valString) + data, err = LoadExternalResource(valString) if err != nil { return err } @@ -214,7 +221,7 @@ func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject * } data = bytes.TrimSpace(data) - if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" { + if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" && parameterName != "schema" { extractData := map[string]json.RawMessage{} err = json.Unmarshal(data, &extractData) if err != nil { diff --git a/util/unmarshal_test.go b/util/unmarshal_test.go index 0227123..f7051fb 100644 --- a/util/unmarshal_test.go +++ b/util/unmarshal_test.go @@ -58,23 +58,23 @@ func Test_loadExternalResource(t *testing.T) { defer server.Close() HttpClient = *server.Client() - data, err := loadExternalResource(server.URL + "/test.json") + data, err := LoadExternalResource(server.URL + "/test.json") assert.NoError(t, err) assert.Equal(t, "{}", string(data)) - data, err = loadExternalResource("parser/testdata/eventdefs.yml") + data, err = LoadExternalResource("parser/testdata/eventdefs.yml") assert.NoError(t, err) assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - data, err = loadExternalResource("file://../parser/testdata/eventdefs.yml") + data, err = LoadExternalResource("file://../parser/testdata/eventdefs.yml") assert.NoError(t, err) assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - data, err = loadExternalResource("./parser/testdata/eventdefs.yml") + data, err = LoadExternalResource("./parser/testdata/eventdefs.yml") assert.NoError(t, err) assert.Equal(t, 
"{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - _, err = loadExternalResource("ftp://test.yml") + _, err = LoadExternalResource("ftp://test.yml") assert.ErrorContains(t, err, "unsupported scheme: \"ftp\"") } From 7219d5c6fe6710b8151e0532cc31da56c70f4659 Mon Sep 17 00:00:00 2001 From: Kshitiz Agrawal <75060259+Kshitiz1403@users.noreply.github.com> Date: Thu, 23 May 2024 00:37:31 +0530 Subject: [PATCH 086/110] fix: ISO8601 duration wouldn't parse for valid fractional values (#199) * fix: ISO8601 duration won't parse for valid fractional values Previously, the library utilized a parsing library for ISO8601 durations that parsed durations into integers only. This caused issues with durations containing fractional values, such as "P0.5S", which are valid ISO8601 strings but failed to parse due to the limitation of the previous library. In this commit, I've replaced the outdated parsing library with a more robust alternative. The new library properly handles fractional values, ensuring accurate parsing of ISO8601 duration strings, including cases like "P0.5S". This update enhances the functionality and reliability of the library by accommodating a broader range of ISO8601 duration formats. Additionally, it ensures compatibility with modern standards and improves the overall usability of the library. Signed-off-by: Kshitiz Agrawal * fix: resolve failing test cases Signed-off-by: Kshitiz Agrawal * feat: add test case for fractional ISO duration Signed-off-by: Kshitiz Agrawal --------- Signed-off-by: Kshitiz Agrawal --- go.mod | 2 +- go.sum | 4 ++-- validator/validator.go | 11 +++++++++-- validator/validator_test.go | 5 +++++ 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/go.mod b/go.mod index bbb30d4..62aae70 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/go-playground/validator/v10 v10.11.1 github.com/pkg/errors v0.9.1 github.com/relvacode/iso8601 v1.3.0 - github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 + github.com/sosodev/duration v1.2.0 github.com/stretchr/testify v1.8.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/apimachinery v0.26.2 diff --git a/go.sum b/go.sum index b705b2d..fa248b6 100644 --- a/go.sum +++ b/go.sum @@ -50,8 +50,8 @@ github.com/relvacode/iso8601 v1.3.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46 h1:Dz0HrI1AtNSGCE8LXLLqoZU4iuOJXPWndenCsZfstA8= -github.com/senseyeio/duration v0.0.0-20180430131211-7c2a214ada46/go.mod h1:is8FVkzSi7PYLWEXT5MgWhglFsyyiW8ffxAoJqfuFZo= +github.com/sosodev/duration v1.2.0 h1:pqK/FLSjsAADWY74SyWDCjOcd5l7H8GSnnOGEB9A1Us= +github.com/sosodev/duration v1.2.0/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= diff --git a/validator/validator.go b/validator/validator.go index 
c2ae024..f241f84 100644 --- a/validator/validator.go +++ b/validator/validator.go @@ -16,10 +16,11 @@ package validator import ( "context" + "errors" "strconv" "github.com/relvacode/iso8601" - "github.com/senseyeio/duration" + "github.com/sosodev/duration" "k8s.io/apimachinery/pkg/util/intstr" validator "github.com/go-playground/validator/v10" @@ -60,7 +61,13 @@ func GetValidator() *validator.Validate { // ValidateISO8601TimeDuration validate the string is iso8601 duration format func ValidateISO8601TimeDuration(s string) error { - _, err := duration.ParseISO8601(s) + if s == "" { + return errors.New("could not parse duration string") + } + _, err := duration.Parse(s) + if err != nil { + return errors.New("could not parse duration string") + } return err } diff --git a/validator/validator_test.go b/validator/validator_test.go index 8dd6c9c..daab56a 100644 --- a/validator/validator_test.go +++ b/validator/validator_test.go @@ -38,6 +38,11 @@ func TestValidateISO8601TimeDuration(t *testing.T) { s: "PT5S", err: ``, }, + { + desp: "fractional_second_designator", + s: "PT0.5S", + err: ``, + }, { desp: "empty value", s: "", From 04614c3a44258acd371e2d67f2564c1cb36a20a1 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 4 Oct 2024 12:15:38 -0400 Subject: [PATCH 087/110] Update README to v2.3.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 26491b0..85864f9 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.2.5](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.2.5) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.3.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.3.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 3560eebecbb4685beb437a941833b9c033190dd7 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 8 Oct 2024 12:32:25 -0400 Subject: [PATCH 088/110] Fix#206 - Upgrade dependencies and Go to 1.22 (#207) * Upgrade dependencies and Go to 1.22 Signed-off-by: Ricardo Zanini * Fix GitHub Actions and upgrade dependencies Signed-off-by: Ricardo Zanini * Fix Float32OrString validation methods Signed-off-by: Ricardo Zanini * Move custom validator to retry_validator.go Signed-off-by: Ricardo Zanini * Upgrade validator library Signed-off-by: Ricardo Zanini --------- Signed-off-by: Ricardo Zanini --- .../Go-SDK-Check-k8s-integration.yaml | 10 +- .github/workflows/Go-SDK-PR-Check.yaml | 14 +- Makefile | 8 +- go.mod | 51 +++--- go.sum | 161 ++++++++---------- model/action_validator_test.go | 5 +- model/retry.go | 2 +- model/retry_validator.go | 1 + model/retry_validator_test.go | 16 +- .../workflows/applicationrequest.url.json | 4 +- util/floatstr/floatstr.go | 76 ++++++++- 11 files changed, 216 insertions(+), 132 deletions(-) diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml 
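Stepping back to the ISO 8601 fix of patch 086 above: the validator now delegates to sosodev/duration, so fractional designators validate where the previous integer-only parser rejected them. A minimal sketch using the exported helper from that hunk; the sample strings mirror the test cases shown above:

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/validator"
)

func main() {
	for _, s := range []string{"PT5S", "PT0.5S", ""} {
		if err := validator.ValidateISO8601TimeDuration(s); err != nil {
			fmt.Printf("%q: %v\n", s, err)
			continue
		}
		fmt.Printf("%q: valid\n", s)
	}
}
```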
b/.github/workflows/Go-SDK-Check-k8s-integration.yaml index a2286d1..f91b740 100644 --- a/.github/workflows/Go-SDK-Check-k8s-integration.yaml +++ b/.github/workflows/Go-SDK-Check-k8s-integration.yaml @@ -23,21 +23,21 @@ on: branches: - main env: - GO_VERSION: 1.19 + GO_VERSION: 1.22 jobs: basic_checks: name: Basic Checks runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} id: go - name: Cache dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cache/go-build @@ -46,7 +46,7 @@ jobs: restore-keys: | ${{ runner.os }}-go- - name: Cache tools - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 8cfd8b1..aecd842 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -23,21 +23,21 @@ on: branches: - main env: - GO_VERSION: 1.19 + GO_VERSION: 1.22 jobs: basic_checks: name: Basic Checks runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} id: go - name: Cache dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cache/go-build @@ -46,7 +46,7 @@ jobs: restore-keys: | ${{ runner.os }}-go- - name: Cache tools - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} @@ -67,7 +67,7 @@ jobs: changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) [[ -z "$changed_files" ]] || (printf "Some files are not formatted properly: \n$changed_files\n Did you run 'make test' before sending the PR?" && exit 1) - name: Check lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v6 with: version: latest - name: Install cover @@ -78,7 +78,7 @@ jobs: run: | go test ./... -coverprofile test_coverage.out -covermode=atomic - name: Upload results to codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: files: ./test_coverage.out flags: sdk-go diff --git a/Makefile b/Makefile index 06fde64..e516552 100644 --- a/Makefile +++ b/Makefile @@ -6,9 +6,15 @@ fmt: @go vet ./... @go fmt ./... +goimports: + @command -v goimports > /dev/null || go install golang.org/x/tools/cmd/goimports@latest + @goimports -w . + + lint: @command -v golangci-lint > /dev/null || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin" make addheaders + make goimports make fmt ./hack/go-lint.sh ${params} @@ -38,7 +44,7 @@ LOCALBIN ?= $(shell pwd)/bin $(LOCALBIN): mkdir -p $(LOCALBIN) -CONTROLLER_TOOLS_VERSION ?= v0.9.2 +CONTROLLER_TOOLS_VERSION ?= v0.16.3 CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen .PHONY: controller-gen controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. 
diff --git a/go.mod b/go.mod index 62aae70..0d7ee6b 100644 --- a/go.mod +++ b/go.mod @@ -1,41 +1,44 @@ module github.com/serverlessworkflow/sdk-go/v2 -go 1.19 +go 1.22.8 + +toolchain go1.23.1 require ( - github.com/go-playground/validator/v10 v10.11.1 + github.com/go-playground/validator/v10 v10.22.1 github.com/pkg/errors v0.9.1 - github.com/relvacode/iso8601 v1.3.0 - github.com/sosodev/duration v1.2.0 - github.com/stretchr/testify v1.8.0 + github.com/relvacode/iso8601 v1.4.0 + github.com/sosodev/duration v1.3.1 + github.com/stretchr/testify v1.9.0 gopkg.in/yaml.v3 v3.0.1 - k8s.io/apimachinery v0.26.2 - sigs.k8s.io/controller-runtime v0.14.4 - sigs.k8s.io/yaml v1.3.0 + k8s.io/apimachinery v0.31.1 + sigs.k8s.io/controller-runtime v0.19.0 + sigs.k8s.io/yaml v1.4.0 ) require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-logr/logr v1.2.3 // indirect - github.com/go-playground/locales v0.14.0 // indirect - github.com/go-playground/universal-translator v0.18.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/fxamacker/cbor/v2 v2.7.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/leodido/go-urn v1.2.1 // indirect + github.com/leodido/go-urn v1.4.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/crypto v0.15.0 // indirect - golang.org/x/net v0.18.0 // indirect - golang.org/x/sys v0.14.0 // indirect - golang.org/x/text v0.14.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/x448/float16 v0.8.4 // indirect + golang.org/x/crypto v0.28.0 // indirect + golang.org/x/net v0.26.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 // indirect - k8s.io/utils v0.0.0-20221128185143-99ec85e7a448 // indirect - sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect + k8s.io/klog/v2 v2.130.1 // indirect + k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect + sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect ) - -replace golang.org/x/text => golang.org/x/text v0.3.8 diff --git a/go.sum b/go.sum index fa248b6..37144b7 100644 --- a/go.sum +++ b/go.sum @@ -1,141 +1,128 @@ -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= 
-github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= -github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= +github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= +github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= +github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 
h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af h1:kmjWCqn2qkEml422C2Rrd27c3VGxi6a/6HNq8QmHRKM= +github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af/go.mod h1:K1liHPHnj73Fdn/EKuT8nrFqBihUSKXoLYU0BuatOYo= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/onsi/ginkgo/v2 v2.6.0 h1:9t9b9vRUbFq3C4qKFCGkVuq/fIHji802N1nrtkh1mNc= -github.com/onsi/gomega v1.24.1 h1:KORJXNNTzJXzu4ScJWssJfJMnJ+2QJqhoQSRwNlze9E= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA= +github.com/onsi/ginkgo/v2 v2.19.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To= +github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= +github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/relvacode/iso8601 v1.3.0 h1:HguUjsGpIMh/zsTczGN3DVJFxTU/GX+MMmzcKoMO7ko= -github.com/relvacode/iso8601 v1.3.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= 
-github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/sosodev/duration v1.2.0 h1:pqK/FLSjsAADWY74SyWDCjOcd5l7H8GSnnOGEB9A1Us= -github.com/sosodev/duration v1.2.0/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/relvacode/iso8601 v1.4.0 h1:GsInVSEJfkYuirYFxa80nMLbH2aydgZpIf52gYZXUJs= +github.com/relvacode/iso8601 v1.4.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= +github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4= +github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= +github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= -golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools 
v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/api v0.26.1 h1:f+SWYiPd/GsiWwVRz+NbFyCgvv75Pk9NK6dlkZgpCRQ= -k8s.io/apimachinery v0.26.2 h1:da1u3D5wfR5u2RpLhE/ZtZS2P7QvDgLZTi9wrNZl/tQ= -k8s.io/apimachinery v0.26.2/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= -k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 h1:hWRbsoRWt44OEBnYUd4ceLy4ofBoh+p9vauWp/I5Gdg= -k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/utils v0.0.0-20221128185143-99ec85e7a448 h1:KTgPnR10d5zhztWptI952TNtt/4u5h3IzDXkdIMuo2Y= -k8s.io/utils v0.0.0-20221128185143-99ec85e7a448/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -sigs.k8s.io/controller-runtime v0.14.4 h1:Kd/Qgx5pd2XUL08eOV2vwIq3L9GhIbJ5Nxengbd4/0M= -sigs.k8s.io/controller-runtime v0.14.4/go.mod h1:WqIdsAY6JBsjfc/CqO0CORmNtoCtE4S6qbPc9s68h+0= -sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= -sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= 
-sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= -sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= -sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +k8s.io/api v0.31.0 h1:b9LiSjR2ym/SzTOlfMHm1tr7/21aD7fSkqgD/CVJBCo= +k8s.io/api v0.31.0/go.mod h1:0YiFF+JfFxMM6+1hQei8FY8M7s1Mth+z/q7eF1aJkTE= +k8s.io/apimachinery v0.31.1 h1:mhcUBbj7KUjaVhyXILglcVjuS4nYXiwC+KKFBgIVy7U= +k8s.io/apimachinery v0.31.1/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo= +k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= +k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= +k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 h1:pUdcCO1Lk/tbT5ztQWOBi5HBgbBP1J8+AsQnQCKsi8A= +k8s.io/utils v0.0.0-20240711033017-18e509b52bc8/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/controller-runtime v0.19.0 h1:nWVM7aq+Il2ABxwiCizrVDSlmDcshi9llbaFbC0ji/Q= +sigs.k8s.io/controller-runtime v0.19.0/go.mod h1:iRmWllt8IlaLjvTTDLhRBXIEtkCK6hwVBJJsYS9Ajf4= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/model/action_validator_test.go b/model/action_validator_test.go index 5445f7b..84424b5 100644 --- a/model/action_validator_test.go +++ b/model/action_validator_test.go @@ -16,6 +16,8 @@ package model import ( "testing" + + "k8s.io/apimachinery/pkg/util/intstr" ) func buildActionByOperationState(state *State, name string) *Action { @@ -64,7 +66,8 @@ func buildFunctionRef(workflow *Workflow, action *Action, name string) (*Functio func buildRetryRef(workflow *Workflow, action *Action, name string) { retry := Retry{ - Name: name, + Name: name, + MaxAttempts: intstr.FromInt32(1), } workflow.Retries = append(workflow.Retries, retry) diff --git a/model/retry.go b/model/retry.go index 83e2333..9fe6e78 100644 --- a/model/retry.go +++ b/model/retry.go @@ -53,5 +53,5 @@ func (r *Retry) UnmarshalJSON(data []byte) error { } func (r *Retry) ApplyDefault() { - r.MaxAttempts = intstr.FromInt(1) + r.MaxAttempts = intstr.FromInt32(1) } diff --git a/model/retry_validator.go b/model/retry_validator.go index b95e2f7..bd2e755 100644 --- a/model/retry_validator.go +++ b/model/retry_validator.go @@ -25,6 +25,7 @@ import ( func init() { val.GetValidator().RegisterStructValidation(retryStructLevelValidation, Retry{}) + val.GetValidator().RegisterStructValidation(floatstr.ValidateFloat32OrString, Retry{}) } // RetryStructLevelValidation custom validator for Retry Struct diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go index 5a3bca0..8b73243 100644 --- a/model/retry_validator_test.go +++ b/model/retry_validator_test.go @@ -37,7 +37,7 @@ func TestRetryStructLevelValidation(t *testing.T) { model.Retries[0].Delay = "PT5S" model.Retries[0].MaxDelay = "PT5S" model.Retries[0].Increment = "PT5S" - model.Retries[0].Jitter = floatstr.FromString("PT5S") + model.Retries[0].Jitter = floatstr.FromString("0.5") return *model }, }, @@ -82,8 +82,18 @@ func 
TestRetryStructLevelValidation(t *testing.T) { }, Err: `workflow.retries[0].delay invalid iso8601 duration "P5S" workflow.retries[0].maxDelay invalid iso8601 duration "P5S" -workflow.retries[0].increment invalid iso8601 duration "P5S" -workflow.retries[0].jitter invalid iso8601 duration "P5S"`, +workflow.retries[0].increment invalid iso8601 duration "P5S"`, + }, + { + Desp: "multiplier less than zero", + Model: func() Workflow { + multiplierZero := floatstr.FromString("0") + model := baseWorkflow.DeepCopy() + model.Retries[0].Multiplier = &multiplierZero + + return *model + }, + Err: `workflow.retries[0].multiplier must have the minimum `, }, } diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json index a410993..c7c341d 100644 --- a/parser/testdata/workflows/applicationrequest.url.json +++ b/parser/testdata/workflows/applicationrequest.url.json @@ -4,8 +4,8 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "specVersion": "0.8", - "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestfunctions.json", - "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json", + "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestfunctions.json", + "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestretries.json", "start": { "stateName": "CheckApplication" }, diff --git a/util/floatstr/floatstr.go b/util/floatstr/floatstr.go index 3261fdd..7481271 100644 --- a/util/floatstr/floatstr.go +++ b/util/floatstr/floatstr.go @@ -16,8 +16,14 @@ package floatstr import ( "encoding/json" + "errors" "fmt" + "reflect" "strconv" + "strings" + + "github.com/go-playground/validator/v10" + val "github.com/serverlessworkflow/sdk-go/v2/validator" ) // Float32OrString is a type that can hold a float32 or a string. @@ -71,7 +77,7 @@ func (floatstr *Float32OrString) UnmarshalJSON(value []byte) error { } // MarshalJSON implements the json.Marshaller interface. -func (floatstr Float32OrString) MarshalJSON() ([]byte, error) { +func (floatstr *Float32OrString) MarshalJSON() ([]byte, error) { switch floatstr.Type { case Float: return json.Marshal(floatstr.FloatVal) @@ -103,3 +109,71 @@ func (floatstr *Float32OrString) FloatValue() float32 { } return floatstr.FloatVal } + +func init() { + val.GetValidator().RegisterCustomTypeFunc(func(fl reflect.Value) interface{} { + if fl.Kind() != reflect.Struct { + return errors.New("invalid type: expected Float32OrString") + } + + // Get the Float32OrString value + _, ok := fl.Interface().(Float32OrString) + if !ok { + return fmt.Errorf("invalid type: expected Float32OrString") + } + + return nil + }, Float32OrString{}) +} + +func ValidateFloat32OrString(sl validator.StructLevel) { + // Get the current struct being validated. + current := sl.Current() + + for i := 0; i < current.NumField(); i++ { + field := current.Type().Field(i) + value := current.Field(i) + + // Check if the field is a pointer and handle nil pointers. + if value.Kind() == reflect.Ptr { + if value.IsNil() { + continue // Skip nil pointers. + } + value = value.Elem() // Dereference the pointer. + } + + // Check if the field is of type Float32OrString. 
+ if value.Type() == reflect.TypeOf(Float32OrString{}) { + // Extract validation tags from the field. + tags := field.Tag.Get("validate") + + // Split tags and look for min/max. + tagList := strings.Split(tags, ",") + for _, tag := range tagList { + if strings.HasPrefix(tag, "min=") { + minVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "min="), 32) + if err != nil { + sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") + continue + } + + if value.FieldByName("FloatVal").Float() < minVal { + sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") + } + } + + if strings.HasPrefix(tag, "max=") { + maxVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "max="), 32) + if err != nil { + sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") + continue + } + + if value.FieldByName("FloatVal").Float() > maxVal { + sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") + } + } + } + } + } +} From 17bb30b28b5c3cc21b5b3417b299c7cff0f88f46 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 9 Oct 2024 12:09:32 -0400 Subject: [PATCH 089/110] Fix #210 - Add kubebuilder annotations to preserve metadata fields (#211) Signed-off-by: Ricardo Zanini --- .gitignore | 2 - Makefile | 38 +- ...erlessworkflow.io_serverlessworkflows.yaml | 1990 +++++++++++++++++ ...rkflow.io_v1alpha1_serverlessworkflow.yaml | 2 +- .../{ => api/v1alpha1}/groupversion_info.go | 6 +- .../v1alpha1/serverlessworkflow_types.go} | 27 +- .../api/v1alpha1/zz_generated.deepcopy.go | 113 + model/common.go | 2 + model/states.go | 2 + model/switch_state.go | 2 + parser/parser_test.go | 1 + .../applicationrequest.multiauth.json | 3 + 12 files changed, 2155 insertions(+), 33 deletions(-) create mode 100644 config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml rename kubernetes/workflow_cr_example.yaml => config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml (97%) rename kubernetes/{ => api/v1alpha1}/groupversion_info.go (90%) rename kubernetes/{k8s_workflow_integration.go => api/v1alpha1/serverlessworkflow_types.go} (87%) create mode 100644 kubernetes/api/v1alpha1/zz_generated.deepcopy.go diff --git a/.gitignore b/.gitignore index 33227af..55109f1 100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,3 @@ bin *.out .vscode -# ignore config directory generated by the controller-gen tool -config diff --git a/Makefile b/Makefile index e516552..a8f36db 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ addheaders: - @command -v addlicense > /dev/null || go install -modfile=tools.mod -v github.com/google/addlicense + @command -v addlicense > /dev/null || (echo "πŸš€ Installing addlicense..."; go install -modfile=tools.mod -v github.com/google/addlicense) @addlicense -c "The Serverless Workflow Specification Authors" -l apache . fmt: @@ -7,34 +7,46 @@ fmt: @go fmt ./... goimports: - @command -v goimports > /dev/null || go install golang.org/x/tools/cmd/goimports@latest + @command -v goimports > /dev/null || (echo "πŸš€ Installing goimports..."; go install golang.org/x/tools/cmd/goimports@latest) @goimports -w . - lint: - @command -v golangci-lint > /dev/null || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin" - make addheaders - make goimports - make fmt - ./hack/go-lint.sh ${params} + @echo "πŸš€ Running lint..." 
+ @command -v golangci-lint > /dev/null || (echo "πŸš€ Installing golangci-lint..."; curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin") + @make addheaders + @make goimports + @make fmt + @./hack/go-lint.sh ${params} + @echo "βœ… Linting completed!" .PHONY: test coverage="false" test: deepcopy buildergen - make lint + @echo "πŸ§ͺ Running tests..." @go test ./... + @echo "βœ… Tests completed!" -.PHONY: deepcopy buildergen deepcopy: $(DEEPCOPY_GEN) ## Download deepcopy-gen locally if necessary. - ./hack/deepcopy-gen.sh deepcopy + @echo "πŸ“¦ Running deepcopy-gen..." + @./hack/deepcopy-gen.sh deepcopy > /dev/null + @make lint + @echo "βœ… Deepcopy generation and linting completed!" buildergen: $(BUILDER_GEN) ## Download builder-gen locally if necessary. - ./hack/builder-gen.sh buildergen + @echo "πŸ“¦ Running builder-gen..." + @./hack/builder-gen.sh buildergen > /dev/null + @make lint + @echo "βœ… Builder generation and linting completed!" .PHONY: kube-integration kube-integration: controller-gen - $(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./..." output:crd:artifacts:config=config/crd/bases + @echo "πŸ“¦ Generating Kubernetes objects..." + @$(CONTROLLER_GEN) object:headerFile="./hack/boilerplate.txt" paths="./kubernetes/api/..." + @echo "πŸ“¦ Generating Kubernetes CRDs..." + @$(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./kubernetes/..." output:crd:artifacts:config=config/crd/bases + @make lint + @echo "βœ… Kubernetes integration completed!" #################################### diff --git a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml new file mode 100644 index 0000000..5584a65 --- /dev/null +++ b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml @@ -0,0 +1,1990 @@ +# Copyright 2024 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + annotations: + controller-gen.kubebuilder.io/version: v0.16.3 + name: serverlessworkflows.serverlessworkflow.io +spec: + group: serverlessworkflow.io + names: + kind: ServerlessWorkflow + listKind: ServerlessWorkflowList + plural: serverlessworkflows + singular: serverlessworkflow + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + description: ServerlessWorkflow ... + properties: + apiVersion: + description: |- + APIVersion defines the versioned schema of this representation of an object. + Servers should convert recognized schemas to the latest internal value, and + may reject unrecognized values. + More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources + type: string + kind: + description: |- + Kind is a string value representing the REST resource this object represents. 
+ Servers may infer this from the endpoint the client submits requests to. + Cannot be updated. + In CamelCase. + More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds + type: string + metadata: + type: object + spec: + description: ServerlessWorkflowSpec defines a base API for integration + test with operator-sdk + properties: + annotations: + description: |- + Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important + qualities. + items: + type: string + type: array + auth: + description: |- + Auth definitions can be used to define authentication information that should be applied to resources defined + in the operation property of function definitions. It is not used as authentication information for the + function invocation, but just to access the resource containing the function invocation information. + x-kubernetes-preserve-unknown-fields: true + autoRetries: + description: AutoRetries If set to true, actions should automatically + be retried on unchecked errors. Default is false + type: boolean + constants: + additionalProperties: + description: |- + RawMessage is a raw encoded JSON value. + It implements [Marshaler] and [Unmarshaler] and can + be used to delay JSON decoding or precompute a JSON encoding. + format: byte + type: string + description: |- + Constants Workflow constants are used to define static, and immutable, data which is available to + Workflow Expressions. + type: object + dataInputSchema: + description: DataInputSchema URI or Object of the JSON Schema used + to validate the workflow data input + properties: + failOnValidationErrors: + type: boolean + schema: + type: object + required: + - failOnValidationErrors + - schema + type: object + description: + description: Workflow description. + type: string + errors: + description: Defines checked errors that can be explicitly handled + during workflow execution. + items: + description: Error declaration for workflow definitions + properties: + code: + description: |- + Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. + Should not be defined if error is set to '*'. + type: string + description: + description: OnError description. + type: string + name: + description: Name Domain-specific error name. + type: string + required: + - name + type: object + type: array + events: + items: + description: Event used to define events and their correlations + properties: + correlation: + description: Define event correlation rules for this event. + Only used for consumed events. + items: + description: Correlation define event correlation rules for + an event. Only used for `consumed` events + properties: + contextAttributeName: + description: CloudEvent Extension Context Attribute name + type: string + contextAttributeValue: + description: CloudEvent Extension Context Attribute value + type: string + required: + - contextAttributeName + type: object + type: array + dataOnly: + default: true + description: |- + If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload + and context attributes should be accessible. Defaults to true. + type: boolean + kind: + default: consumed + description: Defines the CloudEvent as either 'consumed' or + 'produced' by the workflow. Defaults to `consumed`. 
+ enum: + - consumed + - produced + type: string + metadata: + description: Metadata information + x-kubernetes-preserve-unknown-fields: true + name: + description: Unique event name. + type: string + source: + description: CloudEvent source. + type: string + type: + description: CloudEvent type. + type: string + required: + - name + - type + type: object + type: array + expressionLang: + default: jq + description: Identifies the expression language used for workflow + expressions. Default is 'jq'. + enum: + - jq + - jsonpath + - cel + type: string + functions: + items: + description: Function ... + properties: + authRef: + description: References an auth definition name to be used to + access to resource defined in the operation parameter. + type: string + metadata: + description: Metadata information + x-kubernetes-preserve-unknown-fields: true + name: + description: Unique function name + type: string + operation: + description: |- + If type is `rest`, #. + If type is `rpc`, ##. + If type is `expression`, defines the workflow expression. If the type is `custom`, + #. + type: string + type: + default: rest + description: |- + Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. + Default is `rest`. + enum: + - rest + - rpc + - expression + - graphql + - odata + - asyncapi + - custom + type: string + required: + - name + - operation + type: object + type: array + id: + description: Workflow unique identifier + type: string + keepActive: + description: |- + If "true", workflow instances is not terminated when there are no active execution paths. + Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" + type: boolean + key: + description: Key Domain-specific workflow identifier + type: string + metadata: + description: Metadata custom information shared with the runtime. + x-kubernetes-preserve-unknown-fields: true + name: + description: Workflow name + type: string + retries: + items: + description: Retry ... + properties: + delay: + description: Time delay between retry attempts (ISO 8601 duration + format) + type: string + increment: + description: Static value by which the delay increases during + each attempt (ISO 8601 time format) + type: string + jitter: + description: If float type, maximum amount of random time added + or subtracted from the delay between each retry relative to + total delay (between 0 and 1). If string type, absolute maximum + amount of random time added or subtracted from the delay between + each retry (ISO 8601 duration format) + properties: + floatVal: + type: number + strVal: + type: string + type: + description: Type represents the stored type of Float32OrString. + format: int64 + type: integer + type: object + maxAttempts: + anyOf: + - type: integer + - type: string + description: Maximum number of retry attempts. + x-kubernetes-int-or-string: true + maxDelay: + description: Maximum time delay between retry attempts (ISO + 8601 duration format) + type: string + multiplier: + description: Numeric value, if specified the delay between retries + is multiplied by this value. + properties: + floatVal: + type: number + strVal: + type: string + type: + description: Type represents the stored type of Float32OrString. 
+ format: int64 + type: integer + type: object + name: + description: Unique retry strategy name + type: string + required: + - maxAttempts + - name + type: object + type: array + secrets: + description: |- + Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, + inside your Workflow Expressions. + items: + type: string + type: array + specVersion: + default: "0.8" + description: Serverless Workflow schema version + type: string + start: + description: Workflow start definition. + x-kubernetes-preserve-unknown-fields: true + states: + description: States ... + items: + properties: + callbackState: + description: callbackState executes a function and waits for + callback event that indicates completion of the task. + properties: + action: + description: Defines the action to be executed. + properties: + actionDataFilter: + description: |- + Filter the state data to select only the data that can be used within function definition arguments + using its fromStateData property. Filter the action results to select only the result data that should + be added/merged back into the state data using its results property. Select the part of state data which + the action data results should be added/merged to using the toStateData property. + properties: + fromStateData: + description: Workflow expression that filters state + data that can be used by the action. + type: string + results: + description: Workflow expression that filters the + actions data results. + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be + added/merged into. If not specified denotes the top-level state data element. + type: string + useResults: + description: |- + If set to false, action data results are not added/merged to state data. In this case 'results' + and 'toStateData' should be ignored. Default is true. + type: boolean + type: object + condition: + description: Expression, if defined, must evaluate to + true for this action to be performed. If false, action + is disregarded. + type: string + eventRef: + description: References a 'trigger' and 'result' reusable + event definitions. + properties: + contextAttributes: + additionalProperties: + type: object + description: Add additional extension context attributes + to the produced event. + type: object + data: + description: |- + If string type, an expression which selects parts of the states data output to become the data (payload) + of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + of the event referenced by triggerEventRef. + type: object + invoke: + default: sync + description: Specifies if the function should be + invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + resultEventRef: + description: Reference to the unique name of a 'consumed' + event definition + type: string + resultEventTimeout: + description: |- + Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the + actionExecutionTimeout + type: string + triggerEventRef: + description: Reference to the unique name of a 'produced' + event definition, + type: string + required: + - resultEventRef + - triggerEventRef + type: object + functionRef: + description: References a reusable function definition. 
+ properties: + arguments: + additionalProperties: + type: object + description: Arguments (inputs) to be passed to + the referenced function + type: object + invoke: + default: sync + description: Specifies if the function should be + invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + refName: + description: Name of the referenced function. + type: string + selectionSet: + description: Used if function type is graphql. String + containing a valid GraphQL selection set. + type: string + required: + - refName + type: object + id: + description: Defines Unique action identifier. + type: string + name: + description: Defines Unique action name. + type: string + nonRetryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should not be retried. + Used only when `autoRetries` is set to `true` + items: + type: string + type: array + retryRef: + description: References a defined workflow retry definition. + If not defined uses the default runtime retry definition. + type: string + retryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should be retried. + Used only when `autoRetries` is set to `false` + items: + type: string + type: array + sleep: + description: Defines time period workflow execution + should sleep before / after function execution. + properties: + after: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + before: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + type: object + subFlowRef: + description: References a workflow to be invoked. + properties: + invoke: + default: sync + description: |- + Specifies if the subflow should be invoked sync or async. + Defaults to sync. + enum: + - async + - sync + type: string + onParentComplete: + default: terminate + description: |- + onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + is 'async'. Defaults to terminate. + enum: + - terminate + - continue + type: string + version: + description: Sub-workflow version + type: string + workflowId: + description: Sub-workflow unique id + type: string + required: + - workflowId + type: object + type: object + eventDataFilter: + description: Event data filter definition. + properties: + data: + description: Workflow expression that filters of the + event data (payload). + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be added/merged into. + If not specified denotes the top-level state data element + type: string + useData: + description: |- + If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' + should be ignored. Default is true. + type: boolean + type: object + eventRef: + description: References a unique callback event name in + the defined workflow events. 
+ type: string + timeouts: + description: Time period to wait for incoming events (ISO + 8601 format) + properties: + actionExecTimeout: + description: Default single actions definition execution + timeout (ISO 8601 duration format) + type: string + eventTimeout: + description: Default timeout for consuming defined events + (ISO 8601 duration format) + type: string + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - action + - eventRef + type: object + compensatedBy: + description: Unique Name of a workflow state which is responsible + for compensation of this state. + type: string + delayState: + description: delayState Causes the workflow execution to delay + for a specified duration. + properties: + timeDelay: + description: Amount of time (ISO 8601 format) to delay + type: string + required: + - timeDelay + type: object + end: + description: State end definition. + x-kubernetes-preserve-unknown-fields: true + eventState: + description: |- + event states await one or more events and perform actions when they are received. If defined as the + workflow starting state, the event state definition controls when the workflow instances should be created. + properties: + exclusive: + default: true + description: |- + If true consuming one of the defined events causes its associated actions to be performed. If false all + the defined events must be consumed in order for actions to be performed. Defaults to true. + type: boolean + onEvents: + description: Define the events to be consumed and optional + actions to be performed. + items: + description: OnEvents define which actions are be performed + for the one or more events. + properties: + actionMode: + default: sequential + description: Should actions be performed sequentially + or in parallel. Default is sequential. + enum: + - sequential + - parallel + type: string + actions: + description: Actions to be performed if expression + matches + items: + description: Action specify invocations of services + or other workflows during workflow execution. + properties: + actionDataFilter: + description: |- + Filter the state data to select only the data that can be used within function definition arguments + using its fromStateData property. Filter the action results to select only the result data that should + be added/merged back into the state data using its results property. Select the part of state data which + the action data results should be added/merged to using the toStateData property. + properties: + fromStateData: + description: Workflow expression that filters + state data that can be used by the action. + type: string + results: + description: Workflow expression that filters + the actions data results. + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be + added/merged into. If not specified denotes the top-level state data element. + type: string + useResults: + description: |- + If set to false, action data results are not added/merged to state data. In this case 'results' + and 'toStateData' should be ignored. Default is true. 
+ type: boolean + type: object + condition: + description: Expression, if defined, must evaluate + to true for this action to be performed. If + false, action is disregarded. + type: string + eventRef: + description: References a 'trigger' and 'result' + reusable event definitions. + properties: + contextAttributes: + additionalProperties: + type: object + description: Add additional extension context + attributes to the produced event. + type: object + data: + description: |- + If string type, an expression which selects parts of the states data output to become the data (payload) + of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + of the event referenced by triggerEventRef. + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + resultEventRef: + description: Reference to the unique name + of a 'consumed' event definition + type: string + resultEventTimeout: + description: |- + Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the + actionExecutionTimeout + type: string + triggerEventRef: + description: Reference to the unique name + of a 'produced' event definition, + type: string + required: + - resultEventRef + - triggerEventRef + type: object + functionRef: + description: References a reusable function + definition. + properties: + arguments: + additionalProperties: + type: object + description: Arguments (inputs) to be passed + to the referenced function + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + refName: + description: Name of the referenced function. + type: string + selectionSet: + description: Used if function type is graphql. + String containing a valid GraphQL selection + set. + type: string + required: + - refName + type: object + id: + description: Defines Unique action identifier. + type: string + name: + description: Defines Unique action name. + type: string + nonRetryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should not be retried. + Used only when `autoRetries` is set to `true` + items: + type: string + type: array + retryRef: + description: References a defined workflow retry + definition. If not defined uses the default + runtime retry definition. + type: string + retryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should be retried. + Used only when `autoRetries` is set to `false` + items: + type: string + type: array + sleep: + description: Defines time period workflow execution + should sleep before / after function execution. + properties: + after: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + before: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + type: object + subFlowRef: + description: References a workflow to be invoked. + properties: + invoke: + default: sync + description: |- + Specifies if the subflow should be invoked sync or async. + Defaults to sync. 
+ enum: + - async + - sync + type: string + onParentComplete: + default: terminate + description: |- + onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + is 'async'. Defaults to terminate. + enum: + - terminate + - continue + type: string + version: + description: Sub-workflow version + type: string + workflowId: + description: Sub-workflow unique id + type: string + required: + - workflowId + type: object + type: object + type: array + eventDataFilter: + description: eventDataFilter defines the callback + event data filter definition + properties: + data: + description: Workflow expression that filters + of the event data (payload). + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be added/merged into. + If not specified denotes the top-level state data element + type: string + useData: + description: |- + If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' + should be ignored. Default is true. + type: boolean + type: object + eventRefs: + description: References one or more unique event names + in the defined workflow events. + items: + type: string + minItems: 1 + type: array + required: + - eventRefs + type: object + minItems: 1 + type: array + timeouts: + description: State specific timeouts. + properties: + actionExecTimeout: + description: Default single actions definition execution + timeout (ISO 8601 duration format) + type: string + eventTimeout: + description: Default timeout for consuming defined events + (ISO 8601 duration format) + type: string + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - onEvents + type: object + forEachState: + description: forEachState used to execute actions for each element + of a data set. + properties: + actions: + description: Actions to be executed for each of the elements + of inputCollection. + items: + description: Action specify invocations of services or + other workflows during workflow execution. + properties: + actionDataFilter: + description: |- + Filter the state data to select only the data that can be used within function definition arguments + using its fromStateData property. Filter the action results to select only the result data that should + be added/merged back into the state data using its results property. Select the part of state data which + the action data results should be added/merged to using the toStateData property. + properties: + fromStateData: + description: Workflow expression that filters + state data that can be used by the action. + type: string + results: + description: Workflow expression that filters + the actions data results. + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be + added/merged into. If not specified denotes the top-level state data element. + type: string + useResults: + description: |- + If set to false, action data results are not added/merged to state data. In this case 'results' + and 'toStateData' should be ignored. Default is true. 
+ type: boolean + type: object + condition: + description: Expression, if defined, must evaluate + to true for this action to be performed. If false, + action is disregarded. + type: string + eventRef: + description: References a 'trigger' and 'result' reusable + event definitions. + properties: + contextAttributes: + additionalProperties: + type: object + description: Add additional extension context + attributes to the produced event. + type: object + data: + description: |- + If string type, an expression which selects parts of the states data output to become the data (payload) + of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + of the event referenced by triggerEventRef. + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + resultEventRef: + description: Reference to the unique name of a + 'consumed' event definition + type: string + resultEventTimeout: + description: |- + Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the + actionExecutionTimeout + type: string + triggerEventRef: + description: Reference to the unique name of a + 'produced' event definition, + type: string + required: + - resultEventRef + - triggerEventRef + type: object + functionRef: + description: References a reusable function definition. + properties: + arguments: + additionalProperties: + type: object + description: Arguments (inputs) to be passed to + the referenced function + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + refName: + description: Name of the referenced function. + type: string + selectionSet: + description: Used if function type is graphql. + String containing a valid GraphQL selection + set. + type: string + required: + - refName + type: object + id: + description: Defines Unique action identifier. + type: string + name: + description: Defines Unique action name. + type: string + nonRetryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should not be retried. + Used only when `autoRetries` is set to `true` + items: + type: string + type: array + retryRef: + description: References a defined workflow retry definition. + If not defined uses the default runtime retry definition. + type: string + retryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should be retried. + Used only when `autoRetries` is set to `false` + items: + type: string + type: array + sleep: + description: Defines time period workflow execution + should sleep before / after function execution. + properties: + after: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + before: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + type: object + subFlowRef: + description: References a workflow to be invoked. + properties: + invoke: + default: sync + description: |- + Specifies if the subflow should be invoked sync or async. + Defaults to sync. 
+ enum: + - async + - sync + type: string + onParentComplete: + default: terminate + description: |- + onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + is 'async'. Defaults to terminate. + enum: + - terminate + - continue + type: string + version: + description: Sub-workflow version + type: string + workflowId: + description: Sub-workflow unique id + type: string + required: + - workflowId + type: object + type: object + minItems: 0 + type: array + batchSize: + anyOf: + - type: integer + - type: string + description: |- + Specifies how many iterations may run in parallel at the same time. Used if mode property is set to + parallel (default). If not specified, its value should be the size of the inputCollection. + x-kubernetes-int-or-string: true + inputCollection: + description: Workflow expression selecting an array element + of the states' data. + type: string + iterationParam: + description: |- + Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, + this param should contain a unique element of the inputCollection array. + type: string + mode: + default: parallel + description: Specifies how iterations are to be performed + (sequential or in parallel), defaults to parallel. + enum: + - sequential + - parallel + type: string + outputCollection: + description: Workflow expression specifying an array element + of the states data to add the results of each iteration. + type: string + timeouts: + description: State specific timeout. + properties: + actionExecTimeout: + description: Default single actions definition execution + timeout (ISO 8601 duration format) + type: string + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - inputCollection + type: object + id: + description: Unique State id. + type: string + injectState: + description: injectState used to inject static data into state + data input. + properties: + data: + additionalProperties: + type: object + description: JSON object which can be set as state's data + input and can be manipulated via filter + minProperties: 1 + type: object + timeouts: + description: State specific timeouts + properties: + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - data + type: object + metadata: + description: Metadata information. + x-kubernetes-preserve-unknown-fields: true + name: + description: State name. + type: string + onErrors: + description: States error handling and retries definitions. + items: + description: OnError ... + properties: + end: + description: |- + End workflow execution in case of this error. If retryRef is defined, this ends workflow only if + retries were unsuccessful. 
+ x-kubernetes-preserve-unknown-fields: true + errorRef: + description: ErrorRef Reference to a unique workflow error + definition. Used of errorRefs is not used + type: string + errorRefs: + description: ErrorRefs References one or more workflow + error definitions. Used if errorRef is not used + items: + type: string + type: array + transition: + description: |- + Transition to next state to handle the error. If retryRef is defined, this transition is taken only if + retries were unsuccessful. + x-kubernetes-preserve-unknown-fields: true + type: object + type: array + operationState: + description: operationState defines a set of actions to be performed + in sequence or in parallel. + properties: + actionMode: + default: sequential + description: Specifies whether actions are performed in + sequence or in parallel, defaults to sequential. + enum: + - sequential + - parallel + type: string + actions: + description: Actions to be performed + items: + description: Action specify invocations of services or + other workflows during workflow execution. + properties: + actionDataFilter: + description: |- + Filter the state data to select only the data that can be used within function definition arguments + using its fromStateData property. Filter the action results to select only the result data that should + be added/merged back into the state data using its results property. Select the part of state data which + the action data results should be added/merged to using the toStateData property. + properties: + fromStateData: + description: Workflow expression that filters + state data that can be used by the action. + type: string + results: + description: Workflow expression that filters + the actions data results. + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be + added/merged into. If not specified denotes the top-level state data element. + type: string + useResults: + description: |- + If set to false, action data results are not added/merged to state data. In this case 'results' + and 'toStateData' should be ignored. Default is true. + type: boolean + type: object + condition: + description: Expression, if defined, must evaluate + to true for this action to be performed. If false, + action is disregarded. + type: string + eventRef: + description: References a 'trigger' and 'result' reusable + event definitions. + properties: + contextAttributes: + additionalProperties: + type: object + description: Add additional extension context + attributes to the produced event. + type: object + data: + description: |- + If string type, an expression which selects parts of the states data output to become the data (payload) + of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + of the event referenced by triggerEventRef. + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + resultEventRef: + description: Reference to the unique name of a + 'consumed' event definition + type: string + resultEventTimeout: + description: |- + Maximum amount of time (ISO 8601 format) to wait for the result event. 
If not defined it be set to the + actionExecutionTimeout + type: string + triggerEventRef: + description: Reference to the unique name of a + 'produced' event definition, + type: string + required: + - resultEventRef + - triggerEventRef + type: object + functionRef: + description: References a reusable function definition. + properties: + arguments: + additionalProperties: + type: object + description: Arguments (inputs) to be passed to + the referenced function + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + refName: + description: Name of the referenced function. + type: string + selectionSet: + description: Used if function type is graphql. + String containing a valid GraphQL selection + set. + type: string + required: + - refName + type: object + id: + description: Defines Unique action identifier. + type: string + name: + description: Defines Unique action name. + type: string + nonRetryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should not be retried. + Used only when `autoRetries` is set to `true` + items: + type: string + type: array + retryRef: + description: References a defined workflow retry definition. + If not defined uses the default runtime retry definition. + type: string + retryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should be retried. + Used only when `autoRetries` is set to `false` + items: + type: string + type: array + sleep: + description: Defines time period workflow execution + should sleep before / after function execution. + properties: + after: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + before: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + type: object + subFlowRef: + description: References a workflow to be invoked. + properties: + invoke: + default: sync + description: |- + Specifies if the subflow should be invoked sync or async. + Defaults to sync. + enum: + - async + - sync + type: string + onParentComplete: + default: terminate + description: |- + onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + is 'async'. Defaults to terminate. + enum: + - terminate + - continue + type: string + version: + description: Sub-workflow version + type: string + workflowId: + description: Sub-workflow unique id + type: string + required: + - workflowId + type: object + type: object + minItems: 0 + type: array + timeouts: + description: State specific timeouts + properties: + actionExecTimeout: + description: Default single actions definition execution + timeout (ISO 8601 duration format) + type: string + stateExecTimeout: + description: Defines workflow state execution timeout. 
+ properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - actions + type: object + parallelState: + description: parallelState Consists of a number of states that + are executed in parallel. + properties: + branches: + description: List of branches for this parallel state. + items: + description: Branch Definition + properties: + actions: + description: Actions to be executed in this branch + items: + description: Action specify invocations of services + or other workflows during workflow execution. + properties: + actionDataFilter: + description: |- + Filter the state data to select only the data that can be used within function definition arguments + using its fromStateData property. Filter the action results to select only the result data that should + be added/merged back into the state data using its results property. Select the part of state data which + the action data results should be added/merged to using the toStateData property. + properties: + fromStateData: + description: Workflow expression that filters + state data that can be used by the action. + type: string + results: + description: Workflow expression that filters + the actions data results. + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be + added/merged into. If not specified denotes the top-level state data element. + type: string + useResults: + description: |- + If set to false, action data results are not added/merged to state data. In this case 'results' + and 'toStateData' should be ignored. Default is true. + type: boolean + type: object + condition: + description: Expression, if defined, must evaluate + to true for this action to be performed. If + false, action is disregarded. + type: string + eventRef: + description: References a 'trigger' and 'result' + reusable event definitions. + properties: + contextAttributes: + additionalProperties: + type: object + description: Add additional extension context + attributes to the produced event. + type: object + data: + description: |- + If string type, an expression which selects parts of the states data output to become the data (payload) + of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) + of the event referenced by triggerEventRef. + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + resultEventRef: + description: Reference to the unique name + of a 'consumed' event definition + type: string + resultEventTimeout: + description: |- + Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the + actionExecutionTimeout + type: string + triggerEventRef: + description: Reference to the unique name + of a 'produced' event definition, + type: string + required: + - resultEventRef + - triggerEventRef + type: object + functionRef: + description: References a reusable function + definition. 
+ properties: + arguments: + additionalProperties: + type: object + description: Arguments (inputs) to be passed + to the referenced function + type: object + invoke: + default: sync + description: Specifies if the function should + be invoked sync or async. Default is sync. + enum: + - async + - sync + type: string + refName: + description: Name of the referenced function. + type: string + selectionSet: + description: Used if function type is graphql. + String containing a valid GraphQL selection + set. + type: string + required: + - refName + type: object + id: + description: Defines Unique action identifier. + type: string + name: + description: Defines Unique action name. + type: string + nonRetryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should not be retried. + Used only when `autoRetries` is set to `true` + items: + type: string + type: array + retryRef: + description: References a defined workflow retry + definition. If not defined uses the default + runtime retry definition. + type: string + retryableErrors: + description: |- + List of unique references to defined workflow errors for which the action should be retried. + Used only when `autoRetries` is set to `false` + items: + type: string + type: array + sleep: + description: Defines time period workflow execution + should sleep before / after function execution. + properties: + after: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + before: + description: |- + Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. + Does not apply if 'eventRef' is defined. + type: string + type: object + subFlowRef: + description: References a workflow to be invoked. + properties: + invoke: + default: sync + description: |- + Specifies if the subflow should be invoked sync or async. + Defaults to sync. + enum: + - async + - sync + type: string + onParentComplete: + default: terminate + description: |- + onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke + is 'async'. Defaults to terminate. + enum: + - terminate + - continue + type: string + version: + description: Sub-workflow version + type: string + workflowId: + description: Sub-workflow unique id + type: string + required: + - workflowId + type: object + type: object + minItems: 1 + type: array + name: + description: Branch name + type: string + timeouts: + description: Branch specific timeout settings + properties: + actionExecTimeout: + description: Single actions definition execution + timeout duration (ISO 8601 duration format) + type: string + branchExecTimeout: + description: Single branch execution timeout duration + (ISO 8601 duration format) + type: string + type: object + required: + - actions + - name + type: object + minItems: 1 + type: array + completionType: + default: allOf + description: Option types on how to complete branch execution. + Defaults to `allOf`. + enum: + - allOf + - atLeast + type: string + numCompleted: + anyOf: + - type: integer + - type: string + description: |- + Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete + in order for the state to transition/end. 
+ x-kubernetes-int-or-string: true + timeouts: + description: State specific timeouts + properties: + branchExecTimeout: + description: Default single branch execution timeout + (ISO 8601 duration format) + type: string + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - branches + type: object + sleepState: + description: sleepState suspends workflow execution for a given + time duration. + properties: + duration: + description: Duration (ISO 8601 duration format) to sleep + type: string + timeouts: + description: Timeouts State specific timeouts + properties: + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - duration + type: object + stateDataFilter: + description: State data filter. + properties: + input: + description: Workflow expression to filter the state data + input + type: string + output: + description: Workflow expression that filters the state + data output + type: string + type: object + switchState: + description: 'switchState is workflow''s gateways: direct transitions + onf a workflow based on certain conditions.' + properties: + dataConditions: + description: Defines conditions evaluated against data + items: + description: |- + DataCondition specify a data-based condition statement which causes a transition to another workflow state + if evaluated to true. + properties: + condition: + description: Workflow expression evaluated against + state data. Must evaluate to true or false. + type: string + end: + description: Explicit transition to end + properties: + compensate: + description: If set to true, triggers workflow + compensation before workflow execution completes. + Default is false. + type: boolean + continueAs: + description: |- + Defines that current workflow execution should stop, and execution should continue as a new workflow + instance of the provided id + properties: + data: + description: |- + If string type, an expression which selects parts of the states data output to become the workflow data input of + continued execution. If object type, a custom object to become the workflow data input of the continued execution + type: object + version: + description: Version of the workflow to continue + execution as. + type: string + workflowExecTimeout: + description: |- + WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. + Overwrites any specific settings set by that workflow + properties: + duration: + default: unlimited + description: Workflow execution timeout + duration (ISO 8601 duration format). + If not specified should be 'unlimited'. + type: string + interrupt: + description: |- + If false, workflow instance is allowed to finish current execution. If true, current workflow execution + is stopped immediately. Default is false. 
+ type: boolean + runBefore: + description: Name of a workflow state + to be executed before workflow instance + is terminated. + type: string + required: + - duration + type: object + workflowId: + description: Unique id of the workflow to + continue execution as. + type: string + required: + - workflowId + type: object + produceEvents: + description: Array of producedEvent definitions. + Defines events that should be produced. + items: + description: |- + ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a + workflow transitions. The eventRef property must match the name of one of the defined produced events in the + events definition. + properties: + contextAttributes: + additionalProperties: + type: string + description: Add additional event extension + context attributes. + type: object + data: + description: |- + If String, expression which selects parts of the states data output to become the data of the produced event. + If object a custom object to become the data of produced event. + type: object + eventRef: + description: Reference to a defined unique + event name in the events definition + type: string + required: + - eventRef + type: object + type: array + terminate: + description: If true, completes all execution + flows in the given workflow instance. + type: boolean + type: object + metadata: + description: Metadata information. + x-kubernetes-preserve-unknown-fields: true + name: + description: Data condition name. + type: string + transition: + description: Workflow transition if condition is evaluated + to true + properties: + compensate: + default: false + description: If set to true, triggers workflow + compensation before this transition is taken. + Default is false. + type: boolean + nextState: + description: Name of the state to transition to + next. + type: string + produceEvents: + description: Array of producedEvent definitions. + Events to be produced before the transition + takes place. + items: + description: |- + ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a + workflow transitions. The eventRef property must match the name of one of the defined produced events in the + events definition. + properties: + contextAttributes: + additionalProperties: + type: string + description: Add additional event extension + context attributes. + type: object + data: + description: |- + If String, expression which selects parts of the states data output to become the data of the produced event. + If object a custom object to become the data of produced event. + type: object + eventRef: + description: Reference to a defined unique + event name in the events definition + type: string + required: + - eventRef + type: object + type: array + required: + - nextState + type: object + required: + - condition + - end + type: object + type: array + defaultCondition: + description: |- + Default transition of the workflow if there is no matching data conditions. Can include a transition or + end definition. + properties: + end: + description: "\tIf this state an end state" + x-kubernetes-preserve-unknown-fields: true + transition: + description: |- + Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). + Each state can define a transition definition that is used to determine which state to transition to next. 
+ x-kubernetes-preserve-unknown-fields: true + type: object + eventConditions: + description: Defines conditions evaluated against events. + items: + description: EventCondition specify events which the switch + state must wait for. + properties: + end: + description: Explicit transition to end + x-kubernetes-preserve-unknown-fields: true + eventDataFilter: + description: Event data filter definition. + properties: + data: + description: Workflow expression that filters + of the event data (payload). + type: string + toStateData: + description: |- + Workflow expression that selects a state data element to which the action results should be added/merged into. + If not specified denotes the top-level state data element + type: string + useData: + description: |- + If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' + should be ignored. Default is true. + type: boolean + type: object + eventRef: + description: References a unique event name in the + defined workflow events. + type: string + metadata: + description: Metadata information. + x-kubernetes-preserve-unknown-fields: true + name: + description: Event condition name. + type: string + transition: + description: Workflow transition if condition is evaluated + to true + x-kubernetes-preserve-unknown-fields: true + required: + - eventRef + type: object + type: array + timeouts: + description: SwitchState specific timeouts + properties: + eventTimeout: + description: |- + Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. + NOTE: this is only available for EventConditions + type: string + stateExecTimeout: + description: Default workflow state execution timeout + (ISO 8601 duration format) + properties: + single: + description: Single state execution timeout, not + including retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including + retries (ISO 8601 duration format) + type: string + required: + - total + type: object + type: object + required: + - defaultCondition + type: object + transition: + description: Next transition of the workflow after the time + delay. + x-kubernetes-preserve-unknown-fields: true + type: + description: stateType can be any of delay, callback, event, + foreach, inject, operation, parallel, sleep, switch + enum: + - delay + - callback + - event + - foreach + - inject + - operation + - parallel + - sleep + - switch + type: string + usedForCompensation: + description: If true, this state is used to compensate another + state. Default is false. + type: boolean + required: + - name + - type + type: object + minItems: 1 + type: array + x-kubernetes-preserve-unknown-fields: true + timeouts: + description: Defines the workflow default timeout settings. + properties: + actionExecTimeout: + description: ActionExecTimeout Single actions definition execution + timeout duration (ISO 8601 duration format). + type: string + branchExecTimeout: + description: BranchExecTimeout Single branch execution timeout + duration (ISO 8601 duration format). + type: string + eventTimeout: + description: EventTimeout Timeout duration to wait for consuming + defined events (ISO 8601 duration format). + type: string + stateExecTimeout: + description: StateExecTimeout Total state execution timeout (including + retries) (ISO 8601 duration format). 
+ properties: + single: + description: Single state execution timeout, not including + retries (ISO 8601 duration format) + type: string + total: + description: Total state execution timeout, including retries + (ISO 8601 duration format) + type: string + required: + - total + type: object + workflowExecTimeout: + description: |- + WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should + be 'unlimited'. + properties: + duration: + default: unlimited + description: Workflow execution timeout duration (ISO 8601 + duration format). If not specified should be 'unlimited'. + type: string + interrupt: + description: |- + If false, workflow instance is allowed to finish current execution. If true, current workflow execution + is stopped immediately. Default is false. + type: boolean + runBefore: + description: Name of a workflow state to be executed before + workflow instance is terminated. + type: string + required: + - duration + type: object + type: object + version: + description: Workflow version. + type: string + required: + - specVersion + - states + type: object + status: + description: ServerlessWorkflowStatus ... + properties: + observedGeneration: + format: int64 + type: integer + type: object + type: object + served: true + storage: true + subresources: + status: {} diff --git a/kubernetes/workflow_cr_example.yaml b/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml similarity index 97% rename from kubernetes/workflow_cr_example.yaml rename to config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml index 5d79bfa..5faa2c0 100644 --- a/kubernetes/workflow_cr_example.yaml +++ b/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml @@ -15,7 +15,7 @@ # This is an example on how the CR would look like when importing the sdk go types into your operator's spec # the cr instantiation would be validated by this issue: https://github.com/serverlessworkflow/sdk-go/issues/152 -apiVersion: io.serverlessworkflow/v08 +apiVersion: io.serverlessworkflow/v1alpha1 kind: ServerlessWorkflow metadata: name: custom.greeting diff --git a/kubernetes/groupversion_info.go b/kubernetes/api/v1alpha1/groupversion_info.go similarity index 90% rename from kubernetes/groupversion_info.go rename to kubernetes/api/v1alpha1/groupversion_info.go index 9b85567..135263e 100644 --- a/kubernetes/groupversion_info.go +++ b/kubernetes/api/v1alpha1/groupversion_info.go @@ -12,10 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package kubernetes contains API Schema definitions for the serverless v1alpha1 API group +// Package v1alpha1 contains API Schema definitions for the serverless v1alpha1 API group // +kubebuilder:object:generate=true -// +groupName=sdk.serverless.workflow -package kubernetes +// +groupName=serverlessworkflow.io +package v1alpha1 import ( "k8s.io/apimachinery/pkg/runtime/schema" diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/api/v1alpha1/serverlessworkflow_types.go similarity index 87% rename from kubernetes/k8s_workflow_integration.go rename to kubernetes/api/v1alpha1/serverlessworkflow_types.go index 0f929c0..7144062 100644 --- a/kubernetes/k8s_workflow_integration.go +++ b/kubernetes/api/v1alpha1/serverlessworkflow_types.go @@ -12,12 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package kubernetes +package v1alpha1 import ( "github.com/serverlessworkflow/sdk-go/v2/model" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime" ) // This package provides a very simple api for kubernetes operator to test the integration @@ -36,6 +35,7 @@ import ( // TODO add a webhook example // ServerlessWorkflowSpec defines a base API for integration test with operator-sdk +// +k8s:openapi-gen=true type ServerlessWorkflowSpec struct { model.Workflow `json:",inline"` } @@ -49,8 +49,17 @@ type ServerlessWorkflow struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` - Spec ServerlessWorkflowSpec `json:"spec,omitempty"` - Status string `json:"status,omitempty"` + Spec ServerlessWorkflowSpec `json:"spec,omitempty"` + Status ServerlessWorkflowStatus `json:"status,omitempty"` +} + +// ServerlessWorkflowStatus ... +// +k8s:openapi-gen=true +type ServerlessWorkflowStatus struct { + // add your conditions struct here ... + + // +optional + ObservedGeneration int64 `json:"observedGeneration,omitempty"` } // ServerlessWorkflowList contains a list of SDKServerlessWorkflow @@ -62,16 +71,6 @@ type ServerlessWorkflowList struct { Items []ServerlessWorkflow `json:"items"` } -func (S ServerlessWorkflowList) DeepCopyObject() runtime.Object { - //TODO implement me - panic("implement me") -} - -func (S ServerlessWorkflow) DeepCopyObject() runtime.Object { - //TODO implement me - panic("implement me") -} - func init() { SchemeBuilder.Register(&ServerlessWorkflow{}, &ServerlessWorkflowList{}) } diff --git a/kubernetes/api/v1alpha1/zz_generated.deepcopy.go b/kubernetes/api/v1alpha1/zz_generated.deepcopy.go new file mode 100644 index 0000000..453a82c --- /dev/null +++ b/kubernetes/api/v1alpha1/zz_generated.deepcopy.go @@ -0,0 +1,113 @@ +//go:build !ignore_autogenerated + +// Copyright 2023 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by controller-gen. DO NOT EDIT. + +package v1alpha1 + +import ( + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ServerlessWorkflow) DeepCopyInto(out *ServerlessWorkflow) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + out.Status = in.Status +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflow. +func (in *ServerlessWorkflow) DeepCopy() *ServerlessWorkflow { + if in == nil { + return nil + } + out := new(ServerlessWorkflow) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. 
+func (in *ServerlessWorkflow) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ServerlessWorkflowList) DeepCopyInto(out *ServerlessWorkflowList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]ServerlessWorkflow, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowList. +func (in *ServerlessWorkflowList) DeepCopy() *ServerlessWorkflowList { + if in == nil { + return nil + } + out := new(ServerlessWorkflowList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *ServerlessWorkflowList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ServerlessWorkflowSpec) DeepCopyInto(out *ServerlessWorkflowSpec) { + *out = *in + in.Workflow.DeepCopyInto(&out.Workflow) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowSpec. +func (in *ServerlessWorkflowSpec) DeepCopy() *ServerlessWorkflowSpec { + if in == nil { + return nil + } + out := new(ServerlessWorkflowSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ServerlessWorkflowStatus) DeepCopyInto(out *ServerlessWorkflowStatus) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowStatus. +func (in *ServerlessWorkflowStatus) DeepCopy() *ServerlessWorkflowStatus { + if in == nil { + return nil + } + out := new(ServerlessWorkflowStatus) + in.DeepCopyInto(out) + return out +} diff --git a/model/common.go b/model/common.go index 6a9be3b..6993de7 100644 --- a/model/common.go +++ b/model/common.go @@ -18,6 +18,8 @@ package model type Common struct { // Metadata information // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Metadata Metadata `json:"metadata,omitempty"` } diff --git a/model/states.go b/model/states.go index fa834f7..a19429d 100644 --- a/model/states.go +++ b/model/states.go @@ -100,6 +100,8 @@ type BaseState struct { End *End `json:"end,omitempty"` // Metadata information. // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Metadata *Metadata `json:"metadata,omitempty"` } diff --git a/model/switch_state.go b/model/switch_state.go index 15d1a6d..88d0c83 100644 --- a/model/switch_state.go +++ b/model/switch_state.go @@ -141,6 +141,8 @@ type DataCondition struct { Condition string `json:"condition" validate:"required"` // Metadata information. // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Metadata Metadata `json:"metadata,omitempty"` // TODO End or Transition needs to be exclusive tag, one or another should be set. 
// Explicit transition to end diff --git a/parser/parser_test.go b/parser/parser_test.go index 8cc3de1..760f181 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -237,6 +237,7 @@ func TestFromFile(t *testing.T) { // metadata assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) assert.Equal(t, model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.Bearer.Metadata) + assert.Equal(t, &model.Metadata{"metadataState": model.FromString("state info")}, w.States[0].Metadata) }, }, { "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json index cd7391d..0bdfe5f 100644 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ b/parser/testdata/workflows/applicationrequest.multiauth.json @@ -48,6 +48,9 @@ { "name": "CheckApplication", "type": "switch", + "metadata": { + "metadataState": "state info" + }, "dataConditions": [ { "condition": "${ .applicants | .age >= 18 }", From 3ee6317b72cf000ce80bc48bc3964044e384cbe8 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 9 Oct 2024 14:50:49 -0400 Subject: [PATCH 090/110] Fix #195 - Fix 'End' model when is defined (#212) Signed-off-by: Ricardo Zanini --- model/action_validator.go | 2 +- model/workflow_validator.go | 4 +- .../testdata/workflows/compensation.sw.json | 72 +++++++++++++++++++ 3 files changed, 75 insertions(+), 3 deletions(-) create mode 100644 parser/testdata/workflows/compensation.sw.json diff --git a/model/action_validator.go b/model/action_validator.go index 384469b..3fac375 100644 --- a/model/action_validator.go +++ b/model/action_validator.go @@ -39,7 +39,7 @@ func actionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator. 
action.SubFlowRef != nil, } - if validationNotExclusiveParamters(values) { + if validationNotExclusiveParameters(values) { structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", val.TagExclusive, "") structLevel.ReportError(action.EventRef, "EventRef", "EventRef", val.TagExclusive, "") structLevel.ReportError(action.SubFlowRef, "SubFlowRef", "SubFlowRef", val.TagExclusive, "") diff --git a/model/workflow_validator.go b/model/workflow_validator.go index fd3d7bb..dd9d1e7 100644 --- a/model/workflow_validator.go +++ b/model/workflow_validator.go @@ -217,7 +217,7 @@ func transitionStructLevelValidationCtx(ctx ValidatorContext, structLevel valida func validTransitionAndEnd(structLevel validator.StructLevel, field any, transition *Transition, end *End) { hasTransition := transition != nil - isEnd := end != nil && (end.Terminate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end + isEnd := end != nil && (end.Terminate || end.Compensate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end if !hasTransition && !isEnd { structLevel.ReportError(field, "Transition", "transition", val.TagRequired, "") @@ -226,7 +226,7 @@ func validTransitionAndEnd(structLevel validator.StructLevel, field any, transit } } -func validationNotExclusiveParamters(values []bool) bool { +func validationNotExclusiveParameters(values []bool) bool { hasOne := false hasTwo := false diff --git a/parser/testdata/workflows/compensation.sw.json b/parser/testdata/workflows/compensation.sw.json new file mode 100644 index 0000000..567a501 --- /dev/null +++ b/parser/testdata/workflows/compensation.sw.json @@ -0,0 +1,72 @@ +{ + "id": "compensation", + "version": "1.0", + "name": "Workflow Error example", + "description": "An example of how compensation works", + "start": "printStatus", + "states": [ + { + "name": "printStatus", + "type": "inject", + "data": { + "compensated": false + }, + "compensatedBy" : "compensating", + "transition": "branch" + }, + { + "name": "branch", + "type": "switch", + "dataConditions": [ + { + "condition": ".shouldCompensate==true", + "transition": { + "nextState" : "finish_compensate", + "compensate" : true + } + }, + { + "condition": ".shouldCompensate==false", + "transition": { + "nextState" : "finish_not_compensate", + "compensate" : false + } + } + ], + "defaultCondition": { + "end": true + } + }, + { + "name": "compensating", + "usedForCompensation" : true, + "type": "inject", + "data": { + "compensated": true + }, + "transition" : "compensating_more" + }, + { + "name": "compensating_more", + "usedForCompensation" : true, + "type": "inject", + "data": { + "compensating_more": "Real Betis Balompie" + } + }, + { + "name": "finish_compensate", + "type": "operation", + "actions": [], + "end": { + "compensate": true + } + }, + { + "name": "finish_not_compensate", + "type": "operation", + "actions": [], + "end": true + } + ] +} \ No newline at end of file From 44c3948465344093b57100643c17781e9a1c9685 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 9 Oct 2024 14:51:59 -0400 Subject: [PATCH 091/110] Update readme to release 2.4.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 85864f9..081e9a4 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed 
in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.3.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.3.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.4.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.3.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 0b585d7eb979a2eca2f0daf56bf37a25ef06172a Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 11 Oct 2024 12:07:52 -0400 Subject: [PATCH 092/110] Fix #213 - Review DataInputSchema unmarshal function; review k8s annotations for Object type (#214) Signed-off-by: Ricardo Zanini --- ...erlessworkflow.io_serverlessworkflows.yaml | 63 ++++++------------- model/action.go | 2 + model/common.go | 2 + model/event.go | 4 ++ model/inject_state.go | 3 +- model/object.go | 17 ++--- model/workflow.go | 10 ++- parser/parser_test.go | 11 ---- .../workflows/dataInputSchemaNotExists.yaml | 53 ++++++++++++++++ 9 files changed, 100 insertions(+), 65 deletions(-) create mode 100644 parser/testdata/workflows/dataInputSchemaNotExists.yaml diff --git a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml index 5584a65..1dfc265 100644 --- a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml +++ b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml @@ -90,7 +90,7 @@ spec: failOnValidationErrors: type: boolean schema: - type: object + x-kubernetes-preserve-unknown-fields: true required: - failOnValidationErrors - schema @@ -357,17 +357,15 @@ spec: event definitions. properties: contextAttributes: - additionalProperties: - type: object description: Add additional extension context attributes to the produced event. - type: object + x-kubernetes-preserve-unknown-fields: true data: description: |- If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) of the event referenced by triggerEventRef. - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should be @@ -397,11 +395,9 @@ spec: description: References a reusable function definition. properties: arguments: - additionalProperties: - type: object description: Arguments (inputs) to be passed to the referenced function - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should be @@ -630,17 +626,15 @@ spec: reusable event definitions. properties: contextAttributes: - additionalProperties: - type: object description: Add additional extension context attributes to the produced event. - type: object + x-kubernetes-preserve-unknown-fields: true data: description: |- If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by triggerEventRef. 
If object type, a custom object to become the data (payload) of the event referenced by triggerEventRef. - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -671,11 +665,9 @@ spec: definition. properties: arguments: - additionalProperties: - type: object description: Arguments (inputs) to be passed to the referenced function - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -875,17 +867,15 @@ spec: event definitions. properties: contextAttributes: - additionalProperties: - type: object description: Add additional extension context attributes to the produced event. - type: object + x-kubernetes-preserve-unknown-fields: true data: description: |- If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) of the event referenced by triggerEventRef. - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -915,11 +905,9 @@ spec: description: References a reusable function definition. properties: arguments: - additionalProperties: - type: object description: Arguments (inputs) to be passed to the referenced function - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -1074,12 +1062,9 @@ spec: data input. properties: data: - additionalProperties: - type: object description: JSON object which can be set as state's data input and can be manipulated via filter - minProperties: 1 - type: object + x-kubernetes-preserve-unknown-fields: true timeouts: description: State specific timeouts properties: @@ -1189,17 +1174,15 @@ spec: event definitions. properties: contextAttributes: - additionalProperties: - type: object description: Add additional extension context attributes to the produced event. - type: object + x-kubernetes-preserve-unknown-fields: true data: description: |- If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) of the event referenced by triggerEventRef. - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -1229,11 +1212,9 @@ spec: description: References a reusable function definition. properties: arguments: - additionalProperties: - type: object description: Arguments (inputs) to be passed to the referenced function - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -1401,17 +1382,15 @@ spec: reusable event definitions. properties: contextAttributes: - additionalProperties: - type: object description: Add additional extension context attributes to the produced event. - type: object + x-kubernetes-preserve-unknown-fields: true data: description: |- If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) of the event referenced by triggerEventRef. 
- type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -1442,11 +1421,9 @@ spec: definition. properties: arguments: - additionalProperties: - type: object description: Arguments (inputs) to be passed to the referenced function - type: object + x-kubernetes-preserve-unknown-fields: true invoke: default: sync description: Specifies if the function should @@ -1675,7 +1652,7 @@ spec: description: |- If string type, an expression which selects parts of the states data output to become the workflow data input of continued execution. If object type, a custom object to become the workflow data input of the continued execution - type: object + x-kubernetes-preserve-unknown-fields: true version: description: Version of the workflow to continue execution as. @@ -1730,7 +1707,7 @@ spec: description: |- If String, expression which selects parts of the states data output to become the data of the produced event. If object a custom object to become the data of produced event. - type: object + x-kubernetes-preserve-unknown-fields: true eventRef: description: Reference to a defined unique event name in the events definition @@ -1784,7 +1761,7 @@ spec: description: |- If String, expression which selects parts of the states data output to become the data of the produced event. If object a custom object to become the data of produced event. - type: object + x-kubernetes-preserve-unknown-fields: true eventRef: description: Reference to a defined unique event name in the events definition diff --git a/model/action.go b/model/action.go index 7bc4fba..2635849 100644 --- a/model/action.go +++ b/model/action.go @@ -80,6 +80,8 @@ type FunctionRef struct { RefName string `json:"refName" validate:"required"` // Arguments (inputs) to be passed to the referenced function // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields // TODO: validate it as required if function type is graphql Arguments map[string]Object `json:"arguments,omitempty"` // Used if function type is graphql. String containing a valid GraphQL selection set. diff --git a/model/common.go b/model/common.go index 6993de7..3d4f000 100644 --- a/model/common.go +++ b/model/common.go @@ -24,4 +24,6 @@ type Common struct { } // Metadata information +// +kubebuilder:pruning:PreserveUnknownFields +// +kubebuilder:validation:Schemaless type Metadata map[string]Object diff --git a/model/event.go b/model/event.go index 96069bf..bad1ce4 100644 --- a/model/event.go +++ b/model/event.go @@ -106,9 +106,13 @@ type EventRef struct { // of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) // of the event referenced by triggerEventRef. // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Data *Object `json:"data,omitempty"` // Add additional extension context attributes to the produced event. // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` // Specifies if the function should be invoked sync or async. Default is sync. // +kubebuilder:validation:Enum=async;sync diff --git a/model/inject_state.go b/model/inject_state.go index a195423..e3995c8 100644 --- a/model/inject_state.go +++ b/model/inject_state.go @@ -21,7 +21,8 @@ import ( // InjectState used to inject static data into state data input. 
type InjectState struct { // JSON object which can be set as state's data input and can be manipulated via filter - // +kubebuilder:validation:MinProperties=1 + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Data map[string]Object `json:"data" validate:"required,min=1"` // State specific timeouts // +optional diff --git a/model/object.go b/model/object.go index b8360a7..e19d7b0 100644 --- a/model/object.go +++ b/model/object.go @@ -44,15 +44,16 @@ const ( // - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is // parsed to int32 // -// +kubebuilder:validation:Type=object +// +kubebuilder:pruning:PreserveUnknownFields +// +kubebuilder:validation:Schemaless type Object struct { - Type Type `json:"type,inline"` - StringValue string `json:"strVal,inline"` - IntValue int32 `json:"intVal,inline"` - FloatValue float64 - MapValue map[string]Object - SliceValue []Object - BoolValue bool `json:"boolValue,inline"` + Type Type `json:"type,inline"` + StringValue string `json:"strVal,inline"` + IntValue int32 `json:"intVal,inline"` + FloatValue float64 `json:"floatVal,inline"` + MapValue map[string]Object `json:"mapVal,inline"` + SliceValue []Object `json:"sliceVal,inline"` + BoolValue bool `json:"boolValue,inline"` } // UnmarshalJSON implements json.Unmarshaler diff --git a/model/workflow.go b/model/workflow.go index aa72d1f..54723bb 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -154,9 +154,9 @@ type BaseWorkflow struct { // +optional KeepActive bool `json:"keepActive,omitempty"` // Metadata custom information shared with the runtime. + // +optional // +kubebuilder:validation:Schemaless // +kubebuilder:pruning:PreserveUnknownFields - // +optional Metadata Metadata `json:"metadata,omitempty"` // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false // +optional @@ -471,6 +471,8 @@ type ContinueAs struct { // If string type, an expression which selects parts of the states data output to become the workflow data input of // continued execution. If object type, a custom object to become the workflow data input of the continued execution // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Data Object `json:"data,omitempty"` // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. // Overwrites any specific settings set by that workflow @@ -495,6 +497,8 @@ type ProduceEvent struct { // If String, expression which selects parts of the states data output to become the data of the produced event. // If object a custom object to become the data of produced event. // +optional + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Data Object `json:"data,omitempty"` // Add additional event extension context attributes. 
// +optional @@ -513,6 +517,8 @@ type StateDataFilter struct { // +builder-gen:new-call=ApplyDefault type DataInputSchema struct { // +kubebuilder:validation:Required + // +kubebuilder:validation:Schemaless + // +kubebuilder:pruning:PreserveUnknownFields Schema *Object `json:"schema" validate:"required"` // +kubebuilder:validation:Required FailOnValidationErrors bool `json:"failOnValidationErrors"` @@ -557,7 +563,7 @@ func (d *DataInputSchema) UnmarshalJSON(data []byte) error { } d.Schema = new(Object) - return util.UnmarshalObjectOrFile("schema", data, &d.Schema) + return util.UnmarshalObject("schema", data, &d.Schema) } // ApplyDefault set the default values for Data Input Schema diff --git a/parser/parser_test.go b/parser/parser_test.go index 760f181..daf6608 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -581,17 +581,6 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) assert.Equal(t, true, w.States[10].End.Terminate) }, - }, { - "./testdata/workflows/dataInputSchemaValidation.yaml", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.DataInputSchema) - expected := model.DataInputSchema{} - data, err := util.LoadExternalResource("file://testdata/datainputschema.json") - err1 := util.UnmarshalObject("schema", data, &expected.Schema) - assert.Nil(t, err) - assert.Nil(t, err1) - assert.Equal(t, expected.Schema, w.DataInputSchema.Schema) - assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) - }, }, { "./testdata/workflows/dataInputSchemaObject.json", func(t *testing.T, w *model.Workflow) { assert.NotNil(t, w.DataInputSchema) diff --git a/parser/testdata/workflows/dataInputSchemaNotExists.yaml b/parser/testdata/workflows/dataInputSchemaNotExists.yaml new file mode 100644 index 0000000..7aa3712 --- /dev/null +++ b/parser/testdata/workflows/dataInputSchemaNotExists.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +id: orderprocessing +version: '1.0' +specVersion: '0.8' +start: ChooseOnLanguage +dataInputSchema: + schema: doesnexist.json + failOnValidationErrors: true +functions: + - name: greetFunction + type: custom + operation: sysout +states: + - name: ChooseOnLanguage + type: switch + dataConditions: + - condition: "${ .language == \"English\" }" + transition: GreetInEnglish + - condition: "${ .language == \"Spanish\" }" + transition: GreetInSpanish + defaultCondition: GreetInEnglish + - name: GreetInEnglish + type: inject + data: + greeting: "Hello from JSON Workflow, " + transition: GreetPerson + - name: GreetInSpanish + type: inject + data: + greeting: "Saludos desde JSON Workflow, " + transition: GreetPerson + - name: GreetPerson + type: operation + actions: + - name: greetAction + functionRef: + refName: greetFunction + arguments: + message: ".greeting+.name" + end: true \ No newline at end of file From fc2004639970ddbd85f06c01b3d3350e5914bd9d Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 15 Oct 2024 13:41:21 -0400 Subject: [PATCH 093/110] Bump version --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 081e9a4..e4d87c6 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Current status of features implemented in the SDK is listed in the table below: | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.4.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.3.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.4.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | ## How to use From 62685f1544195736c9723b6d5852ae2d423dd9e5 Mon Sep 17 00:00:00 2001 From: Roberto Oliveira Date: Wed, 6 Nov 2024 11:35:07 -0500 Subject: [PATCH 094/110] use go 1.22.0 instead of 1.22.8 (#215) Signed-off-by: Roberto Oliveira --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 0d7ee6b..78d1ab8 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/serverlessworkflow/sdk-go/v2 -go 1.22.8 +go 1.22.0 toolchain go1.23.1 From 37343d20996ff7b5b1d35307932eb079a7d45c18 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 11:25:03 -0500 Subject: [PATCH 095/110] chore(deps): bump golang.org/x/crypto from 0.28.0 to 0.31.0 (#216) Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.28.0 to 0.31.0. - [Commits](https://github.com/golang/crypto/compare/v0.28.0...v0.31.0) --- updated-dependencies: - dependency-name: golang.org/x/crypto dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 6 +++--- go.sum | 16 ++++++---------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/go.mod b/go.mod index 78d1ab8..8211c9e 100644 --- a/go.mod +++ b/go.mod @@ -31,10 +31,10 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/x448/float16 v0.8.4 // indirect - golang.org/x/crypto v0.28.0 // indirect + golang.org/x/crypto v0.31.0 // indirect golang.org/x/net v0.26.0 // indirect - golang.org/x/sys v0.26.0 // indirect - golang.org/x/text v0.19.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect k8s.io/klog/v2 v2.130.1 // indirect diff --git a/go.sum b/go.sum index 37144b7..554148a 100644 --- a/go.sum +++ b/go.sum @@ -4,8 +4,6 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= @@ -16,8 +14,6 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= -github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= @@ -69,8 +65,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= 
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -85,12 +81,12 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= From e2f1f94acc59b0747f3e5ef25385c1a076b450b4 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 18 Dec 2024 11:27:23 -0500 Subject: [PATCH 096/110] Remove codecov --- .github/workflows/Go-SDK-PR-Check.yaml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index aecd842..aad9ec9 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -72,16 +72,6 @@ jobs: version: latest - name: Install cover run: go get -modfile=tools.mod golang.org/x/tools/cmd/cover - - name: Validate codcov yaml file - run: curl -vvv --data-binary @codecov.yml https://codecov.io/validate - name: Run Unit Tests run: | go test ./... 
-coverprofile test_coverage.out -covermode=atomic - - name: Upload results to codecov - uses: codecov/codecov-action@v4 - with: - files: ./test_coverage.out - flags: sdk-go - name: sdk-go - fail_ci_if_error: true - verbose: true From bc3336e242cfe856b198cc99aed0cff5c7bee5bb Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 23 Dec 2024 09:55:33 -0500 Subject: [PATCH 097/110] NO-ISSUE: Remove unnecessary GH workflows and fix sec advisories (#217) Signed-off-by: Ricardo Zanini --- .github/labeler.yml | 8 +++- .github/labels.yml | 27 ------------ .../Go-SDK-Check-k8s-integration.yaml | 4 ++ .github/workflows/Go-SDK-PR-Check.yaml | 6 ++- .github/workflows/prow_commands.yml | 43 ------------------- .../prow_cron_pull_request_merge.yml | 28 ------------ .github/workflows/prow_remove_lgtm.yml | 25 ----------- .github/workflows/pull_request_labeler.yml | 8 ++-- .github/workflows/stale.yaml | 4 +- 9 files changed, 21 insertions(+), 132 deletions(-) delete mode 100644 .github/labels.yml delete mode 100644 .github/workflows/prow_commands.yml delete mode 100644 .github/workflows/prow_cron_pull_request_merge.yml delete mode 100644 .github/workflows/prow_remove_lgtm.yml diff --git a/.github/labeler.yml b/.github/labeler.yml index 5d4b3d7..49abd17 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -12,5 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -source: - - '**/*' \ No newline at end of file +"documentation :notebook:": + - changed-files: + - any-glob-to-any-file: ['contrib/*', '**/*.md'] +kubernetes: + - changed-files: + - any-glob-to-any-file: ['kubernetes/*', 'hack/builder-gen.sh', 'hack/deepcopy-gen.sh', 'Makefile'] diff --git a/.github/labels.yml b/.github/labels.yml deleted file mode 100644 index 7f6aa94..0000000 --- a/.github/labels.yml +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -area: - - 'bug' - - 'important' - - 'feature' - -kind: - - 'failing-test' - - 'cleanup' - -priority: - - 'P0' - - 'P1' - - 'P2' diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml b/.github/workflows/Go-SDK-Check-k8s-integration.yaml index f91b740..6d97acb 100644 --- a/.github/workflows/Go-SDK-Check-k8s-integration.yaml +++ b/.github/workflows/Go-SDK-Check-k8s-integration.yaml @@ -22,6 +22,10 @@ on: - "Makefile" branches: - main + +permissions: + contents: read + env: GO_VERSION: 1.22 jobs: diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index aad9ec9..7b8a8a3 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -22,6 +22,10 @@ on: - "Makefile" branches: - main + +permissions: + contents: read + env: GO_VERSION: 1.22 jobs: @@ -67,7 +71,7 @@ jobs: changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) [[ -z "$changed_files" ]] || (printf "Some files are not formatted properly: \n$changed_files\n Did you run 'make test' before sending the PR?" && exit 1) - name: Check lint - uses: golangci/golangci-lint-action@v6 + uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues with: version: latest - name: Install cover diff --git a/.github/workflows/prow_commands.yml b/.github/workflows/prow_commands.yml deleted file mode 100644 index d854fae..0000000 --- a/.github/workflows/prow_commands.yml +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: "Prow github actions" -on: - issue_comment: - types: [created] - -jobs: - execute: - runs-on: ubuntu-latest - steps: - - uses: jpmcb/prow-github-actions@v1.1.3 - with: - prow-commands: | - /assign - /unassign - /approve - /retitle - /area - /kind - /priority - /remove - /lgtm - /close - /reopen - /lock - /milestone - /hold - /cc - /uncc - github-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file diff --git a/.github/workflows/prow_cron_pull_request_merge.yml b/.github/workflows/prow_cron_pull_request_merge.yml deleted file mode 100644 index e48e6e1..0000000 --- a/.github/workflows/prow_cron_pull_request_merge.yml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: "Merge on lgtm label" -on: - schedule: - - cron: "30 * * * *" - -jobs: - execute: - runs-on: ubuntu-latest - steps: - - uses: jpmcb/prow-github-actions@v1.1.3 - with: - jobs: 'lgtm' - github-token: "${{ secrets.GITHUB_TOKEN }}" - merge-method: 'squash' \ No newline at end of file diff --git a/.github/workflows/prow_remove_lgtm.yml b/.github/workflows/prow_remove_lgtm.yml deleted file mode 100644 index 455f696..0000000 --- a/.github/workflows/prow_remove_lgtm.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: "Pull Request update lgtm" -on: pull_request - -jobs: - execute: - runs-on: ubuntu-latest - steps: - - uses: jpmcb/prow-github-actions@v1.1.3 - with: - jobs: 'lgtm' - github-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file diff --git a/.github/workflows/pull_request_labeler.yml b/.github/workflows/pull_request_labeler.yml index f8eab65..f270294 100644 --- a/.github/workflows/pull_request_labeler.yml +++ b/.github/workflows/pull_request_labeler.yml @@ -14,15 +14,13 @@ name: "Pull Request Labeler" on: -- pull_request_target + - pull_request_target jobs: - triage: + labeler: permissions: contents: read pull-requests: write runs-on: ubuntu-latest steps: - - uses: actions/labeler@v4.0.2 - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file + - uses: actions/labeler@v5 \ No newline at end of file diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index ccd7367..826fe00 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -16,7 +16,9 @@ name: Mark stale issues and pull requests on: schedule: - cron: "0 0 * * *" - +permissions: + issues: write + pull-requests: write jobs: stale: runs-on: ubuntu-latest From b9ff81ba671019d51ce8bd92314e91501f7959c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 17:56:00 -0500 Subject: [PATCH 098/110] chore(deps): bump golang.org/x/net from 0.26.0 to 0.33.0 (#219) Bumps [golang.org/x/net](https://github.com/golang/net) from 0.26.0 to 0.33.0. - [Commits](https://github.com/golang/net/compare/v0.26.0...v0.33.0) --- updated-dependencies: - dependency-name: golang.org/x/net dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8211c9e..d2e3b7e 100644 --- a/go.mod +++ b/go.mod @@ -32,7 +32,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/x448/float16 v0.8.4 // indirect golang.org/x/crypto v0.31.0 // indirect - golang.org/x/net v0.26.0 // indirect + golang.org/x/net v0.33.0 // indirect golang.org/x/sys v0.28.0 // indirect golang.org/x/text v0.21.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect diff --git a/go.sum b/go.sum index 554148a..393de63 100644 --- a/go.sum +++ b/go.sum @@ -73,8 +73,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= -golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= From ff500a090258a53544b6e5451518d390e62936cc Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 22 Jan 2025 16:49:31 -0500 Subject: [PATCH 099/110] Fix #203 - Update parsers to DSL 1.0.0 (#218) --- .../Go-SDK-Check-k8s-integration.yaml | 62 - .github/workflows/Go-SDK-PR-Check.yaml | 77 +- .gitignore | 1 + Makefile | 42 +- README.md | 158 +- builder/builder.go | 36 +- builder/builder_test.go | 223 +- ...erlessworkflow.io_serverlessworkflows.yaml | 1967 ----------- ...rkflow.io_v1alpha1_serverlessworkflow.yaml | 61 - go.mod | 45 +- go.sum | 124 +- hack/builder-gen.sh | 48 - hack/conv/main.go | 128 - hack/deepcopy-gen.sh | 49 - hack/integration-test.sh | 98 + kubernetes/api/v1alpha1/groupversion_info.go | 39 - .../api/v1alpha1/serverlessworkflow_types.go | 76 - .../api/v1alpha1/zz_generated.deepcopy.go | 113 - model/action.go | 127 - model/action_data_filter.go | 50 - model/action_data_filter_test.go | 83 - model/action_test.go | 83 - model/action_validator.go | 58 - model/action_validator_test.go | 203 -- model/auth.go | 221 -- model/auth_test.go | 89 - model/auth_validator_test.go | 210 -- model/authentication.go | 187 + model/authentication_oauth.go | 212 ++ model/authentication_oauth_test.go | 164 + model/authentication_test.go | 98 + model/builder.go | 99 + model/callback_state.go | 60 - model/callback_state_validator_test.go | 116 - model/common.go | 29 - model/delay_state.go | 33 - model/delay_state_test.go | 15 - model/delay_state_validator_test.go | 68 - model/doc.go | 18 - model/endpoint.go | 184 + model/endpoint_test.go | 144 + model/event.go | 134 - model/event_data_filter.go | 46 - model/event_data_filter_test.go | 81 - 
model/event_data_filter_validator_test.go | 22 - model/event_state.go | 109 - model/event_state_test.go | 152 - model/event_state_validator.go | 39 - model/event_state_validator_test.go | 189 - model/event_test.go | 120 - model/event_validator.go | 40 - model/event_validator_test.go | 216 -- model/extension.go | 120 + model/extension_test.go | 140 + model/foreach_state.go | 108 - model/foreach_state_test.go | 70 - model/foreach_state_validator.go | 45 - model/foreach_state_validator_test.go | 121 - model/function.go | 95 - model/function_validator_test.go | 74 - model/inject_state.go | 49 - model/inject_state_validator_test.go | 28 - model/object.go | 210 -- model/object_test.go | 181 - model/objects.go | 260 ++ model/objects_test.go | 190 + model/operation_state.go | 71 - model/operation_state_test.go | 72 - model/operation_state_validator_test.go | 121 - model/parallel_state.go | 123 - model/parallel_state_test.go | 67 - model/parallel_state_validator.go | 39 - model/parallel_state_validator_test.go | 252 -- model/retry.go | 57 - model/retry_test.go | 15 - model/retry_validator.go | 41 - model/retry_validator_test.go | 101 - model/runtime_expression.go | 81 + model/runtime_expression_test.go | 70 + model/sleep_state.go | 48 - model/sleep_state_test.go | 15 - model/sleep_state_validator_test.go | 95 - model/state_exec_timeout.go | 34 - model/state_exec_timeout_test.go | 113 - model/state_exec_timeout_validator_test.go | 95 - model/states.go | 283 -- model/states_validator.go | 51 - model/states_validator_test.go | 151 - model/switch_state.go | 152 - model/switch_state_test.go | 110 - model/switch_state_validator.go | 64 - model/switch_state_validator_test.go | 274 -- model/task.go | 418 +++ model/task_call.go | 112 + model/task_call_test.go | 480 +++ ...ta_filter_validator_test.go => task_do.go} | 13 +- model/task_do_test.go | 103 + model/task_event.go | 282 ++ model/task_event_test.go | 231 ++ model/task_for.go | 30 + model/task_for_test.go | 150 + model/task_fork.go | 27 + model/task_fork_test.go | 116 + model/task_raise.go | 84 + model/task_raise_test.go | 99 + model/task_run.go | 124 + model/task_run_test.go | 196 + model/task_set.go | 36 + model/task_set_test.go | 104 + model/task_switch.go | 44 + model/task_switch_test.go | 151 + model/task_test.go | 121 + model/task_try.go | 202 ++ model/task_try_test.go | 171 + model/task_wait.go | 68 + model/task_wait_test.go | 88 + model/timeout.go | 232 ++ model/timeout_test.go | 228 ++ model/validator.go | 389 ++ model/validator_test.go | 68 + model/workflow.go | 684 +--- model/workflow_ref.go | 72 - model/workflow_ref_test.go | 105 - model/workflow_ref_validator_test.go | 68 - model/workflow_test.go | 1022 +++--- model/workflow_validator.go | 247 -- model/workflow_validator_test.go | 544 --- model/zz_generated.buildergen.go | 3139 ----------------- model/zz_generated.deepcopy.go | 1837 ---------- parser/cmd/main.go | 67 + parser/parser.go | 11 +- parser/parser_test.go | 1157 +----- .../testdata/applicationrequestfunctions.json | 8 - .../testdata/applicationrequestretries.json | 9 - parser/testdata/constantsDogs.json | 9 - parser/testdata/datainputschema.json | 16 - parser/testdata/errors.json | 13 - parser/testdata/eventbasedgreetingevents.json | 9 - parser/testdata/functiondefs.json | 16 - .../{eventdefs.yml => invalid_workflow.yaml} | 25 +- parser/testdata/secrets.json | 6 - parser/testdata/timeouts.json | 6 - parser/testdata/valid_workflow.json | 19 + .../auth.yaml => valid_workflow.yaml} | 24 +- .../workflows/VetAppointmentWorkflow.json | 
45 - .../workflows/actiondata-defaultvalue.yaml | 34 - .../applicationrequest-issue103.json | 79 - .../applicationrequest-issue16.sw.yaml | 48 - .../workflows/applicationrequest-issue69.json | 79 - .../workflows/applicationrequest.json | 75 - .../applicationrequest.multiauth.json | 107 - .../workflows/applicationrequest.rp.json | 69 - .../workflows/applicationrequest.url.json | 69 - parser/testdata/workflows/checkInbox.json | 53 - parser/testdata/workflows/checkcarvitals.json | 60 - .../workflows/checkinbox.cron-test.sw.yaml | 45 - parser/testdata/workflows/checkinbox.sw.yaml | 49 - parser/testdata/workflows/compensate.sw.json | 99 - .../testdata/workflows/compensation.sw.json | 72 - .../workflows/conditionbasedstate.yaml | 40 - .../workflows/continue-as-example.yaml | 58 - .../customerbankingtransactions.json | 43 - .../workflows/customercreditcheck.json | 96 - .../workflows/dataInputSchemaNotExists.yaml | 53 - .../workflows/dataInputSchemaObject.json | 56 - .../workflows/dataInputSchemaValidation.yaml | 28 - .../workflows/eventbaseddataandswitch.sw.json | 107 - .../workflows/eventbasedgreeting.sw.json | 52 - .../workflows/eventbasedgreeting.sw.p.json | 49 - .../eventbasedgreetingexclusive.sw.json | 83 - .../eventbasedgreetingnonexclusive.sw.json | 62 - .../workflows/eventbasedswitch.sw.json | 92 - .../workflows/eventbasedswitchstate.json | 70 - .../testdata/workflows/fillglassofwater.json | 48 - .../workflows/finalizeCollegeApplication.json | 74 - .../greetings-constants-file.sw.yaml | 40 - .../workflows/greetings-secret-file.sw.yaml | 40 - .../workflows/greetings-secret.sw.yaml | 41 - .../workflows/greetings-v08-spec.sw.yaml | 273 -- parser/testdata/workflows/greetings.sw.json | 34 - parser/testdata/workflows/greetings.sw.yaml | 40 - .../workflows/greetings_sleep.sw.json | 47 - .../workflows/handleCarAuctionBid.json | 49 - parser/testdata/workflows/helloworld.json | 18 - parser/testdata/workflows/jobmonitoring.json | 127 - .../testdata/workflows/onboardcustomer.json | 25 - parser/testdata/workflows/parallelexec.json | 34 - .../workflows/patientVitalsWorkflow.json | 110 - .../workflows/patientonboarding.sw.yaml | 64 - .../workflows/paymentconfirmation.json | 96 - .../workflows/provisionorders.sw.json | 100 - .../workflows/purchaseorderworkflow.sw.json | 162 - .../roomreadings.timeouts.file.sw.json | 80 - .../workflows/roomreadings.timeouts.sw.json | 88 - .../workflows/sendcloudeventonprovision.json | 47 - .../testdata/workflows/sendcustomeremail.json | 32 - .../testdata/workflows/solvemathproblems.json | 37 - parser/testdata/workflows/urifiles/auth.json | 17 - parser/testdata/workflows/vitalscheck.json | 53 - .../applicationrequest-issue44.json | 85 - .../applicationrequest-issue74.json | 82 - ...pplicationrequest.auth.invalid.format.json | 85 - .../applicationrequest.authdupl.json | 96 - test/path.go | 58 - test/path_test.go | 31 - test/utils.go | 37 + tools.mod | 4 +- util/floatstr/floatstr.go | 179 - util/floatstr/floatstr_test.go | 109 - util/unmarshal.go | 335 -- util/unmarshal_benchmark_test.go | 31 - util/unmarshal_test.go | 290 -- validator/tags.go | 20 - validator/validator.go | 120 - validator/validator_test.go | 228 -- validator/workflow.go | 154 - 216 files changed, 7655 insertions(+), 22749 deletions(-) delete mode 100644 .github/workflows/Go-SDK-Check-k8s-integration.yaml delete mode 100644 config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml delete mode 100644 config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml delete mode 100755 
hack/builder-gen.sh delete mode 100644 hack/conv/main.go delete mode 100755 hack/deepcopy-gen.sh create mode 100755 hack/integration-test.sh delete mode 100644 kubernetes/api/v1alpha1/groupversion_info.go delete mode 100644 kubernetes/api/v1alpha1/serverlessworkflow_types.go delete mode 100644 kubernetes/api/v1alpha1/zz_generated.deepcopy.go delete mode 100644 model/action.go delete mode 100644 model/action_data_filter.go delete mode 100644 model/action_data_filter_test.go delete mode 100644 model/action_test.go delete mode 100644 model/action_validator.go delete mode 100644 model/action_validator_test.go delete mode 100644 model/auth.go delete mode 100644 model/auth_test.go delete mode 100644 model/auth_validator_test.go create mode 100644 model/authentication.go create mode 100644 model/authentication_oauth.go create mode 100644 model/authentication_oauth_test.go create mode 100644 model/authentication_test.go create mode 100644 model/builder.go delete mode 100644 model/callback_state.go delete mode 100644 model/callback_state_validator_test.go delete mode 100644 model/common.go delete mode 100644 model/delay_state.go delete mode 100644 model/delay_state_test.go delete mode 100644 model/delay_state_validator_test.go delete mode 100644 model/doc.go create mode 100644 model/endpoint.go create mode 100644 model/endpoint_test.go delete mode 100644 model/event.go delete mode 100644 model/event_data_filter.go delete mode 100644 model/event_data_filter_test.go delete mode 100644 model/event_data_filter_validator_test.go delete mode 100644 model/event_state.go delete mode 100644 model/event_state_test.go delete mode 100644 model/event_state_validator.go delete mode 100644 model/event_state_validator_test.go delete mode 100644 model/event_test.go delete mode 100644 model/event_validator.go delete mode 100644 model/event_validator_test.go create mode 100644 model/extension.go create mode 100644 model/extension_test.go delete mode 100644 model/foreach_state.go delete mode 100644 model/foreach_state_test.go delete mode 100644 model/foreach_state_validator.go delete mode 100644 model/foreach_state_validator_test.go delete mode 100644 model/function.go delete mode 100644 model/function_validator_test.go delete mode 100644 model/inject_state.go delete mode 100644 model/inject_state_validator_test.go delete mode 100644 model/object.go delete mode 100644 model/object_test.go create mode 100644 model/objects.go create mode 100644 model/objects_test.go delete mode 100644 model/operation_state.go delete mode 100644 model/operation_state_test.go delete mode 100644 model/operation_state_validator_test.go delete mode 100644 model/parallel_state.go delete mode 100644 model/parallel_state_test.go delete mode 100644 model/parallel_state_validator.go delete mode 100644 model/parallel_state_validator_test.go delete mode 100644 model/retry.go delete mode 100644 model/retry_test.go delete mode 100644 model/retry_validator.go delete mode 100644 model/retry_validator_test.go create mode 100644 model/runtime_expression.go create mode 100644 model/runtime_expression_test.go delete mode 100644 model/sleep_state.go delete mode 100644 model/sleep_state_test.go delete mode 100644 model/sleep_state_validator_test.go delete mode 100644 model/state_exec_timeout.go delete mode 100644 model/state_exec_timeout_test.go delete mode 100644 model/state_exec_timeout_validator_test.go delete mode 100644 model/states.go delete mode 100644 model/states_validator.go delete mode 100644 model/states_validator_test.go delete mode 100644 
model/switch_state.go delete mode 100644 model/switch_state_test.go delete mode 100644 model/switch_state_validator.go delete mode 100644 model/switch_state_validator_test.go create mode 100644 model/task.go create mode 100644 model/task_call.go create mode 100644 model/task_call_test.go rename model/{action_data_filter_validator_test.go => task_do.go} (61%) create mode 100644 model/task_do_test.go create mode 100644 model/task_event.go create mode 100644 model/task_event_test.go create mode 100644 model/task_for.go create mode 100644 model/task_for_test.go create mode 100644 model/task_fork.go create mode 100644 model/task_fork_test.go create mode 100644 model/task_raise.go create mode 100644 model/task_raise_test.go create mode 100644 model/task_run.go create mode 100644 model/task_run_test.go create mode 100644 model/task_set.go create mode 100644 model/task_set_test.go create mode 100644 model/task_switch.go create mode 100644 model/task_switch_test.go create mode 100644 model/task_test.go create mode 100644 model/task_try.go create mode 100644 model/task_try_test.go create mode 100644 model/task_wait.go create mode 100644 model/task_wait_test.go create mode 100644 model/timeout.go create mode 100644 model/timeout_test.go create mode 100644 model/validator.go create mode 100644 model/validator_test.go delete mode 100644 model/workflow_ref.go delete mode 100644 model/workflow_ref_test.go delete mode 100644 model/workflow_ref_validator_test.go delete mode 100644 model/workflow_validator.go delete mode 100644 model/workflow_validator_test.go delete mode 100644 model/zz_generated.buildergen.go delete mode 100644 model/zz_generated.deepcopy.go create mode 100644 parser/cmd/main.go delete mode 100644 parser/testdata/applicationrequestfunctions.json delete mode 100644 parser/testdata/applicationrequestretries.json delete mode 100644 parser/testdata/constantsDogs.json delete mode 100644 parser/testdata/datainputschema.json delete mode 100644 parser/testdata/errors.json delete mode 100644 parser/testdata/eventbasedgreetingevents.json delete mode 100644 parser/testdata/functiondefs.json rename parser/testdata/{eventdefs.yml => invalid_workflow.yaml} (59%) delete mode 100644 parser/testdata/secrets.json delete mode 100644 parser/testdata/timeouts.json create mode 100644 parser/testdata/valid_workflow.json rename parser/testdata/{workflows/urifiles/auth.yaml => valid_workflow.yaml} (62%) delete mode 100644 parser/testdata/workflows/VetAppointmentWorkflow.json delete mode 100644 parser/testdata/workflows/actiondata-defaultvalue.yaml delete mode 100644 parser/testdata/workflows/applicationrequest-issue103.json delete mode 100644 parser/testdata/workflows/applicationrequest-issue16.sw.yaml delete mode 100644 parser/testdata/workflows/applicationrequest-issue69.json delete mode 100644 parser/testdata/workflows/applicationrequest.json delete mode 100644 parser/testdata/workflows/applicationrequest.multiauth.json delete mode 100644 parser/testdata/workflows/applicationrequest.rp.json delete mode 100644 parser/testdata/workflows/applicationrequest.url.json delete mode 100644 parser/testdata/workflows/checkInbox.json delete mode 100644 parser/testdata/workflows/checkcarvitals.json delete mode 100644 parser/testdata/workflows/checkinbox.cron-test.sw.yaml delete mode 100644 parser/testdata/workflows/checkinbox.sw.yaml delete mode 100644 parser/testdata/workflows/compensate.sw.json delete mode 100644 parser/testdata/workflows/compensation.sw.json delete mode 100644 
parser/testdata/workflows/conditionbasedstate.yaml delete mode 100644 parser/testdata/workflows/continue-as-example.yaml delete mode 100644 parser/testdata/workflows/customerbankingtransactions.json delete mode 100644 parser/testdata/workflows/customercreditcheck.json delete mode 100644 parser/testdata/workflows/dataInputSchemaNotExists.yaml delete mode 100644 parser/testdata/workflows/dataInputSchemaObject.json delete mode 100644 parser/testdata/workflows/dataInputSchemaValidation.yaml delete mode 100644 parser/testdata/workflows/eventbaseddataandswitch.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreeting.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreeting.sw.p.json delete mode 100644 parser/testdata/workflows/eventbasedgreetingexclusive.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json delete mode 100644 parser/testdata/workflows/eventbasedswitch.sw.json delete mode 100644 parser/testdata/workflows/eventbasedswitchstate.json delete mode 100644 parser/testdata/workflows/fillglassofwater.json delete mode 100644 parser/testdata/workflows/finalizeCollegeApplication.json delete mode 100644 parser/testdata/workflows/greetings-constants-file.sw.yaml delete mode 100644 parser/testdata/workflows/greetings-secret-file.sw.yaml delete mode 100644 parser/testdata/workflows/greetings-secret.sw.yaml delete mode 100644 parser/testdata/workflows/greetings-v08-spec.sw.yaml delete mode 100644 parser/testdata/workflows/greetings.sw.json delete mode 100644 parser/testdata/workflows/greetings.sw.yaml delete mode 100644 parser/testdata/workflows/greetings_sleep.sw.json delete mode 100644 parser/testdata/workflows/handleCarAuctionBid.json delete mode 100644 parser/testdata/workflows/helloworld.json delete mode 100644 parser/testdata/workflows/jobmonitoring.json delete mode 100644 parser/testdata/workflows/onboardcustomer.json delete mode 100644 parser/testdata/workflows/parallelexec.json delete mode 100644 parser/testdata/workflows/patientVitalsWorkflow.json delete mode 100644 parser/testdata/workflows/patientonboarding.sw.yaml delete mode 100644 parser/testdata/workflows/paymentconfirmation.json delete mode 100644 parser/testdata/workflows/provisionorders.sw.json delete mode 100644 parser/testdata/workflows/purchaseorderworkflow.sw.json delete mode 100644 parser/testdata/workflows/roomreadings.timeouts.file.sw.json delete mode 100644 parser/testdata/workflows/roomreadings.timeouts.sw.json delete mode 100644 parser/testdata/workflows/sendcloudeventonprovision.json delete mode 100644 parser/testdata/workflows/sendcustomeremail.json delete mode 100644 parser/testdata/workflows/solvemathproblems.json delete mode 100644 parser/testdata/workflows/urifiles/auth.json delete mode 100644 parser/testdata/workflows/vitalscheck.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue44.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue74.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest.authdupl.json delete mode 100644 test/path.go delete mode 100644 test/path_test.go create mode 100644 test/utils.go delete mode 100644 util/floatstr/floatstr.go delete mode 100644 util/floatstr/floatstr_test.go delete mode 100644 util/unmarshal.go delete mode 100644 util/unmarshal_benchmark_test.go delete mode 100644 util/unmarshal_test.go delete mode 100644 
validator/tags.go delete mode 100644 validator/validator.go delete mode 100644 validator/validator_test.go delete mode 100644 validator/workflow.go diff --git a/.github/workflows/Go-SDK-Check-k8s-integration.yaml b/.github/workflows/Go-SDK-Check-k8s-integration.yaml deleted file mode 100644 index 6d97acb..0000000 --- a/.github/workflows/Go-SDK-Check-k8s-integration.yaml +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Go SDK Check k8s integration -on: - pull_request: - paths-ignore: - - "**.md" - - "hack/**" - - "LICENSE" - - "Makefile" - branches: - - main - -permissions: - contents: read - -env: - GO_VERSION: 1.22 -jobs: - basic_checks: - name: Basic Checks - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v4 - - name: Setup Go ${{ env.GO_VERSION }} - uses: actions/setup-go@v5 - with: - go-version: ${{ env.GO_VERSION }} - id: go - - name: Cache dependencies - uses: actions/cache@v4 - with: - path: | - ~/.cache/go-build - ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - name: Cache tools - uses: actions/cache@v4 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} - restore-keys: | - ${{ runner.os }}-go-tools- - - name: Check K8s Integration - run: | - export GOPATH=$(go env GOPATH) - make kube-integration \ No newline at end of file diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 7b8a8a3..8d4da2f 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -1,10 +1,10 @@ -# Copyright 2020 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ # limitations under the License. 
name: Go SDK PR Checks + on: pull_request: paths-ignore: @@ -23,11 +24,13 @@ on: branches: - main + permissions: contents: read env: GO_VERSION: 1.22 + jobs: basic_checks: name: Basic Checks @@ -35,12 +38,14 @@ jobs: steps: - name: Checkout Code uses: actions/checkout@v4 - - name: Setup Go ${{ env.GO_VERSION }} + + - name: Setup Go uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} id: go - - name: Cache dependencies + + - name: Cache Go Modules uses: actions/cache@v4 with: path: | @@ -49,33 +54,73 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - name: Cache tools + + - name: Cache Tools uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-tools-${{ hashFiles('**/tools.sum') }} restore-keys: | ${{ runner.os }}-go-tools- + - name: Check Headers run: | make addheaders changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are missing the headers: \n$changed_files\n Did you run 'make lint' before sending the PR" && exit 1) - - name: Check DeepCopy Generation - run: | - export GOPATH=$(go env GOPATH) - make deepcopy + if [[ -n "$changed_files" ]]; then + echo "❌ Some files are missing headers:\n$changed_files" + exit 1 + fi + - name: Check Formatting run: | make fmt changed_files=$(git status -s | grep -v 'go.mod\|go.sum\|tools.mod\|tools.sum' || :) - [[ -z "$changed_files" ]] || (printf "Some files are not formatted properly: \n$changed_files\n Did you run 'make test' before sending the PR?" && exit 1) - - name: Check lint - uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues + if [[ -n "$changed_files" ]]; then + echo "❌ Some files are not formatted correctly:\n$changed_files" + exit 1 + fi + + - name: Run Linter + uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.1.1 - Please ALWAYS use SHA to avoid GH sec issues with: version: latest - - name: Install cover - run: go get -modfile=tools.mod golang.org/x/tools/cmd/cover + + - name: Install Cover Tool + run: go install golang.org/x/tools/cmd/cover@latest + - name: Run Unit Tests + run: go test ./... -coverprofile=test_coverage.out -covermode=atomic + + - name: Upload Coverage Report + uses: actions/upload-artifact@v3 + with: + name: Test Coverage Report + path: test_coverage.out + + integration_tests: + name: Integration Tests + runs-on: ubuntu-latest + needs: basic_checks + steps: + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + id: go + + - name: Run Integration Tests run: | - go test ./... -coverprofile test_coverage.out -covermode=atomic + chmod +x ./hack/integration-test.sh + ./hack/integration-test.sh + continue-on-error: true + + - name: Upload JUnit Report + if: always() + uses: actions/upload-artifact@v3 + with: + name: Integration Test JUnit Report + path: ./integration-test-junit.xml diff --git a/.gitignore b/.gitignore index 55109f1..914d9c4 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ bin *.out .vscode +integration-test-junit.xml diff --git a/Makefile b/Makefile index a8f36db..767d158 100644 --- a/Makefile +++ b/Makefile @@ -22,44 +22,14 @@ lint: .PHONY: test coverage="false" -test: deepcopy buildergen +test: @echo "πŸ§ͺ Running tests..." @go test ./... @echo "βœ… Tests completed!" 
-deepcopy: $(DEEPCOPY_GEN) ## Download deepcopy-gen locally if necessary. - @echo "πŸ“¦ Running deepcopy-gen..." - @./hack/deepcopy-gen.sh deepcopy > /dev/null - @make lint - @echo "βœ… Deepcopy generation and linting completed!" - -buildergen: $(BUILDER_GEN) ## Download builder-gen locally if necessary. - @echo "πŸ“¦ Running builder-gen..." - @./hack/builder-gen.sh buildergen > /dev/null - @make lint - @echo "βœ… Builder generation and linting completed!" - -.PHONY: kube-integration -kube-integration: controller-gen - @echo "πŸ“¦ Generating Kubernetes objects..." - @$(CONTROLLER_GEN) object:headerFile="./hack/boilerplate.txt" paths="./kubernetes/api/..." - @echo "πŸ“¦ Generating Kubernetes CRDs..." - @$(CONTROLLER_GEN) rbac:roleName=manager-role crd:allowDangerousTypes=true webhook paths="./kubernetes/..." output:crd:artifacts:config=config/crd/bases - @make lint - @echo "βœ… Kubernetes integration completed!" - - -#################################### -# install controller-gen tool -## Location to install dependencies to -LOCALBIN ?= $(shell pwd)/bin -$(LOCALBIN): - mkdir -p $(LOCALBIN) - -CONTROLLER_TOOLS_VERSION ?= v0.16.3 -CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen -.PHONY: controller-gen -controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. -$(CONTROLLER_GEN): $(LOCALBIN) - test -s $(LOCALBIN)/controller-gen || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-tools/cmd/controller-gen@$(CONTROLLER_TOOLS_VERSION) +.PHONY: integration-test +integration-test: + @echo "πŸ”„ Running integration tests..." + @./hack/integration-test.sh + @echo "βœ… Integration tests completed!" \ No newline at end of file diff --git a/README.md b/README.md index e4d87c6..786333e 100644 --- a/README.md +++ b/README.md @@ -1,107 +1,154 @@ # Go SDK for Serverless Workflow -Here you will find all the [specification types](https://github.com/serverlessworkflow/specification/blob/main/schema/workflow.json) defined by our Json Schemas, in Go. -Table of Contents -================= +The Go SDK for Serverless Workflow provides the [specification types](https://github.com/serverlessworkflow/specification/blob/v1.0.0-alpha5/schema/workflow.yaml) defined by the Serverless Workflow DSL in Go, making it easy to parse, validate, and interact with workflows. 
+ +--- + +## Table of Contents - [Status](#status) - [Releases](#releases) -- [How to Use](#how-to-use) - - [Parsing Serverless Workflow files](#parsing-serverless-workflow-files) -- [Slack Channel](#slack-channel) -- [Contributors Guide](#contributors-guide) +- [Getting Started](#getting-started) + - [Installation](#installation) + - [Parsing Workflow Files](#parsing-workflow-files) + - [Programmatic Workflow Creation](#programmatic-workflow-creation) +- [Slack Community](#slack-community) +- [Contributing](#contributing) - [Code Style](#code-style) - [EditorConfig](#editorconfig) - [Known Issues](#known-issues) +--- ## Status -Current status of features implemented in the SDK is listed in the table below: + +The current status of features implemented in the SDK is listed below: | Feature | Status | |-------------------------------------------- | ------------------ | -| Parse workflow JSON and YAML definitions | :heavy_check_mark: | -| Programmatically build workflow definitions | :no_entry_sign: | +| Parse workflow JSON and YAML definitions | :heavy_check_mark: | +| Programmatically build workflow definitions | :heavy_check_mark: | | Validate workflow definitions (Schema) | :heavy_check_mark: | -| Validate workflow definitions (Integrity) | :heavy_check_mark: | +| Validate workflow definitions (Integrity) | :no_entry_sign: | | Generate workflow diagram (SVG) | :no_entry_sign: | +--- ## Releases -| Latest Releases | Conformance to spec version | -|:--------------------------------------------------------------------------:| :---: | -| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | -| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | -| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.4.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | -## How to use +| Latest Releases | Conformance to Spec Version | +|:--------------------------------------------------------------------------:|:------------------------------------------------------------------------:| +| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | +| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.4.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0-alpha5) | + +--- -Run the following command in the root of your Go's project: +## Getting Started -```shell script -$ go get github.com/serverlessworkflow/sdk-go/v2 +### Installation + +To use the SDK in your Go project, run the following command: + +```shell +$ go get github.com/serverlessworkflow/sdk-go/v3 ``` -Your `go.mod` file should be updated to add a dependency from the 
Serverless Workflow specification. +This will update your `go.mod` file to include the Serverless Workflow SDK as a dependency. -To use the generated types, import the package in your go file like this: +Import the SDK in your Go file: ```go -import "github.com/serverlessworkflow/sdk-go/v2/model" +import "github.com/serverlessworkflow/sdk-go/v3/model" ``` -Then just reference the package in your Go file like `myfunction := model.Function{}`. +You can now use the SDK types and functions, for example: -### Parsing Serverless Workflow files +```go +package main -Serverless Workflow Specification supports YAML and JSON files for Workflow definitions. -To transform such files into a Go data structure, use: +import ( + "github.com/serverlessworkflow/sdk-go/v3/builder" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func main() { + workflowBuilder := builder.New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + workflow, _ := builder.Object(workflowBuilder) + // use your models +} + +``` + +### Parsing Workflow Files + +The Serverless Workflow Specification supports YAML and JSON files. Use the following example to parse a workflow file into a Go data structure: ```go -package sw +package main import ( - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/parser" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/parser" ) func ParseWorkflow(filePath string) (*model.Workflow, error) { workflow, err := parser.FromFile(filePath) if err != nil { return nil, err - } + } return workflow, nil -} +} ``` -The `Workflow` structure then can be used in your application. +This `Workflow` structure can then be used programmatically in your application. -## Slack Channel +### Programmatic Workflow Creation -Join us at [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf), channel `#serverless-workflow-sdk` and say hello πŸ™‹. +Workflows can also be created programmatically with the `builder` package, as shown in the installation example above. -## Contributors Guide +--- -This guide aims to guide newcomers to getting started with the project standards. +## Slack Community +Join the conversation and connect with other contributors on the [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf). Find us in the `#serverless-workflow-sdk` channel and say hello! πŸ™‹ -### Code Style +--- -For this project we use basically the default configuration for most used IDEs. -For the configurations below, make sure to properly configure your IDE: +## Contributing -- **imports**: goimports +We welcome contributions to improve this SDK. Please refer to the sections below for guidance on maintaining project standards. -This should be enough to get you started. +### Code Style -If you are unsure that your IDE is not correctly configured, you can run the lint checks: +- Use `goimports` for import organization.
+- Lint your code with: ```bash make lint ``` -If something goes wrong, the error will be printed, e.g.: +To automatically fix lint issues, use: + +```bash +make lint params=--fix +``` + +Example lint error: + ```bash $ make lint make addheaders @@ -112,26 +159,23 @@ util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) make: *** [lint] Error 1 ``` -Lint issues can be fixed with the `--fix` flag, this command can be used: -```bash -make lint params=--fix -``` - - ### EditorConfig -For IntelliJ you can find an example `editorconfig` file [here](contrib/intellij.editorconfig). To use it please visit -the Jetbrains [documentation](https://www.jetbrains.com/help/idea/editorconfig.html). +For IntelliJ users, an example `.editorconfig` file is available [here](contrib/intellij.editorconfig). See the [Jetbrains documentation](https://www.jetbrains.com/help/idea/editorconfig.html) for usage details. ### Known Issues -On MacOSX/darwin you might get this issue: +#### MacOS Issue: + +On MacOS, you might encounter the following error: + ``` - goimports: can't extract issues from gofmt diff output +goimports: can't extract issues from gofmt diff output ``` -To solve install the `diffutils` package: + +To resolve this, install `diffutils`: ```bash - brew install diffutils +brew install diffutils ``` diff --git a/builder/builder.go b/builder/builder.go index 97ef3b2..45ccc2e 100644 --- a/builder/builder.go +++ b/builder/builder.go @@ -16,46 +16,52 @@ package builder import ( "encoding/json" + "fmt" - "sigs.k8s.io/yaml" + "github.com/serverlessworkflow/sdk-go/v3/model" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" ) +// New initializes a new WorkflowBuilder instance. func New() *model.WorkflowBuilder { return model.NewWorkflowBuilder() } +// Yaml generates YAML output from the WorkflowBuilder using custom MarshalYAML implementations. func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { - data, err := Json(builder) + workflow, err := Object(builder) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to build workflow object: %w", err) } - return yaml.JSONToYAML(data) + return yaml.Marshal(workflow) } +// Json generates JSON output from the WorkflowBuilder. func Json(builder *model.WorkflowBuilder) ([]byte, error) { workflow, err := Object(builder) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to build workflow object: %w", err) } - return json.Marshal(workflow) + return json.MarshalIndent(workflow, "", " ") } +// Object builds and validates the Workflow object from the builder. func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { workflow := builder.Build() - ctx := model.NewValidatorContext(&workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, err + + // Validate the workflow object + if err := model.GetValidator().Struct(workflow); err != nil { + return nil, fmt.Errorf("workflow validation failed: %w", err) } - return &workflow, nil + + return workflow, nil } +// Validate validates any given object using the Workflow model validator. 
func Validate(object interface{}) error { - ctx := model.NewValidatorContext(object) - if err := val.GetValidator().StructCtx(ctx, object); err != nil { - return val.WorkflowError(err) + if err := model.GetValidator().Struct(object); err != nil { + return fmt.Errorf("validation failed: %w", err) } return nil } diff --git a/builder/builder_test.go b/builder/builder_test.go index 97b8c84..cbec324 100644 --- a/builder/builder_test.go +++ b/builder/builder_test.go @@ -15,106 +15,163 @@ package builder import ( + "errors" "testing" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" + "github.com/go-playground/validator/v10" + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/test" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/stretchr/testify/assert" ) -func prepareBuilder() *model.WorkflowBuilder { - builder := New().Key("key test").ID("id test") - - builder.AddFunctions().Name("function name").Operation("http://test") - builder.AddFunctions().Name("function name2").Operation("http://test") - - function3 := builder.AddFunctions().Name("function name2").Operation("http://test") - builder.RemoveFunctions(function3) - - state1 := builder.AddStates(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) +func TestBuilder_Yaml(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + // Generate YAML from the builder + yamlData, err := Yaml(builder) + assert.NoError(t, err) - inject := state1.InjectState() - inject.Data(map[string]model.Object{ - "test": model.FromMap(map[string]any{}), - }) + // Define the expected YAML structure + expectedYAML := `document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: +- task1: + call: http + if: ${condition} + with: + method: GET + endpoint: http://example.com +` - return builder + // Use assertYAMLEq to compare YAML structures + test.AssertYAMLEq(t, expectedYAML, string(yamlData)) } -func TestValidate(t *testing.T) { - state1 := model.NewStateBuilder(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) - err := Validate(state1) +func TestBuilder_Json(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + jsonData, err := Json(builder) assert.NoError(t, err) - state2 := model.NewStateBuilder(). 
- Type(model.StateTypeInject) - state2.End().Terminate(true) - err = Validate(state2.Build()) - if assert.Error(t, err) { - var workflowErrors val.WorkflowErrors - if errors.As(err, &workflowErrors) { - assert.Equal(t, "state.name is required", workflowErrors[0].Error()) - } else { - // Handle other error types if necessary - t.Errorf("Unexpected error: %v", err) - } - } + expectedJSON := `{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "if": "${condition}", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +}` + assert.JSONEq(t, expectedJSON, string(jsonData)) } -func TestObject(t *testing.T) { - workflow, err := Object(prepareBuilder()) - if assert.NoError(t, err) { - assert.Equal(t, "key test", workflow.Key) - assert.Equal(t, "id test", workflow.ID) - assert.Equal(t, "0.8", workflow.SpecVersion) - assert.Equal(t, "jq", workflow.ExpressionLang.String()) - assert.Equal(t, 2, len(workflow.Functions)) - - assert.Equal(t, "function name", workflow.Functions[0].Name) - assert.Equal(t, "function name2", workflow.Functions[1].Name) - } +func TestBuilder_Object(t *testing.T) { + builder := New(). + SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). + AddTask("task1", &model.CallHTTP{ + TaskBase: model.TaskBase{ + If: &model.RuntimeExpression{Value: "${condition}"}, + }, + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }) + + workflow, err := Object(builder) + assert.NoError(t, err) + assert.NotNil(t, workflow) + + assert.Equal(t, "1.0.0", workflow.Document.DSL) + assert.Equal(t, "examples", workflow.Document.Namespace) + assert.Equal(t, "example-workflow", workflow.Document.Name) + assert.Equal(t, "1.0.0", workflow.Document.Version) + assert.Len(t, *workflow.Do, 1) + assert.Equal(t, "http", (*workflow.Do)[0].Task.(*model.CallHTTP).Call) } -func TestJson(t *testing.T) { - data, err := Json(prepareBuilder()) - if assert.NoError(t, err) { - d := `{"id":"id test","key":"key test","version":"","specVersion":"0.8","expressionLang":"jq","states":[{"name":"state","type":"inject","end":{"terminate":true},"data":{"test":{}}}],"functions":[{"name":"function name","operation":"http://test","type":"rest"},{"name":"function name2","operation":"http://test","type":"rest"}]}` - assert.Equal(t, d, string(data)) +func TestBuilder_Validate(t *testing.T) { + workflow := &model.Workflow{ + Document: model.Document{ + DSL: "1.0.0", + Namespace: "examples", + Name: "example-workflow", + Version: "1.0.0", + }, + Do: &model.TaskList{ + { + Key: "task1", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", + Endpoint: model.NewEndpoint("http://example.com"), + }, + }, + }, + }, } -} -func TestYaml(t *testing.T) { - data, err := Yaml(prepareBuilder()) - if assert.NoError(t, err) { - d := `expressionLang: jq -functions: -- name: function name - operation: http://test - type: rest -- name: function name2 - operation: http://test - type: rest -id: id test -key: key test -specVersion: "0.8" -states: -- data: - test: {} - end: - terminate: true - name: state - type: inject -version: "" -` + err := Validate(workflow) + assert.NoError(t, err) - assert.Equal(t, d, string(data)) + // Test validation failure + workflow.Do = &model.TaskList{ + { + Key: "task2", + Task: &model.CallHTTP{ + Call: "http", + With: model.HTTPArguments{ + Method: "GET", // Missing Endpoint + 
}, + }, + }, + } + err = Validate(workflow) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + t.Logf("Validation errors: %v", validationErrors) + assert.Contains(t, validationErrors.Error(), "Do[0].Task.With.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") } } diff --git a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml b/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml deleted file mode 100644 index 1dfc265..0000000 --- a/config/crd/bases/serverlessworkflow.io_serverlessworkflows.yaml +++ /dev/null @@ -1,1967 +0,0 @@ -# Copyright 2024 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - annotations: - controller-gen.kubebuilder.io/version: v0.16.3 - name: serverlessworkflows.serverlessworkflow.io -spec: - group: serverlessworkflow.io - names: - kind: ServerlessWorkflow - listKind: ServerlessWorkflowList - plural: serverlessworkflows - singular: serverlessworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - description: ServerlessWorkflow ... - properties: - apiVersion: - description: |- - APIVersion defines the versioned schema of this representation of an object. - Servers should convert recognized schemas to the latest internal value, and - may reject unrecognized values. - More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources - type: string - kind: - description: |- - Kind is a string value representing the REST resource this object represents. - Servers may infer this from the endpoint the client submits requests to. - Cannot be updated. - In CamelCase. - More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds - type: string - metadata: - type: object - spec: - description: ServerlessWorkflowSpec defines a base API for integration - test with operator-sdk - properties: - annotations: - description: |- - Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important - qualities. - items: - type: string - type: array - auth: - description: |- - Auth definitions can be used to define authentication information that should be applied to resources defined - in the operation property of function definitions. It is not used as authentication information for the - function invocation, but just to access the resource containing the function invocation information. - x-kubernetes-preserve-unknown-fields: true - autoRetries: - description: AutoRetries If set to true, actions should automatically - be retried on unchecked errors. Default is false - type: boolean - constants: - additionalProperties: - description: |- - RawMessage is a raw encoded JSON value. 
- It implements [Marshaler] and [Unmarshaler] and can - be used to delay JSON decoding or precompute a JSON encoding. - format: byte - type: string - description: |- - Constants Workflow constants are used to define static, and immutable, data which is available to - Workflow Expressions. - type: object - dataInputSchema: - description: DataInputSchema URI or Object of the JSON Schema used - to validate the workflow data input - properties: - failOnValidationErrors: - type: boolean - schema: - x-kubernetes-preserve-unknown-fields: true - required: - - failOnValidationErrors - - schema - type: object - description: - description: Workflow description. - type: string - errors: - description: Defines checked errors that can be explicitly handled - during workflow execution. - items: - description: Error declaration for workflow definitions - properties: - code: - description: |- - Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. - Should not be defined if error is set to '*'. - type: string - description: - description: OnError description. - type: string - name: - description: Name Domain-specific error name. - type: string - required: - - name - type: object - type: array - events: - items: - description: Event used to define events and their correlations - properties: - correlation: - description: Define event correlation rules for this event. - Only used for consumed events. - items: - description: Correlation define event correlation rules for - an event. Only used for `consumed` events - properties: - contextAttributeName: - description: CloudEvent Extension Context Attribute name - type: string - contextAttributeValue: - description: CloudEvent Extension Context Attribute value - type: string - required: - - contextAttributeName - type: object - type: array - dataOnly: - default: true - description: |- - If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload - and context attributes should be accessible. Defaults to true. - type: boolean - kind: - default: consumed - description: Defines the CloudEvent as either 'consumed' or - 'produced' by the workflow. Defaults to `consumed`. - enum: - - consumed - - produced - type: string - metadata: - description: Metadata information - x-kubernetes-preserve-unknown-fields: true - name: - description: Unique event name. - type: string - source: - description: CloudEvent source. - type: string - type: - description: CloudEvent type. - type: string - required: - - name - - type - type: object - type: array - expressionLang: - default: jq - description: Identifies the expression language used for workflow - expressions. Default is 'jq'. - enum: - - jq - - jsonpath - - cel - type: string - functions: - items: - description: Function ... - properties: - authRef: - description: References an auth definition name to be used to - access to resource defined in the operation parameter. - type: string - metadata: - description: Metadata information - x-kubernetes-preserve-unknown-fields: true - name: - description: Unique function name - type: string - operation: - description: |- - If type is `rest`, #. - If type is `rpc`, ##. - If type is `expression`, defines the workflow expression. If the type is `custom`, - #. - type: string - type: - default: rest - description: |- - Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. - Default is `rest`. 
- enum: - - rest - - rpc - - expression - - graphql - - odata - - asyncapi - - custom - type: string - required: - - name - - operation - type: object - type: array - id: - description: Workflow unique identifier - type: string - keepActive: - description: |- - If "true", workflow instances is not terminated when there are no active execution paths. - Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" - type: boolean - key: - description: Key Domain-specific workflow identifier - type: string - metadata: - description: Metadata custom information shared with the runtime. - x-kubernetes-preserve-unknown-fields: true - name: - description: Workflow name - type: string - retries: - items: - description: Retry ... - properties: - delay: - description: Time delay between retry attempts (ISO 8601 duration - format) - type: string - increment: - description: Static value by which the delay increases during - each attempt (ISO 8601 time format) - type: string - jitter: - description: If float type, maximum amount of random time added - or subtracted from the delay between each retry relative to - total delay (between 0 and 1). If string type, absolute maximum - amount of random time added or subtracted from the delay between - each retry (ISO 8601 duration format) - properties: - floatVal: - type: number - strVal: - type: string - type: - description: Type represents the stored type of Float32OrString. - format: int64 - type: integer - type: object - maxAttempts: - anyOf: - - type: integer - - type: string - description: Maximum number of retry attempts. - x-kubernetes-int-or-string: true - maxDelay: - description: Maximum time delay between retry attempts (ISO - 8601 duration format) - type: string - multiplier: - description: Numeric value, if specified the delay between retries - is multiplied by this value. - properties: - floatVal: - type: number - strVal: - type: string - type: - description: Type represents the stored type of Float32OrString. - format: int64 - type: integer - type: object - name: - description: Unique retry strategy name - type: string - required: - - maxAttempts - - name - type: object - type: array - secrets: - description: |- - Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, - inside your Workflow Expressions. - items: - type: string - type: array - specVersion: - default: "0.8" - description: Serverless Workflow schema version - type: string - start: - description: Workflow start definition. - x-kubernetes-preserve-unknown-fields: true - states: - description: States ... - items: - properties: - callbackState: - description: callbackState executes a function and waits for - callback event that indicates completion of the task. - properties: - action: - description: Defines the action to be executed. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters state - data that can be used by the action. - type: string - results: - description: Workflow expression that filters the - actions data results. 
- type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate to - true for this action to be performed. If false, action - is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context attributes - to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should be - invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a 'consumed' - event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a 'produced' - event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should be - invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. String - containing a valid GraphQL selection set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. 
- type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - eventDataFilter: - description: Event data filter definition. - properties: - data: - description: Workflow expression that filters of the - event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRef: - description: References a unique callback event name in - the defined workflow events. - type: string - timeouts: - description: Time period to wait for incoming events (ISO - 8601 format) - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - eventTimeout: - description: Default timeout for consuming defined events - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - action - - eventRef - type: object - compensatedBy: - description: Unique Name of a workflow state which is responsible - for compensation of this state. - type: string - delayState: - description: delayState Causes the workflow execution to delay - for a specified duration. - properties: - timeDelay: - description: Amount of time (ISO 8601 format) to delay - type: string - required: - - timeDelay - type: object - end: - description: State end definition. - x-kubernetes-preserve-unknown-fields: true - eventState: - description: |- - event states await one or more events and perform actions when they are received. If defined as the - workflow starting state, the event state definition controls when the workflow instances should be created. - properties: - exclusive: - default: true - description: |- - If true consuming one of the defined events causes its associated actions to be performed. If false all - the defined events must be consumed in order for actions to be performed. Defaults to true. - type: boolean - onEvents: - description: Define the events to be consumed and optional - actions to be performed. 
- items: - description: OnEvents define which actions are be performed - for the one or more events. - properties: - actionMode: - default: sequential - description: Should actions be performed sequentially - or in parallel. Default is sequential. - enum: - - sequential - - parallel - type: string - actions: - description: Actions to be performed if expression - matches - items: - description: Action specify invocations of services - or other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If - false, action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' - reusable event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name - of a 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name - of a 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function - definition. - properties: - arguments: - description: Arguments (inputs) to be passed - to the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. 
- String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry - definition. If not defined uses the default - runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - type: array - eventDataFilter: - description: eventDataFilter defines the callback - event data filter definition - properties: - data: - description: Workflow expression that filters - of the event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRefs: - description: References one or more unique event names - in the defined workflow events. - items: - type: string - minItems: 1 - type: array - required: - - eventRefs - type: object - minItems: 1 - type: array - timeouts: - description: State specific timeouts. 
- properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - eventTimeout: - description: Default timeout for consuming defined events - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - onEvents - type: object - forEachState: - description: forEachState used to execute actions for each element - of a data set. - properties: - actions: - description: Actions to be executed for each of the elements - of inputCollection. - items: - description: Action specify invocations of services or - other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If false, - action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a - 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. 
If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a - 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 0 - type: array - batchSize: - anyOf: - - type: integer - - type: string - description: |- - Specifies how many iterations may run in parallel at the same time. Used if mode property is set to - parallel (default). If not specified, its value should be the size of the inputCollection. - x-kubernetes-int-or-string: true - inputCollection: - description: Workflow expression selecting an array element - of the states' data. - type: string - iterationParam: - description: |- - Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, - this param should contain a unique element of the inputCollection array. 
- type: string - mode: - default: parallel - description: Specifies how iterations are to be performed - (sequential or in parallel), defaults to parallel. - enum: - - sequential - - parallel - type: string - outputCollection: - description: Workflow expression specifying an array element - of the states data to add the results of each iteration. - type: string - timeouts: - description: State specific timeout. - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - inputCollection - type: object - id: - description: Unique State id. - type: string - injectState: - description: injectState used to inject static data into state - data input. - properties: - data: - description: JSON object which can be set as state's data - input and can be manipulated via filter - x-kubernetes-preserve-unknown-fields: true - timeouts: - description: State specific timeouts - properties: - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - data - type: object - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: State name. - type: string - onErrors: - description: States error handling and retries definitions. - items: - description: OnError ... - properties: - end: - description: |- - End workflow execution in case of this error. If retryRef is defined, this ends workflow only if - retries were unsuccessful. - x-kubernetes-preserve-unknown-fields: true - errorRef: - description: ErrorRef Reference to a unique workflow error - definition. Used of errorRefs is not used - type: string - errorRefs: - description: ErrorRefs References one or more workflow - error definitions. Used if errorRef is not used - items: - type: string - type: array - transition: - description: |- - Transition to next state to handle the error. If retryRef is defined, this transition is taken only if - retries were unsuccessful. - x-kubernetes-preserve-unknown-fields: true - type: object - type: array - operationState: - description: operationState defines a set of actions to be performed - in sequence or in parallel. - properties: - actionMode: - default: sequential - description: Specifies whether actions are performed in - sequence or in parallel, defaults to sequential. - enum: - - sequential - - parallel - type: string - actions: - description: Actions to be performed - items: - description: Action specify invocations of services or - other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. 
Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If false, - action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' reusable - event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name of a - 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name of a - 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function definition. - properties: - arguments: - description: Arguments (inputs) to be passed to - the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry definition. - If not defined uses the default runtime retry definition. 
- type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. - Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 0 - type: array - timeouts: - description: State specific timeouts - properties: - actionExecTimeout: - description: Default single actions definition execution - timeout (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Defines workflow state execution timeout. - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - actions - type: object - parallelState: - description: parallelState Consists of a number of states that - are executed in parallel. - properties: - branches: - description: List of branches for this parallel state. - items: - description: Branch Definition - properties: - actions: - description: Actions to be executed in this branch - items: - description: Action specify invocations of services - or other workflows during workflow execution. - properties: - actionDataFilter: - description: |- - Filter the state data to select only the data that can be used within function definition arguments - using its fromStateData property. Filter the action results to select only the result data that should - be added/merged back into the state data using its results property. Select the part of state data which - the action data results should be added/merged to using the toStateData property. - properties: - fromStateData: - description: Workflow expression that filters - state data that can be used by the action. - type: string - results: - description: Workflow expression that filters - the actions data results. - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be - added/merged into. If not specified denotes the top-level state data element. - type: string - useResults: - description: |- - If set to false, action data results are not added/merged to state data. 
In this case 'results' - and 'toStateData' should be ignored. Default is true. - type: boolean - type: object - condition: - description: Expression, if defined, must evaluate - to true for this action to be performed. If - false, action is disregarded. - type: string - eventRef: - description: References a 'trigger' and 'result' - reusable event definitions. - properties: - contextAttributes: - description: Add additional extension context - attributes to the produced event. - x-kubernetes-preserve-unknown-fields: true - data: - description: |- - If string type, an expression which selects parts of the states data output to become the data (payload) - of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - of the event referenced by triggerEventRef. - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - resultEventRef: - description: Reference to the unique name - of a 'consumed' event definition - type: string - resultEventTimeout: - description: |- - Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - actionExecutionTimeout - type: string - triggerEventRef: - description: Reference to the unique name - of a 'produced' event definition, - type: string - required: - - resultEventRef - - triggerEventRef - type: object - functionRef: - description: References a reusable function - definition. - properties: - arguments: - description: Arguments (inputs) to be passed - to the referenced function - x-kubernetes-preserve-unknown-fields: true - invoke: - default: sync - description: Specifies if the function should - be invoked sync or async. Default is sync. - enum: - - async - - sync - type: string - refName: - description: Name of the referenced function. - type: string - selectionSet: - description: Used if function type is graphql. - String containing a valid GraphQL selection - set. - type: string - required: - - refName - type: object - id: - description: Defines Unique action identifier. - type: string - name: - description: Defines Unique action name. - type: string - nonRetryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should not be retried. - Used only when `autoRetries` is set to `true` - items: - type: string - type: array - retryRef: - description: References a defined workflow retry - definition. If not defined uses the default - runtime retry definition. - type: string - retryableErrors: - description: |- - List of unique references to defined workflow errors for which the action should be retried. - Used only when `autoRetries` is set to `false` - items: - type: string - type: array - sleep: - description: Defines time period workflow execution - should sleep before / after function execution. - properties: - after: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - before: - description: |- - Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - Does not apply if 'eventRef' is defined. - type: string - type: object - subFlowRef: - description: References a workflow to be invoked. - properties: - invoke: - default: sync - description: |- - Specifies if the subflow should be invoked sync or async. 
- Defaults to sync. - enum: - - async - - sync - type: string - onParentComplete: - default: terminate - description: |- - onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - is 'async'. Defaults to terminate. - enum: - - terminate - - continue - type: string - version: - description: Sub-workflow version - type: string - workflowId: - description: Sub-workflow unique id - type: string - required: - - workflowId - type: object - type: object - minItems: 1 - type: array - name: - description: Branch name - type: string - timeouts: - description: Branch specific timeout settings - properties: - actionExecTimeout: - description: Single actions definition execution - timeout duration (ISO 8601 duration format) - type: string - branchExecTimeout: - description: Single branch execution timeout duration - (ISO 8601 duration format) - type: string - type: object - required: - - actions - - name - type: object - minItems: 1 - type: array - completionType: - default: allOf - description: Option types on how to complete branch execution. - Defaults to `allOf`. - enum: - - allOf - - atLeast - type: string - numCompleted: - anyOf: - - type: integer - - type: string - description: |- - Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete - in order for the state to transition/end. - x-kubernetes-int-or-string: true - timeouts: - description: State specific timeouts - properties: - branchExecTimeout: - description: Default single branch execution timeout - (ISO 8601 duration format) - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - branches - type: object - sleepState: - description: sleepState suspends workflow execution for a given - time duration. - properties: - duration: - description: Duration (ISO 8601 duration format) to sleep - type: string - timeouts: - description: Timeouts State specific timeouts - properties: - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - duration - type: object - stateDataFilter: - description: State data filter. - properties: - input: - description: Workflow expression to filter the state data - input - type: string - output: - description: Workflow expression that filters the state - data output - type: string - type: object - switchState: - description: 'switchState is workflow''s gateways: direct transitions - onf a workflow based on certain conditions.' - properties: - dataConditions: - description: Defines conditions evaluated against data - items: - description: |- - DataCondition specify a data-based condition statement which causes a transition to another workflow state - if evaluated to true. - properties: - condition: - description: Workflow expression evaluated against - state data. 
Must evaluate to true or false. - type: string - end: - description: Explicit transition to end - properties: - compensate: - description: If set to true, triggers workflow - compensation before workflow execution completes. - Default is false. - type: boolean - continueAs: - description: |- - Defines that current workflow execution should stop, and execution should continue as a new workflow - instance of the provided id - properties: - data: - description: |- - If string type, an expression which selects parts of the states data output to become the workflow data input of - continued execution. If object type, a custom object to become the workflow data input of the continued execution - x-kubernetes-preserve-unknown-fields: true - version: - description: Version of the workflow to continue - execution as. - type: string - workflowExecTimeout: - description: |- - WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. - Overwrites any specific settings set by that workflow - properties: - duration: - default: unlimited - description: Workflow execution timeout - duration (ISO 8601 duration format). - If not specified should be 'unlimited'. - type: string - interrupt: - description: |- - If false, workflow instance is allowed to finish current execution. If true, current workflow execution - is stopped immediately. Default is false. - type: boolean - runBefore: - description: Name of a workflow state - to be executed before workflow instance - is terminated. - type: string - required: - - duration - type: object - workflowId: - description: Unique id of the workflow to - continue execution as. - type: string - required: - - workflowId - type: object - produceEvents: - description: Array of producedEvent definitions. - Defines events that should be produced. - items: - description: |- - ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a - workflow transitions. The eventRef property must match the name of one of the defined produced events in the - events definition. - properties: - contextAttributes: - additionalProperties: - type: string - description: Add additional event extension - context attributes. - type: object - data: - description: |- - If String, expression which selects parts of the states data output to become the data of the produced event. - If object a custom object to become the data of produced event. - x-kubernetes-preserve-unknown-fields: true - eventRef: - description: Reference to a defined unique - event name in the events definition - type: string - required: - - eventRef - type: object - type: array - terminate: - description: If true, completes all execution - flows in the given workflow instance. - type: boolean - type: object - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: Data condition name. - type: string - transition: - description: Workflow transition if condition is evaluated - to true - properties: - compensate: - default: false - description: If set to true, triggers workflow - compensation before this transition is taken. - Default is false. - type: boolean - nextState: - description: Name of the state to transition to - next. - type: string - produceEvents: - description: Array of producedEvent definitions. - Events to be produced before the transition - takes place. 
- items: - description: |- - ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a - workflow transitions. The eventRef property must match the name of one of the defined produced events in the - events definition. - properties: - contextAttributes: - additionalProperties: - type: string - description: Add additional event extension - context attributes. - type: object - data: - description: |- - If String, expression which selects parts of the states data output to become the data of the produced event. - If object a custom object to become the data of produced event. - x-kubernetes-preserve-unknown-fields: true - eventRef: - description: Reference to a defined unique - event name in the events definition - type: string - required: - - eventRef - type: object - type: array - required: - - nextState - type: object - required: - - condition - - end - type: object - type: array - defaultCondition: - description: |- - Default transition of the workflow if there is no matching data conditions. Can include a transition or - end definition. - properties: - end: - description: "\tIf this state an end state" - x-kubernetes-preserve-unknown-fields: true - transition: - description: |- - Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - Each state can define a transition definition that is used to determine which state to transition to next. - x-kubernetes-preserve-unknown-fields: true - type: object - eventConditions: - description: Defines conditions evaluated against events. - items: - description: EventCondition specify events which the switch - state must wait for. - properties: - end: - description: Explicit transition to end - x-kubernetes-preserve-unknown-fields: true - eventDataFilter: - description: Event data filter definition. - properties: - data: - description: Workflow expression that filters - of the event data (payload). - type: string - toStateData: - description: |- - Workflow expression that selects a state data element to which the action results should be added/merged into. - If not specified denotes the top-level state data element - type: string - useData: - description: |- - If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - should be ignored. Default is true. - type: boolean - type: object - eventRef: - description: References a unique event name in the - defined workflow events. - type: string - metadata: - description: Metadata information. - x-kubernetes-preserve-unknown-fields: true - name: - description: Event condition name. - type: string - transition: - description: Workflow transition if condition is evaluated - to true - x-kubernetes-preserve-unknown-fields: true - required: - - eventRef - type: object - type: array - timeouts: - description: SwitchState specific timeouts - properties: - eventTimeout: - description: |- - Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. 
- NOTE: this is only available for EventConditions - type: string - stateExecTimeout: - description: Default workflow state execution timeout - (ISO 8601 duration format) - properties: - single: - description: Single state execution timeout, not - including retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including - retries (ISO 8601 duration format) - type: string - required: - - total - type: object - type: object - required: - - defaultCondition - type: object - transition: - description: Next transition of the workflow after the time - delay. - x-kubernetes-preserve-unknown-fields: true - type: - description: stateType can be any of delay, callback, event, - foreach, inject, operation, parallel, sleep, switch - enum: - - delay - - callback - - event - - foreach - - inject - - operation - - parallel - - sleep - - switch - type: string - usedForCompensation: - description: If true, this state is used to compensate another - state. Default is false. - type: boolean - required: - - name - - type - type: object - minItems: 1 - type: array - x-kubernetes-preserve-unknown-fields: true - timeouts: - description: Defines the workflow default timeout settings. - properties: - actionExecTimeout: - description: ActionExecTimeout Single actions definition execution - timeout duration (ISO 8601 duration format). - type: string - branchExecTimeout: - description: BranchExecTimeout Single branch execution timeout - duration (ISO 8601 duration format). - type: string - eventTimeout: - description: EventTimeout Timeout duration to wait for consuming - defined events (ISO 8601 duration format). - type: string - stateExecTimeout: - description: StateExecTimeout Total state execution timeout (including - retries) (ISO 8601 duration format). - properties: - single: - description: Single state execution timeout, not including - retries (ISO 8601 duration format) - type: string - total: - description: Total state execution timeout, including retries - (ISO 8601 duration format) - type: string - required: - - total - type: object - workflowExecTimeout: - description: |- - WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should - be 'unlimited'. - properties: - duration: - default: unlimited - description: Workflow execution timeout duration (ISO 8601 - duration format). If not specified should be 'unlimited'. - type: string - interrupt: - description: |- - If false, workflow instance is allowed to finish current execution. If true, current workflow execution - is stopped immediately. Default is false. - type: boolean - runBefore: - description: Name of a workflow state to be executed before - workflow instance is terminated. - type: string - required: - - duration - type: object - type: object - version: - description: Workflow version. - type: string - required: - - specVersion - - states - type: object - status: - description: ServerlessWorkflowStatus ... 
- properties: - observedGeneration: - format: int64 - type: integer - type: object - type: object - served: true - storage: true - subresources: - status: {} diff --git a/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml b/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml deleted file mode 100644 index 5faa2c0..0000000 --- a/config/samples/serverlessworkflow.io_v1alpha1_serverlessworkflow.yaml +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This is an example on how the CR would look like when importing the sdk go types into your operator's spec -# the cr instantiation would be validated by this issue: https://github.com/serverlessworkflow/sdk-go/issues/152 - -apiVersion: io.serverlessworkflow/v1alpha1 -kind: ServerlessWorkflow -metadata: - name: custom.greeting -spec: - functions: - - name: greetFunction - type: custom - operation: sysout - states: - - name: ChooseOnLanguage - type: switch - dataConditions: - - condition: "${ .language == \"English\" }" - transition: GreetInEnglish - - condition: "${ .language == \"Spanish\" }" - transition: GreetInSpanish - - condition: "${ .language == \"Italian\" }" - transition: GreetInItalian - defaultCondition: GreetInEnglish - - name: GreetInEnglish - type: inject - data: - greeting: "Hello from JSON Workflow, " - transition: GreetPerson - - name: GreetInSpanish - type: inject - data: - greeting: "Saludos desde JSON Workflow, " - transition: GreetPerson - - name: GreetInItalian - type: inject - data: - greeting: "Saluti dal JSON Workflow, " - transition: GreetPerson - - name: GreetPerson - type: operation - actions: - - name: greetAction - functionRef: - refName: greetFunction - arguments: - message: ".greeting+.name" - end: true \ No newline at end of file diff --git a/go.mod b/go.mod index d2e3b7e..fc847fa 100644 --- a/go.mod +++ b/go.mod @@ -1,44 +1,29 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v3 -go 1.22.0 - -toolchain go1.23.1 +go 1.22 require ( - github.com/go-playground/validator/v10 v10.22.1 - github.com/pkg/errors v0.9.1 - github.com/relvacode/iso8601 v1.4.0 - github.com/sosodev/duration v1.3.1 - github.com/stretchr/testify v1.9.0 - gopkg.in/yaml.v3 v3.0.1 - k8s.io/apimachinery v0.31.1 - sigs.k8s.io/controller-runtime v0.19.0 + github.com/go-playground/validator/v10 v10.24.0 + github.com/itchyny/gojq v0.12.17 + github.com/stretchr/testify v1.10.0 + github.com/tidwall/gjson v1.18.0 sigs.k8s.io/yaml v1.4.0 ) require ( github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/fxamacker/cbor/v2 v2.7.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/gogo/protobuf v1.3.2 // 
indirect - github.com/google/gofuzz v1.2.0 // indirect - github.com/json-iterator/go v1.1.12 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/leodido/go-urn v1.4.0 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/x448/float16 v0.8.4 // indirect - golang.org/x/crypto v0.31.0 // indirect - golang.org/x/net v0.33.0 // indirect - golang.org/x/sys v0.28.0 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + golang.org/x/crypto v0.32.0 // indirect + golang.org/x/net v0.34.0 // indirect + golang.org/x/sys v0.29.0 // indirect golang.org/x/text v0.21.0 // indirect - gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - k8s.io/klog/v2 v2.130.1 // indirect - k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 393de63..257234a 100644 --- a/go.sum +++ b/go.sum @@ -1,124 +1,46 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= -github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= +github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= -github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= -github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= -github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg= +github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af h1:kmjWCqn2qkEml422C2Rrd27c3VGxi6a/6HNq8QmHRKM= -github.com/google/pprof v0.0.0-20240525223248-4bfdf5a9a2af/go.mod h1:K1liHPHnj73Fdn/EKuT8nrFqBihUSKXoLYU0BuatOYo= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= +github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= +github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= +github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA= -github.com/onsi/ginkgo/v2 v2.19.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To= -github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= -github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/relvacode/iso8601 v1.4.0 h1:GsInVSEJfkYuirYFxa80nMLbH2aydgZpIf52gYZXUJs= -github.com/relvacode/iso8601 v1.4.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= -github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4= -github.com/sosodev/duration v1.3.1/go.mod 
h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= -github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= -golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= -golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= -golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= 
+github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= +golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= +golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= +golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -k8s.io/api v0.31.0 h1:b9LiSjR2ym/SzTOlfMHm1tr7/21aD7fSkqgD/CVJBCo= -k8s.io/api v0.31.0/go.mod h1:0YiFF+JfFxMM6+1hQei8FY8M7s1Mth+z/q7eF1aJkTE= -k8s.io/apimachinery v0.31.1 h1:mhcUBbj7KUjaVhyXILglcVjuS4nYXiwC+KKFBgIVy7U= -k8s.io/apimachinery v0.31.1/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo= -k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= -k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= -k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 h1:pUdcCO1Lk/tbT5ztQWOBi5HBgbBP1J8+AsQnQCKsi8A= -k8s.io/utils v0.0.0-20240711033017-18e509b52bc8/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -sigs.k8s.io/controller-runtime v0.19.0 
h1:nWVM7aq+Il2ABxwiCizrVDSlmDcshi9llbaFbC0ji/Q= -sigs.k8s.io/controller-runtime v0.19.0/go.mod h1:iRmWllt8IlaLjvTTDLhRBXIEtkCK6hwVBJJsYS9Ajf4= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/hack/builder-gen.sh b/hack/builder-gen.sh deleted file mode 100755 index 083b187..0000000 --- a/hack/builder-gen.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh -# and adapted to only install and run the deepcopy-gen - -set -o errexit -set -o nounset -set -o pipefail - -SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. -echo "Script root is $SCRIPT_ROOT" - -GENS="$1" -shift 1 - -( - # To support running this script from anywhere, first cd into this directory, - # and then install with forced module mode on and fully qualified name. - # make sure your GOPATH env is properly set. - # it will go under $GOPATH/bin - cd "$(dirname "${0}")" - GO111MODULE=on go install github.com/galgotech/builder-gen@latest -) - -function codegen::join() { local IFS="$1"; shift; echo "$*"; } - -if [ "${GENS}" = "all" ] || grep -qw "buildergen" <<<"${GENS}"; then - echo "Generating buildergen funcs" - export GO111MODULE=on - # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/builder-gen" -v 1 \ - --input-dirs ./model -O zz_generated.buildergen \ - --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" - "$@" -fi diff --git a/hack/conv/main.go b/hack/conv/main.go deleted file mode 100644 index e70e738..0000000 --- a/hack/conv/main.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package main - -import ( - "encoding/json" - "errors" - "log" - "os" - "path" - "path/filepath" - "strings" - - "gopkg.in/yaml.v3" - - "github.com/serverlessworkflow/sdk-go/v2/test" -) - -func convert(i interface{}) interface{} { - switch x := i.(type) { - case map[interface{}]interface{}: - m2 := map[string]interface{}{} - for k, v := range x { - m2[k.(string)] = convert(v) - } - return m2 - case []interface{}: - for i, v := range x { - x[i] = convert(v) - } - } - return i -} - -func transform( - files []string, - srcFormat string, - destFormat string, - unmarshal func(data []byte, out interface{}) error, - marshal func(in interface{}) ([]byte, error), -) { - for _, srcFile := range files { - if !strings.HasSuffix(srcFile, srcFormat) { - log.Printf("%s is not %s format, skip it", srcFile, srcFormat) - continue - } - - destFile := srcFile[0:len(srcFile)-len(srcFormat)] + destFormat - if _, err := os.Stat(destFile); err == nil { - log.Printf("ERR: the target file %v exists, skip it", destFile) - continue - } else if !errors.Is(err, os.ErrNotExist) { - log.Printf("ERR: stat target file %v, %v, skip it", destFile, err) - continue - } - - srcData, err := os.ReadFile(filepath.Clean(srcFile)) - if err != nil { - log.Printf("ERR: cannot read file %v, %v, skip it", srcFile, err) - continue - } - - var srcObj interface{} - err = unmarshal(srcData, &srcObj) - if err != nil { - log.Printf("ERR: cannot unmarshal file %v to %s, %v, skip it", srcFile, srcFormat, err) - continue - } - - destObj := convert(srcObj) - destData, err := marshal(destObj) - if err != nil { - log.Printf("ERR: cannot marshal fild %v data to %v, %v, skip it", srcFile, destFormat, err) - continue - } - - err = os.WriteFile(destFile, destData, 0600) - if err != nil { - log.Printf("ERR: cannot write to file %v, %v, skip it", destFile, err) - continue - } - - log.Printf("convert %v to %v done", srcFile, destFile) - } -} - -func main() { - // TODO: make this as argument - dir := path.Join(test.CurrentProjectPath(), "parser", "testdata", "workflows", "urifiles") - dirEntries, err := os.ReadDir(dir) - if err != nil { - panic(err) - } - - files := make([]string, 0, len(dirEntries)) - for _, entry := range dirEntries { - if entry.IsDir() { - log.Printf("%s is directory, skip it", entry.Name()) - continue - } - - files = append(files, path.Join(dir, entry.Name())) - } - - log.Printf("found %v files", len(files)) - - // First, convert all json format files to yaml - log.Printf("start to convert all json format files to yaml format") - transform(files, ".json", ".yaml", json.Unmarshal, yaml.Marshal) - - // Second, convert all yaml format files to json - log.Printf("start to convert all yaml format files to json format") - transform(files, ".yaml", ".json", yaml.Unmarshal, func(in interface{}) ([]byte, error) { - return json.MarshalIndent(in, "", " ") - }) -} diff --git a/hack/deepcopy-gen.sh b/hack/deepcopy-gen.sh deleted file mode 100755 index 9c7fe0f..0000000 --- a/hack/deepcopy-gen.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh -# and adapted to only install and run the deepcopy-gen - -set -o errexit -set -o nounset -set -o pipefail - -SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. -echo "Script root is $SCRIPT_ROOT" - -GENS="$1" -shift 1 - -( - # To support running this script from anywhere, first cd into this directory, - # and then install with forced module mode on and fully qualified name. - # make sure your GOPATH env is properly set. - # it will go under $GOPATH/bin - cd "$(dirname "${0}")" - DEEPCOPY_VERSION="v0.29.4" - GO111MODULE=on go install k8s.io/code-generator/cmd/deepcopy-gen@${DEEPCOPY_VERSION} -) - -function codegen::join() { local IFS="$1"; shift; echo "$*"; } - -if [ "${GENS}" = "all" ] || grep -qw "deepcopy" <<<"${GENS}"; then - echo "Generating deepcopy funcs" - export GO111MODULE=on - # for debug purposes, increase the log level by updating the -v flag to higher numbers, e.g. -v 4 - "${GOPATH}/bin/deepcopy-gen" -v 1 \ - --input-dirs ./model -O zz_generated.deepcopy \ - --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.txt" - "$@" -fi diff --git a/hack/integration-test.sh b/hack/integration-test.sh new file mode 100755 index 0000000..52f6889 --- /dev/null +++ b/hack/integration-test.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Script to fetch workflow examples, parse, and validate them using the Go parser. + +# Variables +SPEC_REPO="https://github.com/serverlessworkflow/specification" +EXAMPLES_DIR="examples" +PARSER_BINARY="./parser/cmd/main.go" +JUNIT_FILE="./integration-test-junit.xml" + +# Create a temporary directory +TEMP_DIR=$(mktemp -d) + +# Ensure temporary directory was created +if [ ! -d "$TEMP_DIR" ]; then + echo "❌ Failed to create a temporary directory." + exit 1 +fi + +# shellcheck disable=SC2317 +# Clean up the temporary directory on script exit +cleanup() { + echo "🧹 Cleaning up temporary directory..." + rm -rf "$TEMP_DIR" +} +trap cleanup EXIT + +# Fetch the examples directory +echo "πŸ“₯ Fetching workflow examples from ${SPEC_REPO}/${EXAMPLES_DIR}..." +if ! git clone --depth=1 --filter=blob:none --sparse "$SPEC_REPO" "$TEMP_DIR" &> /dev/null; then + echo "❌ Failed to clone specification repository." + exit 1 +fi + +cd "$TEMP_DIR" || exit +if ! git sparse-checkout set "$EXAMPLES_DIR" &> /dev/null; then + echo "❌ Failed to checkout examples directory." 
+    exit 1
+fi
+
+cd - || exit
+
+# Prepare JUnit XML output
+echo '<?xml version="1.0" encoding="UTF-8"?>' > "$JUNIT_FILE"
+echo '<testsuite>' >> "$JUNIT_FILE"
+
+# Initialize test summary
+total_tests=0
+failed_tests=0
+
+# Walk through files and validate
+echo "⚙️ Running parser on fetched examples..."
+while IFS= read -r file; do
+    filename=$(basename "$file")
+    echo "🔍 Validating: $filename"
+
+    # Run the parser for the file
+    if go run "$PARSER_BINARY" "$file" > "$TEMP_DIR/validation.log" 2>&1; then
+        echo "✅ Validation succeeded for $filename"
+        echo "    <testcase name=\"$filename\"/>" >> "$JUNIT_FILE"
+    else
+        echo "❌ Validation failed for $filename"
+        failure_message=$(cat "$TEMP_DIR/validation.log" | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g')
+        echo "    <testcase name=\"$filename\">" >> "$JUNIT_FILE"
+        echo "        <failure message=\"$failure_message\"/>" >> "$JUNIT_FILE"
+        echo "    </testcase>" >> "$JUNIT_FILE"
+        ((failed_tests++))
+    fi
+
+    ((total_tests++))
+done < <(find "$TEMP_DIR/$EXAMPLES_DIR" -type f \( -name "*.yaml" -o -name "*.yml" -o -name "*.json" \))
+
+# Finalize JUnit XML output
+echo '</testsuite>' >> "$JUNIT_FILE"
+
+# Display test summary
+if [ $failed_tests -ne 0 ]; then
+    echo "❌ Validation failed for $failed_tests out of $total_tests workflows."
+    exit 1
+else
+    echo "✅ All $total_tests workflows validated successfully."
+fi
+
+exit 0
diff --git a/kubernetes/api/v1alpha1/groupversion_info.go b/kubernetes/api/v1alpha1/groupversion_info.go
deleted file mode 100644
index 135263e..0000000
--- a/kubernetes/api/v1alpha1/groupversion_info.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2023 The Serverless Workflow Specification Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package v1alpha1 - -import ( - "github.com/serverlessworkflow/sdk-go/v2/model" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" -) - -// This package provides a very simple api for kubernetes operator to test the integration -// of the Serverless SDK-Go with operator-sdk controller-gen and deepcopy-gen tools. -// The purpose of this integration is to stop issues like below beforehand: -// github.com/serverlessworkflow/sdk-go/model/event.go:51:2: encountered struct field "" without JSON tag in type "Event" -// github.com/serverlessworkflow/sdk-go/model/states.go:66:12: unsupported AST kind *ast.InterfaceType - -// States should be objects that will be in the same array even if it belongs to -// different types. An issue similar to the below will happen when trying to deploy your custom CR: -// strict decoding error: unknown field "spec.states[0].dataConditions" -// To make the CRD is compliant to the specs there are two options, -// a flat struct with all states fields at the same level, -// or use the // +kubebuilder:pruning:PreserveUnknownFields -// kubebuilder validator and delegate the validation to the sdk-go validator using the admission webhook. -// TODO add a webhook example - -// ServerlessWorkflowSpec defines a base API for integration test with operator-sdk -// +k8s:openapi-gen=true -type ServerlessWorkflowSpec struct { - model.Workflow `json:",inline"` -} - -// ServerlessWorkflow ... -// +kubebuilder:object:root=true -// +kubebuilder:object:generate=true -// +kubebuilder:subresource:status -// +k8s:openapi-gen=true -type ServerlessWorkflow struct { - metav1.TypeMeta `json:",inline"` - metav1.ObjectMeta `json:"metadata,omitempty"` - - Spec ServerlessWorkflowSpec `json:"spec,omitempty"` - Status ServerlessWorkflowStatus `json:"status,omitempty"` -} - -// ServerlessWorkflowStatus ... -// +k8s:openapi-gen=true -type ServerlessWorkflowStatus struct { - // add your conditions struct here ... - - // +optional - ObservedGeneration int64 `json:"observedGeneration,omitempty"` -} - -// ServerlessWorkflowList contains a list of SDKServerlessWorkflow -// +kubebuilder:object:root=true -// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object -type ServerlessWorkflowList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []ServerlessWorkflow `json:"items"` -} - -func init() { - SchemeBuilder.Register(&ServerlessWorkflow{}, &ServerlessWorkflowList{}) -} diff --git a/kubernetes/api/v1alpha1/zz_generated.deepcopy.go b/kubernetes/api/v1alpha1/zz_generated.deepcopy.go deleted file mode 100644 index 453a82c..0000000 --- a/kubernetes/api/v1alpha1/zz_generated.deepcopy.go +++ /dev/null @@ -1,113 +0,0 @@ -//go:build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by controller-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - runtime "k8s.io/apimachinery/pkg/runtime" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflow) DeepCopyInto(out *ServerlessWorkflow) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - out.Status = in.Status -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflow. -func (in *ServerlessWorkflow) DeepCopy() *ServerlessWorkflow { - if in == nil { - return nil - } - out := new(ServerlessWorkflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *ServerlessWorkflow) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowList) DeepCopyInto(out *ServerlessWorkflowList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]ServerlessWorkflow, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowList. -func (in *ServerlessWorkflowList) DeepCopy() *ServerlessWorkflowList { - if in == nil { - return nil - } - out := new(ServerlessWorkflowList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *ServerlessWorkflowList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowSpec) DeepCopyInto(out *ServerlessWorkflowSpec) { - *out = *in - in.Workflow.DeepCopyInto(&out.Workflow) -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowSpec. -func (in *ServerlessWorkflowSpec) DeepCopy() *ServerlessWorkflowSpec { - if in == nil { - return nil - } - out := new(ServerlessWorkflowSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ServerlessWorkflowStatus) DeepCopyInto(out *ServerlessWorkflowStatus) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServerlessWorkflowStatus. 
-func (in *ServerlessWorkflowStatus) DeepCopy() *ServerlessWorkflowStatus { - if in == nil { - return nil - } - out := new(ServerlessWorkflowStatus) - in.DeepCopyInto(out) - return out -} diff --git a/model/action.go b/model/action.go deleted file mode 100644 index 2635849..0000000 --- a/model/action.go +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// Action specify invocations of services or other workflows during workflow execution. -// +builder-gen:new-call=ApplyDefault -type Action struct { - // Defines Unique action identifier. - // +optional - ID string `json:"id,omitempty"` - // Defines Unique action name. - // +optional - Name string `json:"name,omitempty"` - // References a reusable function definition. - // +optional - FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions. - // +optional - EventRef *EventRef `json:"eventRef,omitempty"` - // References a workflow to be invoked. - // +optional - SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` - // Defines time period workflow execution should sleep before / after function execution. - // +optional - Sleep *Sleep `json:"sleep,omitempty"` - // References a defined workflow retry definition. If not defined uses the default runtime retry definition. - // +optional - RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. - // Used only when `autoRetries` is set to `true` - // +optional - NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. - // Used only when `autoRetries` is set to `false` - // +optional - RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Filter the state data to select only the data that can be used within function definition arguments - // using its fromStateData property. Filter the action results to select only the result data that should - // be added/merged back into the state data using its results property. Select the part of state data which - // the action data results should be added/merged to using the toStateData property. - // +optional - ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` - // Expression, if defined, must evaluate to true for this action to be performed. If false, action is disregarded. 
- // +optional - Condition string `json:"condition,omitempty"` -} - -type actionUnmarshal Action - -// UnmarshalJSON implements json.Unmarshaler -func (a *Action) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("action", data, (*actionUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action -func (a *Action) ApplyDefault() { - a.ActionDataFilter.ApplyDefault() -} - -// FunctionRef defines the reference to a reusable function definition -// +builder-gen:new-call=ApplyDefault -type FunctionRef struct { - // Name of the referenced function. - // +kubebuilder:validation:Required - RefName string `json:"refName" validate:"required"` - // Arguments (inputs) to be passed to the referenced function - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // TODO: validate it as required if function type is graphql - Arguments map[string]Object `json:"arguments,omitempty"` - // Used if function type is graphql. String containing a valid GraphQL selection set. - // TODO: validate it as required if function type is graphql - // +optional - SelectionSet string `json:"selectionSet,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type functionRefUnmarshal FunctionRef - -// UnmarshalJSON implements json.Unmarshaler -func (f *FunctionRef) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function Ref -func (f *FunctionRef) ApplyDefault() { - f.Invoke = InvokeKindSync -} - -// Sleep defines time periods workflow execution should sleep before & after function execution -type Sleep struct { - // Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - // Does not apply if 'eventRef' is defined. - // +optional - Before string `json:"before,omitempty" validate:"omitempty,iso8601duration"` - // Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - // Does not apply if 'eventRef' is defined. - // +optional - After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` -} - -type sleepUnmarshal Sleep - -// UnmarshalJSON implements json.Unmarshaler -func (s *Sleep) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("sleep", data, (*sleepUnmarshal)(s)) -} diff --git a/model/action_data_filter.go b/model/action_data_filter.go deleted file mode 100644 index e929f6b..0000000 --- a/model/action_data_filter.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// ActionDataFilter used to filter action data results. -// +optional -// +builder-gen:new-call=ApplyDefault -type ActionDataFilter struct { - // Workflow expression that filters state data that can be used by the action. - // +optional - FromStateData string `json:"fromStateData,omitempty"` - // If set to false, action data results are not added/merged to state data. In this case 'results' - // and 'toStateData' should be ignored. Default is true. - // +optional - UseResults bool `json:"useResults,omitempty"` - // Workflow expression that filters the actions data results. - // +optional - Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be - // added/merged into. If not specified denotes the top-level state data element. - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type actionDataFilterUnmarshal ActionDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (a *ActionDataFilter) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action Data Filter -func (a *ActionDataFilter) ApplyDefault() { - a.UseResults = true -} diff --git a/model/action_data_filter_test.go b/model/action_data_filter_test.go deleted file mode 100644 index cae511a..0000000 --- a/model/action_data_filter_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestActionDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ActionDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: true, - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3", "useResults": false}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: false, - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: ActionDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"fromStateData": 1, "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{}, - err: `actionDataFilter.fromStateData must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ActionDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_test.go b/model/action_test.go deleted file mode 100644 index 55c399d..0000000 --- a/model/action_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFunctionRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect FunctionRef - err string - } - - testCases := []testCase{ - { - desp: "invalid object refName", - data: `{"refName": 1}`, - expect: FunctionRef{}, - err: "functionRef.refName must be string", - }, - { - desp: "object with refName", - data: `{"refName": "function name"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "object with refName and Invoke", - data: `{"refName": "function name", "invoke": "async"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "refName string", - data: `"function name"`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - } - - for _, tc := range testCases[:1] { - t.Run(tc.desp, func(t *testing.T) { - var v FunctionRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_validator.go b/model/action_validator.go deleted file mode 100644 index 3fac375..0000000 --- a/model/action_validator.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(actionStructLevelValidationCtx), Action{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(functionRefStructLevelValidation), FunctionRef{}) -} - -func actionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - action := structLevel.Current().Interface().(Action) - - if action.FunctionRef == nil && action.EventRef == nil && action.SubFlowRef == nil { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", "required_without", "") - return - } - - values := []bool{ - action.FunctionRef != nil, - action.EventRef != nil, - action.SubFlowRef != nil, - } - - if validationNotExclusiveParameters(values) { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", val.TagExclusive, "") - structLevel.ReportError(action.EventRef, "EventRef", "EventRef", val.TagExclusive, "") - structLevel.ReportError(action.SubFlowRef, "SubFlowRef", "SubFlowRef", val.TagExclusive, "") - } - - if action.RetryRef != "" && !ctx.ExistRetry(action.RetryRef) { - structLevel.ReportError(action.RetryRef, "RetryRef", "RetryRef", val.TagExists, "") - } -} - -func functionRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - functionRef := structLevel.Current().Interface().(FunctionRef) - if !ctx.ExistFunction(functionRef.RefName) { - structLevel.ReportError(functionRef.RefName, "RefName", "RefName", val.TagExists, functionRef.RefName) - } -} diff --git a/model/action_validator_test.go b/model/action_validator_test.go deleted file mode 100644 index 84424b5..0000000 --- a/model/action_validator_test.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildActionByOperationState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.OperationState.Actions = append(state.OperationState.Actions, action) - return &state.OperationState.Actions[len(state.OperationState.Actions)-1] -} - -func buildActionByForEachState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.ForEachState.Actions = append(state.ForEachState.Actions, action) - return &state.ForEachState.Actions[len(state.ForEachState.Actions)-1] -} - -func buildActionByBranch(branch *Branch, name string) *Action { - action := Action{ - Name: name, - } - - branch.Actions = append(branch.Actions, action) - return &branch.Actions[len(branch.Actions)-1] -} - -func buildFunctionRef(workflow *Workflow, action *Action, name string) (*FunctionRef, *Function) { - function := Function{ - Name: name, - Operation: "http://function/function_name", - Type: FunctionTypeREST, - } - - functionRef := FunctionRef{ - RefName: name, - Invoke: InvokeKindSync, - } - action.FunctionRef = &functionRef - - workflow.Functions = append(workflow.Functions, function) - return &functionRef, &function -} - -func buildRetryRef(workflow *Workflow, action *Action, name string) { - retry := Retry{ - Name: name, - MaxAttempts: intstr.FromInt32(1), - } - - workflow.Retries = append(workflow.Retries, retry) - action.RetryRef = name -} - -func buildSleep(action *Action) *Sleep { - action.Sleep = &Sleep{ - Before: "PT5S", - After: "PT5S", - } - return action.Sleep -} - -func TestActionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "require_without", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef = nil - return *model - }, - Err: `workflow.states[0].actions[0].functionRef required when "eventRef" or "subFlowRef" is not defined`, - }, - { - Desp: "exclude", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventRef(model, &model.States[0].OperationState.Actions[0], "event 1", "event2") - return *model - }, - Err: `workflow.states[0].actions[0].functionRef exclusive -workflow.states[0].actions[0].eventRef exclusive -workflow.states[0].actions[0].subFlowRef exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestFunctionRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return 
*model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.RefName = "invalid function" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.refName don't exist "invalid function"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildSleep(action1) - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "" - model.States[0].OperationState.Actions[0].Sleep.After = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "P5S" - model.States[0].OperationState.Actions[0].Sleep.After = "P5S" - return *model - }, - Err: `workflow.states[0].actions[0].sleep.before invalid iso8601 duration "P5S" -workflow.states[0].actions[0].sleep.after invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} diff --git a/model/auth.go b/model/auth.go deleted file mode 100644 index 6632265..0000000 --- a/model/auth.go +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// AuthType can be "basic", "bearer", or "oauth2". Default is "basic" -type AuthType string - -func (i AuthType) KindValues() []string { - return []string{ - string(AuthTypeBasic), - string(AuthTypeBearer), - string(AuthTypeOAuth2), - } -} - -func (i AuthType) String() string { - return string(i) -} - -const ( - // AuthTypeBasic ... - AuthTypeBasic AuthType = "basic" - // AuthTypeBearer ... - AuthTypeBearer AuthType = "bearer" - // AuthTypeOAuth2 ... - AuthTypeOAuth2 AuthType = "oauth2" -) - -// GrantType ... -type GrantType string - -func (i GrantType) KindValues() []string { - return []string{ - string(GrantTypePassword), - string(GrantTypeClientCredentials), - string(GrantTypeTokenExchange), - } -} - -func (i GrantType) String() string { - return string(i) -} - -const ( - // GrantTypePassword ... - GrantTypePassword GrantType = "password" - // GrantTypeClientCredentials ... - GrantTypeClientCredentials GrantType = "clientCredentials" - // GrantTypeTokenExchange ... 
- GrantTypeTokenExchange GrantType = "tokenExchange" -) - -// Auth definitions can be used to define authentication information that should be applied to resources -// defined in the operation property of function definitions. It is not used as authentication information -// for the function invocation, but just to access the resource containing the function invocation information. -type Auth struct { - // Unique auth definition name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Auth scheme, can be "basic", "bearer", or "oauth2". Default is "basic" - // +kubebuilder:validation:Enum=basic;bearer;oauth2 - // +kubebuilder:default=basic - // +kubebuilder:validation:Required - Scheme AuthType `json:"scheme" validate:"required,oneofkind"` - // Auth scheme properties. Can be one of "Basic properties definition", "Bearer properties definition", - // or "OAuth2 properties definition" - // +kubebuilder:validation:Required - Properties AuthProperties `json:"properties" validate:"required"` -} - -type authUnmarshal Auth - -// UnmarshalJSON Auth definition -func (a *Auth) UnmarshalJSON(data []byte) error { - authTmp := struct { - authUnmarshal - PropertiesRaw json.RawMessage `json:"properties"` - }{} - - err := util.UnmarshalObjectOrFile("auth", data, &authTmp) - if err != nil { - return err - } - - *a = Auth(authTmp.authUnmarshal) - if len(a.Scheme) == 0 { - a.Scheme = AuthTypeBasic - } - - switch a.Scheme { - case AuthTypeBasic: - a.Properties.Basic = &BasicAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) - case AuthTypeBearer: - a.Properties.Bearer = &BearerAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) - case AuthTypeOAuth2: - a.Properties.OAuth2 = &OAuth2AuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) - default: - return fmt.Errorf("failed to parse auth properties") - } -} - -func (a *Auth) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - Name string `json:"name" validate:"required"` - Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` - Properties AuthProperties `json:"properties" validate:"required"` - }{ - Name: a.Name, - Scheme: a.Scheme, - Properties: a.Properties, - }) - if err != nil { - fmt.Println(err) - } - st := strings.Replace(string(custom), "null,", "", 1) - st = strings.Replace(st, "\"Basic\":", "", 1) - st = strings.Replace(st, "\"Oauth2\":", "", 1) - st = strings.Replace(st, "\"Bearer\":", "", 1) - st = strings.Replace(st, "{{", "{", 1) - st = strings.TrimSuffix(st, "}") - return []byte(st), nil -} - -// AuthProperties ... -type AuthProperties struct { - Basic *BasicAuthProperties `json:",omitempty"` - Bearer *BearerAuthProperties `json:",omitempty"` - OAuth2 *OAuth2AuthProperties `json:",omitempty"` -} - -// BasicAuthProperties Basic Auth Info -type BasicAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Username String or a workflow expression. Contains the username - // +kubebuilder:validation:Required - Username string `json:"username" validate:"required"` - // Password String or a workflow expression. 
Contains the user password - // +kubebuilder:validation:Required - Password string `json:"password" validate:"required"` -} - -// BearerAuthProperties Bearer auth information -type BearerAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Token String or a workflow expression. Contains the token - // +kubebuilder:validation:Required - Token string `json:"token" validate:"required"` -} - -// OAuth2AuthProperties OAuth2 information -type OAuth2AuthProperties struct { - Common `json:",inline"` - // Expression referencing a workflow secret that contains all needed auth info. - // +optional - Secret string `json:"secret,omitempty"` - // String or a workflow expression. Contains the authority information. - // +optional - Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` - // Defines the grant type. Can be "password", "clientCredentials", or "tokenExchange" - // +kubebuilder:validation:Enum=password;clientCredentials;tokenExchange - // +kubebuilder:validation:Required - GrantType GrantType `json:"grantType" validate:"required,oneofkind"` - // String or a workflow expression. Contains the client identifier. - // +kubebuilder:validation:Required - ClientID string `json:"clientId" validate:"required"` - // Workflow secret or a workflow expression. Contains the client secret. - // +optional - ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 scopes. - // +optional - Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner'. - // +optional - Username string `json:"username,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner'. - // +optional - Password string `json:"password,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 audiences. - // +optional - Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the subject token. - // +optional - SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested subject. - // +optional - RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested issuer. - // +optional - RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` -} - -// TODO: use reflection to unmarshal the keys and think on a generic approach to handle them diff --git a/model/auth_test.go b/model/auth_test.go deleted file mode 100644 index 60602a2..0000000 --- a/model/auth_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { - t.Run("BearerAuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "bearer", - "properties": { - "token": "token1" - } - }` - a2JSON := `{ - "name": "a2", - "scheme": "bearer", - "properties": { - "token": "token2" - } - }` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.Bearer - a2Properties := a2.Properties.Bearer - - assert.Equal(t, "token1", a1Properties.Token) - assert.Equal(t, "token2", a2Properties.Token) - assert.NotEqual(t, a1Properties, a2Properties) - }) - - t.Run("OAuth2AuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret1" - } -}` - - a2JSON := `{ - "name": "a2", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret2" - } -}` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.OAuth2 - a2Properties := a2.Properties.OAuth2 - - assert.Equal(t, "secret1", a1Properties.ClientSecret) - assert.Equal(t, "secret2", a2Properties.ClientSecret) - assert.NotEqual(t, a1Properties, a2Properties) - }) -} diff --git a/model/auth_validator_test.go b/model/auth_validator_test.go deleted file mode 100644 index e2ce55d..0000000 --- a/model/auth_validator_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildAuth(workflow *Workflow, name string) *Auth { - auth := Auth{ - Name: name, - Scheme: AuthTypeBasic, - } - workflow.Auth = append(workflow.Auth, auth) - return &workflow.Auth[len(workflow.Auth)-1] -} - -func buildBasicAuthProperties(auth *Auth) *BasicAuthProperties { - auth.Scheme = AuthTypeBasic - auth.Properties = AuthProperties{ - Basic: &BasicAuthProperties{ - Username: "username", - Password: "password", - }, - } - - return auth.Properties.Basic -} - -func buildOAuth2AuthProperties(auth *Auth) *OAuth2AuthProperties { - auth.Scheme = AuthTypeOAuth2 - auth.Properties = AuthProperties{ - OAuth2: &OAuth2AuthProperties{ - ClientID: "clientId", - GrantType: GrantTypePassword, - }, - } - - return auth.Properties.OAuth2 -} - -func buildBearerAuthProperties(auth *Auth) *BearerAuthProperties { - auth.Scheme = AuthTypeBearer - auth.Properties = AuthProperties{ - Bearer: &BearerAuthProperties{ - Token: "token", - }, - } - - return auth.Properties.Bearer -} - -func TestAuthStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Name = "" - return *model - }, - Err: `workflow.auth[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth = append(model.Auth, model.Auth[0]) - return *model - }, - Err: `workflow.auth has duplicate "name"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestBasicAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Basic.Username = "" - model.Auth[0].Properties.Basic.Password = "" - return *model - }, - Err: `workflow.auth[0].properties.basic.username is required -workflow.auth[0].properties.basic.password is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBearerAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBearerAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Bearer.Token = "" - return *model - }, - Err: `workflow.auth[0].properties.bearer.token is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOAuth2AuthPropertiesPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildOAuth2AuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = "" - model.Auth[0].Properties.OAuth2.ClientID = "" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType is required -workflow.auth[0].properties.oAuth2.clientID is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = GrantTypePassword + "invalid" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType need by one of [password clientCredentials tokenExchange]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/authentication.go b/model/authentication.go new file mode 100644 index 0000000..35f06a4 --- /dev/null +++ b/model/authentication.go @@ -0,0 +1,187 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// AuthenticationPolicy Defines an authentication policy. +type AuthenticationPolicy struct { + Basic *BasicAuthenticationPolicy `json:"basic,omitempty"` + Bearer *BearerAuthenticationPolicy `json:"bearer,omitempty"` + Digest *DigestAuthenticationPolicy `json:"digest,omitempty"` + OAuth2 *OAuth2AuthenticationPolicy `json:"oauth2,omitempty"` + OIDC *OpenIdConnectAuthenticationPolicy `json:"oidc,omitempty"` +} + +// UnmarshalJSON for AuthenticationPolicy to enforce "oneOf" behavior. 
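+// As an illustration (not part of this patch), a payload such as
+// {"basic": {"username": "u", "password": "p"}} populates only the Basic
+// field, while a payload that sets two policies at once (for example both
+// "basic" and "bearer") is rejected by the count check below.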
+func (ap *AuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Create temporary maps to detect which field is populated + temp := struct { + Basic json.RawMessage `json:"basic"` + Bearer json.RawMessage `json:"bearer"` + Digest json.RawMessage `json:"digest"` + OAuth2 json.RawMessage `json:"oauth2"` + OIDC json.RawMessage `json:"oidc"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if len(temp.Basic) > 0 { + count++ + ap.Basic = &BasicAuthenticationPolicy{} + if err := json.Unmarshal(temp.Basic, ap.Basic); err != nil { + return err + } + } + if len(temp.Bearer) > 0 { + count++ + ap.Bearer = &BearerAuthenticationPolicy{} + if err := json.Unmarshal(temp.Bearer, ap.Bearer); err != nil { + return err + } + } + if len(temp.Digest) > 0 { + count++ + ap.Digest = &DigestAuthenticationPolicy{} + if err := json.Unmarshal(temp.Digest, ap.Digest); err != nil { + return err + } + } + if len(temp.OAuth2) > 0 { + count++ + ap.OAuth2 = &OAuth2AuthenticationPolicy{} + if err := json.Unmarshal(temp.OAuth2, ap.OAuth2); err != nil { + return err + } + } + if len(temp.OIDC) > 0 { + count++ + ap.OIDC = &OpenIdConnectAuthenticationPolicy{} + if err := json.Unmarshal(temp.OIDC, ap.OIDC); err != nil { + return err + } + } + + // Ensure only one field is set + if count != 1 { + return errors.New("invalid AuthenticationPolicy: only one authentication type must be specified") + } + return nil +} + +// MarshalJSON for AuthenticationPolicy. +func (ap *AuthenticationPolicy) MarshalJSON() ([]byte, error) { + if ap.Basic != nil { + return json.Marshal(map[string]interface{}{"basic": ap.Basic}) + } + if ap.Bearer != nil { + return json.Marshal(map[string]interface{}{"bearer": ap.Bearer}) + } + if ap.Digest != nil { + return json.Marshal(map[string]interface{}{"digest": ap.Digest}) + } + if ap.OAuth2 != nil { + return json.Marshal(map[string]interface{}{"oauth2": ap.OAuth2}) + } + if ap.OIDC != nil { + return json.Marshal(map[string]interface{}{"oidc": ap.OIDC}) + } + // Add logic for other fields... + return nil, errors.New("invalid AuthenticationPolicy: no valid configuration to marshal") +} + +// ReferenceableAuthenticationPolicy represents a referenceable authentication policy. +type ReferenceableAuthenticationPolicy struct { + Use *string `json:"use,omitempty"` + AuthenticationPolicy *AuthenticationPolicy `json:",inline"` +} + +// UnmarshalJSON for ReferenceableAuthenticationPolicy enforces the "oneOf" behavior. +func (rap *ReferenceableAuthenticationPolicy) UnmarshalJSON(data []byte) error { + // Temporary structure to detect which field is populated + temp := struct { + Use *string `json:"use"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Check if `use` is set + if temp.Use != nil { + rap.Use = temp.Use + return nil + } + + // If `use` is not set, try unmarshalling inline AuthenticationPolicy + var ap AuthenticationPolicy + if err := json.Unmarshal(data, &ap); err != nil { + return err + } + + rap.AuthenticationPolicy = &ap + return nil +} + +// MarshalJSON for ReferenceableAuthenticationPolicy. 
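+// Illustrative output shapes (hypothetical values, not taken from this patch):
+// a referenced policy marshals as {"use":"my-secret"}, while an inline policy
+// delegates to AuthenticationPolicy.MarshalJSON and produces e.g. {"basic":{...}}.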
+func (rap *ReferenceableAuthenticationPolicy) MarshalJSON() ([]byte, error) { + if rap.Use != nil { + return json.Marshal(map[string]interface{}{"use": rap.Use}) + } + if rap.AuthenticationPolicy != nil { + return json.Marshal(rap.AuthenticationPolicy) + } + return nil, errors.New("invalid ReferenceableAuthenticationPolicy: no valid configuration to marshal") +} + +func NewBasicAuth(username, password string) *AuthenticationPolicy { + return &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: username, + Password: password, + }} +} + +// BasicAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type BasicAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,basic_policy"` +} + +// BearerAuthenticationPolicy supports either an inline token or a secret reference (use). +type BearerAuthenticationPolicy struct { + Token string `json:"token,omitempty" validate:"required_without=Use,bearer_policy"` + Use string `json:"use,omitempty" validate:"required_without=Token"` +} + +// DigestAuthenticationPolicy supports either inline properties (username/password) or a secret reference (use). +type DigestAuthenticationPolicy struct { + Username string `json:"username,omitempty" validate:"required_without=Use"` + Password string `json:"password,omitempty" validate:"required_without=Use"` + Use string `json:"use,omitempty" validate:"required_without_all=Username Password,digest_policy"` +} + +// OpenIdConnectAuthenticationPolicy Use OpenIdConnect authentication. +type OpenIdConnectAuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"omitempty,required_without=Use"` + Use string `json:"use,omitempty" validate:"omitempty,required_without=Properties"` +} diff --git a/model/authentication_oauth.go b/model/authentication_oauth.go new file mode 100644 index 0000000..e6e5f54 --- /dev/null +++ b/model/authentication_oauth.go @@ -0,0 +1,212 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Endpoints are composed here and not on a separate wrapper object to avoid too many nested objects and inline marshaling. 
+// This allows us to reuse OAuth2AuthenticationProperties also on OpenIdConnectAuthenticationPolicy + +type OAuth2AuthenticationPolicy struct { + Properties *OAuth2AuthenticationProperties `json:",omitempty" validate:"required_without=Use"` + Endpoints *OAuth2Endpoints `json:"endpoints,omitempty"` + Use string `json:"use,omitempty" validate:"oauth2_policy"` +} + +func (o *OAuth2AuthenticationPolicy) ApplyDefaults() { + if o.Endpoints == nil { + return + } + + // Apply defaults if the respective fields are empty + if o.Endpoints.Token == "" { + o.Endpoints.Token = OAuth2DefaultTokenURI + } + if o.Endpoints.Revocation == "" { + o.Endpoints.Revocation = OAuth2DefaultRevokeURI + } + if o.Endpoints.Introspection == "" { + o.Endpoints.Introspection = OAuth2DefaultIntrospectionURI + } +} + +func (o *OAuth2AuthenticationPolicy) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationPolicy + aux := &struct { + *Alias + }{ + Alias: (*Alias)(o), + } + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Initialize Properties if any field for it is set + if o.Properties == nil && containsOAuth2Properties(data) { + o.Properties = &OAuth2AuthenticationProperties{} + if err := json.Unmarshal(data, o.Properties); err != nil { + return err + } + } + + return nil +} + +func containsOAuth2Properties(data []byte) bool { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return false + } + for key := range raw { + if key != "use" { + return true + } + } + return false +} + +// MarshalJSON customizes the JSON output for OAuth2AuthenticationPolicy +func (o *OAuth2AuthenticationPolicy) MarshalJSON() ([]byte, error) { + o.ApplyDefaults() + // Create a map to hold the resulting JSON + result := make(map[string]interface{}) + + // Inline Properties fields if present + if o.Properties != nil { + propertiesJSON, err := json.Marshal(o.Properties) + if err != nil { + return nil, err + } + + var propertiesMap map[string]interface{} + if err := json.Unmarshal(propertiesJSON, &propertiesMap); err != nil { + return nil, err + } + + for key, value := range propertiesMap { + result[key] = value + } + } + + // Add the Use field if present + if o.Use != "" { + result["use"] = o.Use + } + + return json.Marshal(result) +} + +type OAuth2AuthenticationProperties struct { + Authority URITemplate `json:"authority,omitempty"` + Grant OAuth2AuthenticationDataGrant `json:"grant,omitempty" validate:"oneof='authorization_code' 'client_credentials' 'password' 'refresh_token' 'urn:ietf:params:oauth:grant-type:token-exchange'"` + Client *OAuth2AutenthicationDataClient `json:"client,omitempty"` + Request *OAuth2TokenRequest `json:"request,omitempty"` + Issuers []string `json:"issuers,omitempty"` + Scopes []string `json:"scopes,omitempty"` + Audiences []string `json:"audiences,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + Subject *OAuth2Token `json:"subject,omitempty"` + Actor *OAuth2Token `json:"actor,omitempty"` +} + +func (o *OAuth2AuthenticationProperties) UnmarshalJSON(data []byte) error { + type Alias OAuth2AuthenticationProperties + aux := &struct { + Authority json.RawMessage `json:"authority"` + *Alias + }{ + Alias: (*Alias)(o), + } + + if err := json.Unmarshal(data, &aux); err != nil { + return fmt.Errorf("failed to unmarshal OAuth2AuthenticationProperties: %w", err) + } + + // Unmarshal the Authority field + if aux.Authority != nil { + uri, err := UnmarshalURITemplate(aux.Authority) + if err != nil { + 
return fmt.Errorf("invalid authority URI: %w", err) + } + o.Authority = uri + } + + return nil +} + +// OAuth2AuthenticationDataGrant represents the grant type to use in OAuth2 authentication. +type OAuth2AuthenticationDataGrant string + +// Valid grant types +const ( + AuthorizationCodeGrant OAuth2AuthenticationDataGrant = "authorization_code" + ClientCredentialsGrant OAuth2AuthenticationDataGrant = "client_credentials" + PasswordGrant OAuth2AuthenticationDataGrant = "password" + RefreshTokenGrant OAuth2AuthenticationDataGrant = "refresh_token" + TokenExchangeGrant OAuth2AuthenticationDataGrant = "urn:ietf:params:oauth:grant-type:token-exchange" // #nosec G101 +) + +type OAuthClientAuthenticationType string + +const ( + OAuthClientAuthClientSecretBasic OAuthClientAuthenticationType = "client_secret_basic" + OAuthClientAuthClientSecretPost OAuthClientAuthenticationType = "client_secret_post" + OAuthClientAuthClientSecretJWT OAuthClientAuthenticationType = "client_secret_jwt" + OAuthClientAuthPrivateKeyJWT OAuthClientAuthenticationType = "private_key_jwt" + OAuthClientAuthNone OAuthClientAuthenticationType = "none" +) + +type OAuth2TokenRequestEncodingType string + +const ( + EncodingTypeFormUrlEncoded OAuth2TokenRequestEncodingType = "application/x-www-form-urlencoded" + EncodingTypeApplicationJson OAuth2TokenRequestEncodingType = "application/json" +) + +// OAuth2AutenthicationDataClient The definition of an OAuth2 client. +type OAuth2AutenthicationDataClient struct { + ID string `json:"id,omitempty"` + Secret string `json:"secret,omitempty"` + Assertion string `json:"assertion,omitempty"` + Authentication OAuthClientAuthenticationType `json:"authentication,omitempty" validate:"client_auth_type"` +} + +type OAuth2TokenRequest struct { + Encoding OAuth2TokenRequestEncodingType `json:"encoding" validate:"encoding_type"` +} + +// OAuth2Token Represents an OAuth2 token. +type OAuth2Token struct { + // Token The security token to use + Token string `json:"token,omitempty"` + // Type The type of the security token to use. + Type string `json:"type,omitempty"` +} + +type OAuth2Endpoints struct { + Token string `json:"token,omitempty"` + Revocation string `json:"revocation,omitempty"` + Introspection string `json:"introspection,omitempty"` +} + +const ( + OAuth2DefaultTokenURI = "/oauth2/token" // #nosec G101 + OAuth2DefaultRevokeURI = "/oauth2/revoke" + OAuth2DefaultIntrospectionURI = "/oauth2/introspect" +) diff --git a/model/authentication_oauth_test.go b/model/authentication_oauth_test.go new file mode 100644 index 0000000..820dac1 --- /dev/null +++ b/model/authentication_oauth_test.go @@ -0,0 +1,164 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestOAuth2AuthenticationPolicyValidation(t *testing.T) { + testCases := []struct { + name string + policy OAuth2AuthenticationPolicy + shouldPass bool + }{ + { + name: "Valid: Use set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + }, + shouldPass: true, + }, + { + name: "Valid: Properties set", + policy: OAuth2AuthenticationPolicy{ + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: true, + }, + { + name: "Invalid: Both Use and Properties set", + policy: OAuth2AuthenticationPolicy{ + Use: "mysecret", + Properties: &OAuth2AuthenticationProperties{ + Grant: ClientCredentialsGrant, + Scopes: []string{"scope1", "scope2"}, + Authority: &LiteralUri{Value: "https://auth.example.com"}, + }, + }, + shouldPass: false, + }, + { + name: "Invalid: Neither Use nor Properties set", + policy: OAuth2AuthenticationPolicy{}, + shouldPass: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := validate.Struct(tc.policy) + if tc.shouldPass { + if err != nil { + t.Errorf("Expected validation to pass, but got error: %v", err) + } + } else { + if err == nil { + t.Errorf("Expected validation to fail, but it passed") + } + } + }) + } +} + +func TestAuthenticationOAuth2Policy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid OAuth2 Authentication Inline", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "scopes": ["scope1", "scope2"] + } + }`, + expected: `{"oauth2":{"authority":"https://auth.example.com","grant":"client_credentials","scopes":["scope1","scope2"]}}`, + expectsErr: false, + }, + { + name: "Valid OAuth2 Authentication Use", + input: `{ + "oauth2": { + "use": "mysecret" + } + }`, + expected: `{"oauth2":{"use":"mysecret"}}`, + expectsErr: false, + }, + { + name: "Invalid OAuth2: Both properties and use set", + input: `{ + "oauth2": { + "authority": "https://auth.example.com", + "grant": "client_credentials", + "use": "mysecret" + } + }`, + expectsErr: true, + }, + { + name: "Invalid OAuth2: Missing required fields", + input: `{ + "oauth2": {} + }`, + expectsErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + err = validate.Struct(authPolicy) + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/authentication_test.go b/model/authentication_test.go new file mode 100644 index 0000000..af0f687 --- /dev/null +++ b/model/authentication_test.go @@ -0,0 +1,98 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestAuthenticationPolicy(t *testing.T) { + testCases := []struct { + name string + input string + expected string + expectsErr bool + }{ + { + name: "Valid Basic Authentication Inline", + input: `{ + "basic": { + "username": "john", + "password": "12345" + } + }`, + expected: `{"basic":{"username":"john","password":"12345"}}`, + expectsErr: false, + }, + { + name: "Valid Digest Authentication Inline", + input: `{ + "digest": { + "username": "digestUser", + "password": "digestPass" + } + }`, + expected: `{"digest":{"username":"digestUser","password":"digestPass"}}`, + expectsErr: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var authPolicy AuthenticationPolicy + + // Unmarshal + err := json.Unmarshal([]byte(tc.input), &authPolicy) + if err == nil { + if authPolicy.Basic != nil { + err = validate.Struct(authPolicy.Basic) + } + if authPolicy.Bearer != nil { + err = validate.Struct(authPolicy.Bearer) + } + if authPolicy.Digest != nil { + err = validate.Struct(authPolicy.Digest) + } + if authPolicy.OAuth2 != nil { + err = validate.Struct(authPolicy.OAuth2) + } + } + + if tc.expectsErr { + if err == nil { + t.Errorf("Expected an error but got none") + } + } else { + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + // Marshal + marshaled, err := json.Marshal(authPolicy) + if err != nil { + t.Errorf("Failed to marshal: %v", err) + } + + if string(marshaled) != tc.expected { + t.Errorf("Expected %s but got %s", tc.expected, marshaled) + } + + fmt.Printf("Test '%s' passed. Marshaled output: %s\n", tc.name, marshaled) + } + }) + } +} diff --git a/model/builder.go b/model/builder.go new file mode 100644 index 0000000..81a51c6 --- /dev/null +++ b/model/builder.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + + "sigs.k8s.io/yaml" +) + +// WorkflowBuilder helps construct and serialize a Workflow object. +type WorkflowBuilder struct { + workflow *Workflow +} + +// NewWorkflowBuilder initializes a new WorkflowBuilder. +func NewWorkflowBuilder() *WorkflowBuilder { + return &WorkflowBuilder{ + workflow: &Workflow{ + Document: Document{}, + Do: &TaskList{}, + }, + } +} + +// SetDocument sets the Document fields in the Workflow. 
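+// A minimal chained-usage sketch (values are placeholders, not taken from
+// this patch):
+//
+//	wf := NewWorkflowBuilder().
+//		SetDocument("1.0.0", "examples", "greeter", "0.1.0").
+//		Build()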
+func (wb *WorkflowBuilder) SetDocument(dsl, namespace, name, version string) *WorkflowBuilder { + wb.workflow.Document.DSL = dsl + wb.workflow.Document.Namespace = namespace + wb.workflow.Document.Name = name + wb.workflow.Document.Version = version + return wb +} + +// AddTask adds a TaskItem to the Workflow's Do list. +func (wb *WorkflowBuilder) AddTask(key string, task Task) *WorkflowBuilder { + *wb.workflow.Do = append(*wb.workflow.Do, &TaskItem{ + Key: key, + Task: task, + }) + return wb +} + +// SetInput sets the Input for the Workflow. +func (wb *WorkflowBuilder) SetInput(input *Input) *WorkflowBuilder { + wb.workflow.Input = input + return wb +} + +// SetOutput sets the Output for the Workflow. +func (wb *WorkflowBuilder) SetOutput(output *Output) *WorkflowBuilder { + wb.workflow.Output = output + return wb +} + +// SetTimeout sets the Timeout for the Workflow. +func (wb *WorkflowBuilder) SetTimeout(timeout *TimeoutOrReference) *WorkflowBuilder { + wb.workflow.Timeout = timeout + return wb +} + +// SetUse sets the Use section for the Workflow. +func (wb *WorkflowBuilder) SetUse(use *Use) *WorkflowBuilder { + wb.workflow.Use = use + return wb +} + +// SetSchedule sets the Schedule for the Workflow. +func (wb *WorkflowBuilder) SetSchedule(schedule *Schedule) *WorkflowBuilder { + wb.workflow.Schedule = schedule + return wb +} + +// Build returns the constructed Workflow object. +func (wb *WorkflowBuilder) Build() *Workflow { + return wb.workflow +} + +// ToYAML serializes the Workflow to YAML format. +func (wb *WorkflowBuilder) ToYAML() ([]byte, error) { + return yaml.Marshal(wb.workflow) +} + +// ToJSON serializes the Workflow to JSON format. +func (wb *WorkflowBuilder) ToJSON() ([]byte, error) { + return json.MarshalIndent(wb.workflow, "", " ") +} diff --git a/model/callback_state.go b/model/callback_state.go deleted file mode 100644 index 1dadcb6..0000000 --- a/model/callback_state.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// CallbackState executes a function and waits for callback event that indicates completion of the task. -type CallbackState struct { - // Defines the action to be executed. - // +kubebuilder:validation:Required - Action Action `json:"action"` - // References a unique callback event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Time period to wait for incoming events (ISO 8601 format) - // +optional - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - // Event data filter definition. 
- // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` -} - -func (c *CallbackState) MarshalJSON() ([]byte, error) { - type Alias CallbackState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(c), - Timeouts: c.Timeouts, - }) - return custom, err -} - -// CallbackStateTimeout defines timeout settings for callback state -type CallbackStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/callback_state_validator_test.go b/model/callback_state_validator_test.go deleted file mode 100644 index a89cea9..0000000 --- a/model/callback_state_validator_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildCallbackState(workflow *Workflow, name, eventRef string) *State { - consumeEvent := Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, consumeEvent) - - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeCallback, - }, - CallbackState: &CallbackState{ - EventRef: eventRef, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func buildCallbackStateTimeout(callbackState *CallbackState) *CallbackStateTimeout { - callbackState.Timeouts = &CallbackStateTimeout{} - return callbackState.Timeouts -} - -func TestCallbackStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.EventRef = "" - return *model - }, - Err: `workflow.states[0].callbackState.eventRef is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestCallbackStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: `success`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: `omitempty`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "" - model.States[0].CallbackState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].CallbackState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].callbackState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].callbackState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/common.go b/model/common.go deleted file mode 100644 index 3d4f000..0000000 --- a/model/common.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -// Common schema for Serverless Workflow specification -type Common struct { - // Metadata information - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` -} - -// Metadata information -// +kubebuilder:pruning:PreserveUnknownFields -// +kubebuilder:validation:Schemaless -type Metadata map[string]Object diff --git a/model/delay_state.go b/model/delay_state.go deleted file mode 100644 index 3227e74..0000000 --- a/model/delay_state.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "encoding/json" - -// DelayState Causes the workflow execution to delay for a specified duration -type DelayState struct { - // Amount of time (ISO 8601 format) to delay - // +kubebuilder:validation:Required - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` -} - -func (a *DelayState) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` - }{ - TimeDelay: a.TimeDelay, - }) - return custom, err -} diff --git a/model/delay_state_test.go b/model/delay_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/delay_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/delay_state_validator_test.go b/model/delay_state_validator_test.go deleted file mode 100644 index aed36c5..0000000 --- a/model/delay_state_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildDelayState(workflow *Workflow, name, timeDelay string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeDelay, - }, - DelayState: &DelayState{ - TimeDelay: timeDelay, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func TestDelayStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - delayState := buildDelayState(baseWorkflow, "start state", "PT5S") - buildEndByState(delayState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "P5S" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/doc.go b/model/doc.go deleted file mode 100644 index 1508354..0000000 --- a/model/doc.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -// +k8s:deepcopy-gen=package -// +k8s:deepcopy-gen:nonpointer-interfaces=true diff --git a/model/endpoint.go b/model/endpoint.go new file mode 100644 index 0000000..9c59fb5 --- /dev/null +++ b/model/endpoint.go @@ -0,0 +1,184 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" + + "github.com/tidwall/gjson" +) + +// LiteralUriPattern matches standard URIs without placeholders. +var LiteralUriPattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://[^{}\s]+$`) + +// LiteralUriTemplatePattern matches URIs with placeholders. +var LiteralUriTemplatePattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*://.*\{.*}.*$`) + +// URITemplate represents a URI that can be a literal URI or a URI template. +type URITemplate interface { + IsURITemplate() bool + String() string +} + +// UnmarshalURITemplate is a shared function for unmarshalling URITemplate fields. 
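+// It returns a LiteralUriTemplate when the value contains '{...}' placeholders,
+// a LiteralUri for a plain absolute URI, and an error for anything else.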
+func UnmarshalURITemplate(data []byte) (URITemplate, error) { + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("failed to unmarshal URITemplate: %w", err) + } + + if LiteralUriTemplatePattern.MatchString(raw) { + return &LiteralUriTemplate{Value: raw}, nil + } + + if LiteralUriPattern.MatchString(raw) { + return &LiteralUri{Value: raw}, nil + } + + return nil, fmt.Errorf("invalid URI or URI template format: %s", raw) +} + +type LiteralUriTemplate struct { + Value string `json:"-" validate:"required,uri_template_pattern"` // Validate pattern for URI template. +} + +func (t *LiteralUriTemplate) IsURITemplate() bool { + return true +} + +func (t *LiteralUriTemplate) MarshalJSON() ([]byte, error) { + return json.Marshal(t.Value) +} + +func (t *LiteralUriTemplate) String() string { + return t.Value +} + +type LiteralUri struct { + Value string `json:"-" validate:"required,uri_pattern"` // Validate pattern for URI. +} + +func (u *LiteralUri) IsURITemplate() bool { + return true +} + +func (u *LiteralUri) MarshalJSON() ([]byte, error) { + return json.Marshal(u.Value) +} + +func (u *LiteralUri) String() string { + return u.Value +} + +type EndpointConfiguration struct { + URI URITemplate `json:"uri" validate:"required"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +// UnmarshalJSON implements custom unmarshalling for EndpointConfiguration. +func (e *EndpointConfiguration) UnmarshalJSON(data []byte) error { + // Use a temporary structure to unmarshal the JSON + type Alias EndpointConfiguration + temp := &struct { + URI json.RawMessage `json:"uri"` + *Alias + }{ + Alias: (*Alias)(e), + } + + if err := json.Unmarshal(data, &temp); err != nil { + return fmt.Errorf("failed to unmarshal EndpointConfiguration: %w", err) + } + + // Unmarshal the URI field into the appropriate URITemplate implementation + uri, err := UnmarshalURITemplate(temp.URI) + if err != nil { + return fmt.Errorf("invalid URI in EndpointConfiguration: %w", err) + } + e.URI = uri + + return nil +} + +type Endpoint struct { + RuntimeExpression *RuntimeExpression `json:"-"` + URITemplate URITemplate `json:"-"` + EndpointConfig *EndpointConfiguration `json:"-"` +} + +func NewEndpoint(uri string) *Endpoint { + return &Endpoint{URITemplate: &LiteralUri{Value: uri}} +} + +func (e *Endpoint) String() string { + if e.RuntimeExpression != nil { + return e.RuntimeExpression.String() + } + if e.URITemplate != nil { + return e.URITemplate.String() + } + if e.EndpointConfig != nil { + return e.EndpointConfig.URI.String() + } + return "" +} + +// UnmarshalJSON implements custom unmarshalling for Endpoint. 
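+// An empty JSON object leaves every field unset; any other value is tried, in order,
+// as a URI template, then a runtime expression, and finally an endpoint configuration object.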
+func (e *Endpoint) UnmarshalJSON(data []byte) error {
+	if gjson.ValidBytes(data) && gjson.ParseBytes(data).IsObject() && len(gjson.ParseBytes(data).Map()) == 0 {
+		// Leave the Endpoint fields unset (nil)
+		return nil
+	}
+
+	// First try to unmarshal as URITemplate
+	if uriTemplate, err := UnmarshalURITemplate(data); err == nil {
+		e.URITemplate = uriTemplate
+		return nil
+	}
+
+	// Then try to unmarshal as RuntimeExpression
+	var runtimeExpr RuntimeExpression
+	if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() {
+		e.RuntimeExpression = &runtimeExpr
+		return nil
+	}
+
+	// Finally, try to unmarshal as EndpointConfiguration
+	var endpointConfig EndpointConfiguration
+	if err := json.Unmarshal(data, &endpointConfig); err == nil {
+		e.EndpointConfig = &endpointConfig
+		return nil
+	}
+
+	return errors.New("failed to unmarshal Endpoint: data does not match any known schema")
+}
+
+// MarshalJSON implements custom marshalling for Endpoint.
+func (e *Endpoint) MarshalJSON() ([]byte, error) {
+	if e.RuntimeExpression != nil {
+		return json.Marshal(e.RuntimeExpression)
+	}
+	if e.URITemplate != nil {
+		return json.Marshal(e.URITemplate)
+	}
+	if e.EndpointConfig != nil {
+		return json.Marshal(e.EndpointConfig)
+	}
+	// Return an empty JSON object when no fields are set
+	return []byte("{}"), nil
+}
diff --git a/model/endpoint_test.go b/model/endpoint_test.go
new file mode 100644
index 0000000..59ddd45
--- /dev/null
+++ b/model/endpoint_test.go
@@ -0,0 +1,144 @@
+// Copyright 2025 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEndpoint_UnmarshalJSON(t *testing.T) { + t.Run("Valid RuntimeExpression", func(t *testing.T) { + input := `"${example}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.RuntimeExpression, "RuntimeExpression should be set") + assert.Equal(t, "${example}", endpoint.RuntimeExpression.Value, "RuntimeExpression value should match") + }) + + t.Run("Invalid RuntimeExpression", func(t *testing.T) { + input := `"123invalid-expression"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid runtime expression") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + }) + + t.Run("Invalid LiteralUriTemplate", func(t *testing.T) { + uriTemplate := &LiteralUriTemplate{Value: "example.com/{id}"} + assert.False(t, LiteralUriPattern.MatchString(uriTemplate.Value), "LiteralUriTemplate should not match URI pattern") + }) + + t.Run("Valid URITemplate", func(t *testing.T) { + input := `"http://example.com/{id}"` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.URITemplate, "URITemplate should be set") + }) + + t.Run("Valid EndpointConfiguration", func(t *testing.T) { + input := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "admin", "password": "admin" } + } + }` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Username, "Authentication Username should match") + assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Password, "Authentication Password should match") + }) + + t.Run("Invalid JSON Structure", func(t *testing.T) { + input := `{"invalid": "data"}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.Error(t, err, "Unmarshal should return an error for invalid JSON structure") + }) + + t.Run("Empty Input", func(t *testing.T) { + input := `{}` + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error for empty input") + assert.Nil(t, endpoint.RuntimeExpression, "RuntimeExpression should not be set") + assert.Nil(t, endpoint.URITemplate, "URITemplate should not be set") + assert.Nil(t, endpoint.EndpointConfig, "EndpointConfig should not be set") + }) +} + +func TestEndpoint_MarshalJSON(t *testing.T) { + t.Run("Marshal RuntimeExpression", func(t *testing.T) { + endpoint := &Endpoint{ + RuntimeExpression: &RuntimeExpression{Value: "${example}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `"${example}"`, string(data), "Output JSON should match") + }) + + t.Run("Marshal URITemplate", func(t *testing.T) { + endpoint := &Endpoint{ + URITemplate: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + 
assert.JSONEq(t, `"http://example.com/{id}"`, string(data), "Output JSON should match") + }) + + t.Run("Marshal EndpointConfiguration", func(t *testing.T) { + endpoint := &Endpoint{ + EndpointConfig: &EndpointConfiguration{ + URI: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + Authentication: &ReferenceableAuthenticationPolicy{AuthenticationPolicy: &AuthenticationPolicy{Basic: &BasicAuthenticationPolicy{ + Username: "john", + Password: "secret", + }}}, + }, + } + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + expected := `{ + "uri": "http://example.com/{id}", + "authentication": { + "basic": { "username": "john", "password": "secret" } + } + }` + assert.JSONEq(t, expected, string(data), "Output JSON should match") + }) + + t.Run("Marshal Empty Endpoint", func(t *testing.T) { + endpoint := Endpoint{} + + data, err := json.Marshal(endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, `{}`, string(data), "Output JSON should be empty") + }) +} diff --git a/model/event.go b/model/event.go deleted file mode 100644 index bad1ce4..0000000 --- a/model/event.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventKind defines this event as either `consumed` or `produced` -type EventKind string - -func (i EventKind) KindValues() []string { - return []string{ - string(EventKindConsumed), - string(EventKindProduced), - } -} - -func (i EventKind) String() string { - return string(i) -} - -const ( - // EventKindConsumed means the event continuation of workflow instance execution - EventKindConsumed EventKind = "consumed" - - // EventKindProduced means the event was created during workflow instance execution - EventKindProduced EventKind = "produced" -) - -// Event used to define events and their correlations -// +builder-gen:new-call=ApplyDefault -type Event struct { - Common `json:",inline"` - // Unique event name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // CloudEvent source. - // +optional - Source string `json:"source,omitempty"` - // CloudEvent type. - // +kubebuilder:validation:Required - Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Defaults to `consumed`. - // +kubebuilder:validation:Enum=consumed;produced - // +kubebuilder:default=consumed - Kind EventKind `json:"kind,omitempty" validate:"required,oneofkind"` - // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload - // and context attributes should be accessible. Defaults to true. - // +kubebuilder:default=true - // +optional - DataOnly bool `json:"dataOnly,omitempty"` - // Define event correlation rules for this event. Only used for consumed events. 
- // +optional - Correlation []Correlation `json:"correlation,omitempty" validate:"dive"` -} - -type eventUnmarshal Event - -// UnmarshalJSON unmarshal Event object from json bytes -func (e *Event) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("event", data, (*eventUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event -func (e *Event) ApplyDefault() { - e.DataOnly = true - e.Kind = EventKindConsumed -} - -// Correlation define event correlation rules for an event. Only used for `consumed` events -type Correlation struct { - // CloudEvent Extension Context Attribute name - // +kubebuilder:validation:Required - ContextAttributeName string `json:"contextAttributeName" validate:"required"` - // CloudEvent Extension Context Attribute value - // +optional - ContextAttributeValue string `json:"contextAttributeValue,omitempty"` -} - -// EventRef defining invocation of a function via event -// +builder-gen:new-call=ApplyDefault -type EventRef struct { - // Reference to the unique name of a 'produced' event definition, - // +kubebuilder:validation:Required - TriggerEventRef string `json:"triggerEventRef" validate:"required"` - // Reference to the unique name of a 'consumed' event definition - // +kubebuilder:validation:Required - ResultEventRef string `json:"resultEventRef" validate:"required"` - // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - // actionExecutionTimeout - // +optional - ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // If string type, an expression which selects parts of the states data output to become the data (payload) - // of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - // of the event referenced by triggerEventRef. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data *Object `json:"data,omitempty"` - // Add additional extension context attributes to the produced event. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type eventRefUnmarshal EventRef - -// UnmarshalJSON implements json.Unmarshaler -func (e *EventRef) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event Ref -func (e *EventRef) ApplyDefault() { - e.Invoke = InvokeKindSync -} diff --git a/model/event_data_filter.go b/model/event_data_filter.go deleted file mode 100644 index 1db5bbf..0000000 --- a/model/event_data_filter.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventDataFilter used to filter consumed event payloads. -// +builder-gen:new-call=ApplyDefault -type EventDataFilter struct { - // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - // should be ignored. Default is true. - // +optional - UseData bool `json:"useData,omitempty"` - // Workflow expression that filters of the event data (payload). - // +optional - Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. - // If not specified denotes the top-level state data element - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type eventDataFilterUnmarshal EventDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (f *EventDataFilter) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) -} - -// ApplyDefault set the default values for Event Data Filter -func (f *EventDataFilter) ApplyDefault() { - f.UseData = true -} diff --git a/model/event_data_filter_test.go b/model/event_data_filter_test.go deleted file mode 100644 index e4bf979..0000000 --- a/model/event_data_filter_test.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: true, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"UseData": false, "data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: false, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: EventDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"data": 1, "toStateData": "2"}`, - expect: EventDataFilter{}, - err: `eventDataFilter.data must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_data_filter_validator_test.go b/model/event_data_filter_validator_test.go deleted file mode 100644 index 1bbbac9..0000000 --- a/model/event_data_filter_validator_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestEventDataFilterStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event_state.go b/model/event_state.go deleted file mode 100644 index 39bd590..0000000 --- a/model/event_state.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// EventState await one or more events and perform actions when they are received. If defined as the -// workflow starting state, the event state definition controls when the workflow instances should be created. -// +builder-gen:new-call=ApplyDefault -type EventState struct { - // TODO: EventState doesn't have usedForCompensation field. 
- - // If true consuming one of the defined events causes its associated actions to be performed. If false all - // the defined events must be consumed in order for actions to be performed. Defaults to true. - // +kubebuilder:default=true - // +optional - Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed. - // +kubebuilder:validation:MinItems=1 - OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts. - // +optional - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` -} - -func (e *EventState) MarshalJSON() ([]byte, error) { - type Alias EventState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(e), - Timeouts: e.Timeouts, - }) - return custom, err -} - -type eventStateUnmarshal EventState - -// UnmarshalJSON unmarshal EventState object from json bytes -func (e *EventState) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event State -func (e *EventState) ApplyDefault() { - e.Exclusive = true -} - -// OnEvents define which actions are be performed for the one or more events. -// +builder-gen:new-call=ApplyDefault -type OnEvents struct { - // References one or more unique event names in the defined workflow events. - // +kubebuilder:validation:MinItems=1 - EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Should actions be performed sequentially or in parallel. Default is sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed if expression matches - // +optional - Actions []Action `json:"actions,omitempty" validate:"dive"` - // eventDataFilter defines the callback event data filter definition - // +optional - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -type onEventsUnmarshal OnEvents - -// UnmarshalJSON unmarshal OnEvents object from json bytes -func (o *OnEvents) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) -} - -// ApplyDefault set the default values for On Events -func (o *OnEvents) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// EventStateTimeout defines timeout settings for event state -type EventStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/event_state_test.go b/model/event_state_test.go deleted file mode 100644 index 348aaea..0000000 --- a/model/event_state_test.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect State - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"name": "1", "type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: false, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - { - desp: "default exclusive", - data: `{"name": "1", "type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: true, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := State{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestOnEventsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OnEvents - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "default action mode", - data: `{"eventRefs": ["E1", "E2"]}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{}, - err: `invalid character ':' after top-level value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OnEvents{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_state_validator.go b/model/event_state_validator.go deleted file mode 100644 index d4f2f40..0000000 --- a/model/event_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The 
Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStateStructLevelValidationCtx), EventState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onEventsStructLevelValidationCtx), OnEvents{}) -} - -func eventStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - // EventRefs -} - -func onEventsStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - onEvent := structLevel.Current().Interface().(OnEvents) - for _, eventRef := range onEvent.EventRefs { - if eventRef != "" && !ctx.ExistEvent(eventRef) { - structLevel.ReportError(eventRef, "eventRefs", "EventRefs", val.TagExists, "") - } - } -} diff --git a/model/event_state_validator_test.go b/model/event_state_validator_test.go deleted file mode 100644 index ea7d319..0000000 --- a/model/event_state_validator_test.go +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildEventState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeEvent, - }, - EventState: &EventState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOnEvents(workflow *Workflow, state *State, name string) *OnEvents { - event := Event{ - Name: name, - Type: "type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, event) - - state.EventState.OnEvents = append(state.EventState.OnEvents, OnEvents{ - EventRefs: []string{event.Name}, - ActionMode: ActionModeParallel, - }) - - return &state.EventState.OnEvents[len(state.EventState.OnEvents)-1] -} - -func buildEventStateTimeout(state *State) *EventStateTimeout { - state.EventState.Timeouts = &EventStateTimeout{ - ActionExecTimeout: "PT5S", - EventTimeout: "PT5S", - } - return state.EventState.Timeouts -} - -func TestEventStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = nil - return *model - }, - Err: `workflow.states[0].eventState.onEvents is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = []OnEvents{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents must have the minimum 1`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestOnEventsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{"event not found"} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = nil - model.States[0].EventState.OnEvents[0].ActionMode = "" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs is required -workflow.states[0].eventState.onEvents[0].actionMode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs must have the minimum 1`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].actionMode need by one of [sequential parallel]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func 
TestEventStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildEventStateTimeout(eventState) - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "" - model.States[0].EventState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].EventState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].eventState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].eventState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event_test.go b/model/event_test.go deleted file mode 100644 index f557c61..0000000 --- a/model/event_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventRef - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"invoke": "async"}`, - expect: EventRef{ - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "invoke unset", - data: `{}`, - expect: EventRef{ - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"invoke": 1}`, - expect: EventRef{}, - err: `eventRef.invoke must be sync or async`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestEventUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Event - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"dataOnly": false, "kind": "produced"}`, - expect: Event{ - DataOnly: false, - Kind: EventKindProduced, - }, - err: ``, - }, - { - desp: "optional field dataOnly & kind unset", - data: `{}`, - expect: Event{ - DataOnly: true, - Kind: EventKindConsumed, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"dataOnly": "false", "kind": "produced"}`, - expect: Event{}, - err: `event.dataOnly must be bool`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Event - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_validator.go b/model/event_validator.go deleted file mode 100644 index 7b4daa9..0000000 --- a/model/event_validator.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStructLevelValidation), Event{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventRefStructLevelValidation), EventRef{}) -} - -// eventStructLevelValidation custom validator for event kind consumed -func eventStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { -} - -func eventRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - model := structLevel.Current().Interface().(EventRef) - if model.TriggerEventRef != "" && !ctx.ExistEvent(model.TriggerEventRef) { - structLevel.ReportError(model.TriggerEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } - if model.ResultEventRef != "" && !ctx.ExistEvent(model.ResultEventRef) { - structLevel.ReportError(model.ResultEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } -} diff --git a/model/event_validator_test.go b/model/event_validator_test.go deleted file mode 100644 index 80340b0..0000000 --- a/model/event_validator_test.go +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildEventRef(workflow *Workflow, action *Action, triggerEvent, resultEvent string) *EventRef { - produceEvent := Event{ - Name: triggerEvent, - Type: "event type", - Kind: EventKindProduced, - } - - consumeEvent := Event{ - Name: resultEvent, - Type: "event type", - Kind: EventKindProduced, - } - - workflow.Events = append(workflow.Events, produceEvent) - workflow.Events = append(workflow.Events, consumeEvent) - - eventRef := &EventRef{ - TriggerEventRef: triggerEvent, - ResultEventRef: resultEvent, - Invoke: InvokeKindSync, - } - - action.EventRef = eventRef - return action.EventRef -} - -func buildCorrelation(event *Event) *Correlation { - event.Correlation = append(event.Correlation, Correlation{ - ContextAttributeName: "attribute name", - }) - - return &event.Correlation[len(event.Correlation)-1] -} - -func TestEventStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events = append(model.Events, model.Events[0]) - return *model - }, - Err: `workflow.events has duplicate "name"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Name = "" - model.Events[0].Type = "" - model.Events[0].Kind = "" - return *model - }, - Err: `workflow.events[0].name is required -workflow.events[0].type is required -workflow.events[0].kind is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Kind = EventKindConsumed + "invalid" - return *model - }, - Err: `workflow.events[0].kind need by one of [consumed produced]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestCorrelationStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - buildCorrelation(&baseWorkflow.Events[0]) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "empty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation = nil - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation[0].ContextAttributeName = "" - return *model - }, - Err: `workflow.events[0].correlation[0].contextAttributeName is required`, - }, - //TODO: Add test: correlation only used for `consumed` events - } - - StructLevelValidationCtx(t, testCases) -} - -func TestEventRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := 
buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - eventRef := buildEventRef(baseWorkflow, action1, "event 1", "event 2") - eventRef.ResultEventTimeout = "PT1H" - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef is required -workflow.states[0].actions[0].eventRef.resultEventRef is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "invalid event" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "invalid event 2" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event" -workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event 2"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.ResultEventTimeout = "10hs" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.resultEventTimeout invalid iso8601 duration "10hs"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/extension.go b/model/extension.go new file mode 100644 index 0000000..b7b49ec --- /dev/null +++ b/model/extension.go @@ -0,0 +1,120 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Extension represents the definition of an extension. +type Extension struct { + Extend string `json:"extend" validate:"required,oneof=call composite emit for listen raise run set switch try wait all"` + When *RuntimeExpression `json:"when,omitempty"` + Before *TaskList `json:"before,omitempty" validate:"omitempty,dive"` + After *TaskList `json:"after,omitempty" validate:"omitempty,dive"` +} + +// ExtensionItem represents a named extension and its associated definition. +type ExtensionItem struct { + Key string `json:"-" validate:"required"` + Extension *Extension `json:"-" validate:"required"` +} + +// MarshalJSON for ExtensionItem to serialize as a single-key object. 
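+// For example, an item with key "myExtension" is serialized as {"myExtension": {"extend": "call", ...}}.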
+func (ei *ExtensionItem) MarshalJSON() ([]byte, error) { + if ei == nil { + return nil, fmt.Errorf("cannot marshal a nil ExtensionItem") + } + + extensionJSON, err := json.Marshal(ei.Extension) + if err != nil { + return nil, fmt.Errorf("failed to marshal extension: %w", err) + } + + return json.Marshal(map[string]json.RawMessage{ + ei.Key: extensionJSON, + }) +} + +// UnmarshalJSON for ExtensionItem to deserialize from a single-key object. +func (ei *ExtensionItem) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionItem: %w", err) + } + + if len(raw) != 1 { + return fmt.Errorf("each ExtensionItem must have exactly one key") + } + + for key, extensionData := range raw { + var ext Extension + if err := json.Unmarshal(extensionData, &ext); err != nil { + return fmt.Errorf("failed to unmarshal extension %q: %w", key, err) + } + ei.Key = key + ei.Extension = &ext + break + } + + return nil +} + +// ExtensionList represents a list of extensions. +type ExtensionList []*ExtensionItem + +// Key retrieves all extensions with the specified key. +func (el *ExtensionList) Key(key string) *Extension { + for _, item := range *el { + if item.Key == key { + return item.Extension + } + } + return nil +} + +// UnmarshalJSON for ExtensionList to deserialize an array of ExtensionItem objects. +func (el *ExtensionList) UnmarshalJSON(data []byte) error { + var rawExtensions []json.RawMessage + if err := json.Unmarshal(data, &rawExtensions); err != nil { + return fmt.Errorf("failed to unmarshal ExtensionList: %w", err) + } + + for _, raw := range rawExtensions { + var item ExtensionItem + if err := json.Unmarshal(raw, &item); err != nil { + return fmt.Errorf("failed to unmarshal extension item: %w", err) + } + *el = append(*el, &item) + } + + return nil +} + +// MarshalJSON for ExtensionList to serialize as an array of ExtensionItem objects. +func (el *ExtensionList) MarshalJSON() ([]byte, error) { + var serializedExtensions []json.RawMessage + + for _, item := range *el { + serialized, err := json.Marshal(item) + if err != nil { + return nil, fmt.Errorf("failed to marshal ExtensionItem: %w", err) + } + serializedExtensions = append(serializedExtensions, serialized) + } + + return json.Marshal(serializedExtensions) +} diff --git a/model/extension_test.go b/model/extension_test.go new file mode 100644 index 0000000..7a11a5f --- /dev/null +++ b/model/extension_test.go @@ -0,0 +1,140 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestExtension_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var extension Extension + err := json.Unmarshal([]byte(jsonData), &extension) + assert.NoError(t, err) + assert.Equal(t, "call", extension.Extend) + assert.Equal(t, NewExpr("${condition}"), extension.When) + + task1 := extension.Before.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + // Check if task2 exists before accessing its fields + task2 := extension.After.Key("task2") + assert.NotNil(t, task2, "task2 should not be nil") + openAPITask := task2.AsCallOpenAPITask() + assert.NotNil(t, openAPITask) + assert.Equal(t, "openapi", openAPITask.Call) + assert.Equal(t, "doc1", openAPITask.With.Document.Name) + assert.Equal(t, "op1", openAPITask.With.OperationID) +} + +func TestExtension_MarshalJSON(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(extension) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "extend": "call", + "when": "${condition}", + "before": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}} + ], + "after": [ + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestExtension_Validation(t *testing.T) { + extension := Extension{ + Extend: "call", + When: NewExpr("${condition}"), + Before: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + }, + After: &TaskList{ + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "doc1", // Missing Endpoint + }, + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(extension) + assert.Error(t, err) + + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s': %s", + validationErr.StructNamespace(), validationErr.Tag(), validationErr.Param()) + } + + // Assert on specific validation errors + assert.Contains(t, validationErrors.Error(), "After[0].Task.With.Document.Endpoint") + assert.Contains(t, validationErrors.Error(), "required") + } else { + t.Errorf("Unexpected error type: %v", err) + } +} diff --git a/model/foreach_state.go b/model/foreach_state.go deleted file mode 100644 index aa19f4e..0000000 --- a/model/foreach_state.go +++ /dev/null 
@@ -1,108 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) -type ForEachModeType string - -func (f ForEachModeType) KindValues() []string { - return []string{ - string(ForEachModeTypeSequential), - string(ForEachModeTypeParallel), - } -} - -func (f ForEachModeType) String() string { - return string(f) -} - -const ( - // ForEachModeTypeSequential specifies iterations should be done sequentially. - ForEachModeTypeSequential ForEachModeType = "sequential" - // ForEachModeTypeParallel specifies iterations should be done parallel. - ForEachModeTypeParallel ForEachModeType = "parallel" -) - -// ForEachState used to execute actions for each element of a data set. -// +builder-gen:new-call=ApplyDefault -type ForEachState struct { - // Workflow expression selecting an array element of the states' data. - // +kubebuilder:validation:Required - InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration. - // +optional - OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, - // this param should contain a unique element of the inputCollection array. - // +optional - IterationParam string `json:"iterationParam,omitempty"` - // Specifies how many iterations may run in parallel at the same time. Used if mode property is set to - // parallel (default). If not specified, its value should be the size of the inputCollection. - // +optional - BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection. - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions,omitempty" validate:"required,min=0,dive"` - // State specific timeout. - // +optional - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel. 
- // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=parallel - Mode ForEachModeType `json:"mode,omitempty" validate:"required,oneofkind"` -} - -func (f *ForEachState) MarshalJSON() ([]byte, error) { - type Alias ForEachState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(f), - Timeouts: f.Timeouts, - }) - return custom, err -} - -type forEachStateUnmarshal ForEachState - -// UnmarshalJSON implements json.Unmarshaler -func (f *ForEachState) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) -} - -// ApplyDefault set the default values for ForEach State -func (f *ForEachState) ApplyDefault() { - f.Mode = ForEachModeTypeParallel -} - -// ForEachStateTimeout defines timeout settings for foreach state -type ForEachStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go deleted file mode 100644 index a10f7a9..0000000 --- a/model/foreach_state_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestForEachStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ForEachState - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"mode": "sequential"}`, - expect: &ForEachState{ - Mode: ForEachModeTypeSequential, - }, - err: ``, - }, - { - desp: "mode unset", - data: `{}`, - expect: &ForEachState{ - Mode: ForEachModeTypeParallel, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"mode": 1}`, - expect: nil, - err: `forEachState.mode must be sequential or parallel`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ForEachState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/foreach_state_validator.go b/model/foreach_state_validator.go deleted file mode 100644 index d1d9894..0000000 --- a/model/foreach_state_validator.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(forEachStateStructLevelValidation, ForEachState{}) -} - -// ForEachStateStructLevelValidation custom validator for ForEachState -func forEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - stateObj := structLevel.Current().Interface().(ForEachState) - - if stateObj.Mode != ForEachModeTypeParallel { - return - } - - if stateObj.BatchSize == nil { - return - } - - if !val.ValidateGt0IntStr(stateObj.BatchSize) { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } -} diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go deleted file mode 100644 index 8fb49d0..0000000 --- a/model/foreach_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildForEachState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeForEach, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Mode: ForEachModeTypeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func TestForEachStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - forEachState := buildForEachState(baseWorkflow, "start state") - buildEndByState(forEachState, true, false) - action1 := buildActionByForEachState(forEachState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - } - return *model - }, - }, - { - Desp: "success without batch size", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = nil - return *model - }, - }, - { - Desp: "gt0 int", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - } - return *model - }, - Err: `workflow.states[0].forEachState.batchSize must be greater than 0`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].forEachState.mode need by one of [sequential parallel]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.InputCollection = "" - model.States[0].ForEachState.Mode = "" - model.States[0].ForEachState.Actions = nil - return *model - }, - Err: `workflow.states[0].forEachState.inputCollection is required -workflow.states[0].forEachState.actions is required -workflow.states[0].forEachState.mode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Actions = []Action{} - return *model - }, - Err: ``, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestForEachStateTimeoutStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} diff --git a/model/function.go b/model/function.go deleted file mode 100644 index 7cf4197..0000000 --- a/model/function.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -const ( - // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service - // operation that needs to be invoked, separated by a '#'. - FunctionTypeREST FunctionType = "rest" - // FunctionTypeRPC a combination of the gRPC proto document URI and the particular service name and service method - // name that needs to be invoked, separated by a '#'. - FunctionTypeRPC FunctionType = "rpc" - // FunctionTypeExpression defines the expression syntax. - FunctionTypeExpression FunctionType = "expression" - // FunctionTypeGraphQL a combination of the GraphQL schema definition URI and the particular service name and - // service method name that needs to be invoked, separated by a '#' - FunctionTypeGraphQL FunctionType = "graphql" - // FunctionTypeAsyncAPI a combination of the AsyncApi definition document URI and the particular service operation - // that needs to be invoked, separated by a '#' - FunctionTypeAsyncAPI FunctionType = "asyncapi" - // FunctionTypeOData a combination of the GraphQL schema definition URI and the particular service name and service - // method name that needs to be invoked, separated by a '#' - FunctionTypeOData FunctionType = "odata" - // FunctionTypeCustom property defines a list of function types that are set by the specification. Some runtime - // implementations might support additional function types that extend the ones defined in the specification - FunctionTypeCustom FunctionType = "custom" -) - -// FunctionType ... -type FunctionType string - -func (i FunctionType) KindValues() []string { - return []string{ - string(FunctionTypeREST), - string(FunctionTypeRPC), - string(FunctionTypeExpression), - string(FunctionTypeGraphQL), - string(FunctionTypeAsyncAPI), - string(FunctionTypeOData), - string(FunctionTypeCustom), - } -} - -func (i FunctionType) String() string { - return string(i) -} - -// Function ... -// +builder-gen:new-call=ApplyDefault -type Function struct { - Common `json:",inline"` - // Unique function name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // If type is `rest`, #. - // If type is `rpc`, ##. - // If type is `expression`, defines the workflow expression. If the type is `custom`, - // #. - // +kubebuilder:validation:Required - Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. - // Default is `rest`. - // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;odata;asyncapi;custom - // +kubebuilder:default=rest - Type FunctionType `json:"type,omitempty" validate:"required,oneofkind"` - // References an auth definition name to be used to access to resource defined in the operation parameter. 
- // +optional - AuthRef string `json:"authRef,omitempty"` -} - -type functionUnmarshal Function - -// UnmarshalJSON implements json unmarshaler interface -func (f *Function) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("function", data, (*functionUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function -func (f *Function) ApplyDefault() { - f.Type = FunctionTypeREST -} diff --git a/model/function_validator_test.go b/model/function_validator_test.go deleted file mode 100644 index fcde6b9..0000000 --- a/model/function_validator_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestFunctionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Functions = Functions{{ - Name: "function 1", - Operation: "http://function/action", - Type: FunctionTypeREST, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Name = "" - model.Functions[0].Operation = "" - model.Functions[0].Type = "" - return *model - }, - Err: `workflow.functions[0].name is required -workflow.functions[0].operation is required -workflow.functions[0].type is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions = append(model.Functions, model.Functions[0]) - return *model - }, - Err: `workflow.functions has duplicate "name"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Type = FunctionTypeREST + "invalid" - return *model - }, - Err: `workflow.functions[0].type need by one of [rest rpc expression graphql asyncapi odata custom]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/inject_state.go b/model/inject_state.go deleted file mode 100644 index e3995c8..0000000 --- a/model/inject_state.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// InjectState used to inject static data into state data input. -type InjectState struct { - // JSON object which can be set as state's data input and can be manipulated via filter - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data map[string]Object `json:"data" validate:"required,min=1"` - // State specific timeouts - // +optional - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` -} - -func (i *InjectState) MarshalJSON() ([]byte, error) { - type Alias InjectState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(i), - Timeouts: i.Timeouts, - }) - return custom, err -} - -// InjectStateTimeout defines timeout settings for inject state -type InjectStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/inject_state_validator_test.go b/model/inject_state_validator_test.go deleted file mode 100644 index a8f127c..0000000 --- a/model/inject_state_validator_test.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestInjectStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} - -func TestInjectStateTimeoutStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/object.go b/model/object.go deleted file mode 100644 index e19d7b0..0000000 --- a/model/object.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "bytes" - "encoding/json" - "fmt" - "math" - "strconv" -) - -type Type int8 - -const ( - Null Type = iota - String - Int - Float - Map - Slice - Bool -) - -// Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type. -// The DeepCopy tool allow us to easily import the Workflow types into a Kubernetes operator, -// which requires the DeepCopy method. -// -// It can marshal and unmarshal any type. 
-// This object type can be three types: -// - String - holds string values -// - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is -// parsed to int32 -// -// +kubebuilder:pruning:PreserveUnknownFields -// +kubebuilder:validation:Schemaless -type Object struct { - Type Type `json:"type,inline"` - StringValue string `json:"strVal,inline"` - IntValue int32 `json:"intVal,inline"` - FloatValue float64 `json:"floatVal,inline"` - MapValue map[string]Object `json:"mapVal,inline"` - SliceValue []Object `json:"sliceVal,inline"` - BoolValue bool `json:"boolValue,inline"` -} - -// UnmarshalJSON implements json.Unmarshaler -func (obj *Object) UnmarshalJSON(data []byte) error { - data = bytes.TrimSpace(data) - - if data[0] == '"' { - obj.Type = String - return json.Unmarshal(data, &obj.StringValue) - } else if data[0] == 't' || data[0] == 'f' { - obj.Type = Bool - return json.Unmarshal(data, &obj.BoolValue) - } else if data[0] == 'n' { - obj.Type = Null - return nil - } else if data[0] == '{' { - obj.Type = Map - return json.Unmarshal(data, &obj.MapValue) - } else if data[0] == '[' { - obj.Type = Slice - return json.Unmarshal(data, &obj.SliceValue) - } - - number := string(data) - intValue, err := strconv.ParseInt(number, 10, 32) - if err == nil { - obj.Type = Int - obj.IntValue = int32(intValue) - return nil - } - - floatValue, err := strconv.ParseFloat(number, 64) - if err == nil { - obj.Type = Float - obj.FloatValue = floatValue - return nil - } - - return fmt.Errorf("json invalid number %q", number) -} - -// MarshalJSON marshal the given json object into the respective Object subtype. -func (obj Object) MarshalJSON() ([]byte, error) { - switch obj.Type { - case String: - return []byte(fmt.Sprintf(`%q`, obj.StringValue)), nil - case Int: - return []byte(fmt.Sprintf(`%d`, obj.IntValue)), nil - case Float: - return []byte(fmt.Sprintf(`%f`, obj.FloatValue)), nil - case Map: - return json.Marshal(obj.MapValue) - case Slice: - return json.Marshal(obj.SliceValue) - case Bool: - return []byte(fmt.Sprintf(`%t`, obj.BoolValue)), nil - case Null: - return []byte("null"), nil - default: - panic("object invalid type") - } -} - -func FromString(val string) Object { - return Object{Type: String, StringValue: val} -} - -func FromInt(val int) Object { - if val > math.MaxInt32 || val < math.MinInt32 { - fmt.Println(fmt.Errorf("value: %d overflows int32", val)) - } - return Object{Type: Int, IntValue: int32(val)} -} - -func FromFloat(val float64) Object { - if val > math.MaxFloat64 || val < -math.MaxFloat64 { - fmt.Println(fmt.Errorf("value: %f overflows float64", val)) - } - return Object{Type: Float, FloatValue: float64(val)} -} - -func FromMap(mapValue map[string]any) Object { - mapValueObject := make(map[string]Object, len(mapValue)) - for key, value := range mapValue { - mapValueObject[key] = FromInterface(value) - } - return Object{Type: Map, MapValue: mapValueObject} -} - -func FromSlice(sliceValue []any) Object { - sliceValueObject := make([]Object, len(sliceValue)) - for key, value := range sliceValue { - sliceValueObject[key] = FromInterface(value) - } - return Object{Type: Slice, SliceValue: sliceValueObject} -} - -func FromBool(val bool) Object { - return Object{Type: Bool, BoolValue: val} -} - -func FromNull() Object { - return Object{Type: Null} -} - -func FromInterface(value any) Object { - switch v := value.(type) { - case string: - return FromString(v) - case int: - return FromInt(v) - case int32: - return FromInt(int(v)) - case float64: 
- return FromFloat(v) - case map[string]any: - return FromMap(v) - case []any: - return FromSlice(v) - case bool: - return FromBool(v) - case nil: - return FromNull() - } - panic("invalid type") -} - -func ToInterface(object Object) any { - switch object.Type { - case String: - return object.StringValue - case Int: - return object.IntValue - case Float: - return object.FloatValue - case Map: - mapInterface := make(map[string]any, len(object.MapValue)) - for key, value := range object.MapValue { - mapInterface[key] = ToInterface(value) - } - return mapInterface - case Slice: - sliceInterface := make([]any, len(object.SliceValue)) - for key, value := range object.SliceValue { - sliceInterface[key] = ToInterface(value) - } - return sliceInterface - case Bool: - return object.BoolValue - case Null: - return nil - } - panic("invalid type") -} diff --git a/model/object_test.go b/model/object_test.go deleted file mode 100644 index 0cf928f..0000000 --- a/model/object_test.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func Test_unmarshal(t *testing.T) { - testCases := []struct { - name string - json string - object Object - any any - err string - }{ - { - name: "string", - json: "\"value\"", - object: FromString("value"), - any: any("value"), - }, - { - name: "int", - json: "123", - object: FromInt(123), - any: any(int32(123)), - }, - { - name: "float", - json: "123.123", - object: FromFloat(123.123), - any: any(123.123), - }, - { - name: "map", - json: "{\"key\": \"value\", \"key2\": 123}", - object: FromMap(map[string]any{"key": "value", "key2": 123}), - any: any(map[string]any{"key": "value", "key2": int32(123)}), - }, - { - name: "slice", - json: "[\"key\", 123]", - object: FromSlice([]any{"key", 123}), - any: any([]any{"key", int32(123)}), - }, - { - name: "bool true", - json: "true", - object: FromBool(true), - any: any(true), - }, - { - name: "bool false", - json: "false", - object: FromBool(false), - any: any(false), - }, - { - name: "null", - json: "null", - object: FromNull(), - any: nil, - }, - { - name: "string invalid", - json: "\"invalid", - err: "unexpected end of JSON input", - }, - { - name: "number invalid", - json: "123a", - err: "invalid character 'a' after top-level value", - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - o := Object{} - err := json.Unmarshal([]byte(tc.json), &o) - if tc.err == "" { - assert.NoError(t, err) - assert.Equal(t, tc.object, o) - assert.Equal(t, ToInterface(tc.object), tc.any) - } else { - assert.Equal(t, tc.err, err.Error()) - } - }) - } -} - -func Test_marshal(t *testing.T) { - testCases := []struct { - name string - json string - object Object - err string - }{ - { - name: "string", - json: "\"value\"", - object: FromString("value"), - }, - { - name: "int", - json: "123", - object: FromInt(123), - }, 
- { - name: "float", - json: "123.123000", - object: FromFloat(123.123), - }, - { - name: "map", - json: "{\"key\":\"value\",\"key2\":123}", - object: FromMap(map[string]any{"key": "value", "key2": 123}), - }, - { - name: "slice", - json: "[\"key\",123]", - object: FromSlice([]any{"key", 123}), - }, - { - name: "bool true", - json: "true", - object: FromBool(true), - }, - { - name: "bool false", - json: "false", - object: FromBool(false), - }, - { - name: "null", - json: "null", - object: FromNull(), - }, - { - name: "interface", - json: "[\"value\",123,123.123000,[1],{\"key\":1.100000},true,false,null]", - object: FromInterface([]any{ - "value", - 123, - 123.123, - []any{1}, - map[string]any{"key": 1.1}, - true, - false, - nil, - }), - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - json, err := json.Marshal(tc.object) - if tc.err == "" { - assert.NoError(t, err) - assert.Equal(t, tc.json, string(json)) - } else { - assert.Equal(t, tc.err, err.Error()) - } - }) - } -} diff --git a/model/objects.go b/model/objects.go new file mode 100644 index 0000000..ecfba00 --- /dev/null +++ b/model/objects.go @@ -0,0 +1,260 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" +) + +// ObjectOrString is a type that can hold either a string or an object. +type ObjectOrString struct { + Value interface{} `validate:"object_or_string"` +} + +// UnmarshalJSON unmarshals data into either a string or an object. +func (o *ObjectOrString) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + o.Value = asString + return nil + } + + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + return errors.New("ObjectOrString must be a string or an object") +} + +// MarshalJSON marshals ObjectOrString into JSON. +func (o *ObjectOrString) MarshalJSON() ([]byte, error) { + return json.Marshal(o.Value) +} + +// ObjectOrRuntimeExpr is a type that can hold either a RuntimeExpression or an object. +type ObjectOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"object_or_runtime_expr"` // Custom validation tag. +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or an object. 
+func (o *ObjectOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + o.Value = runtimeExpr + return nil + } + + // Attempt to decode as a generic object + var asObject map[string]interface{} + if err := json.Unmarshal(data, &asObject); err == nil { + o.Value = asObject + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("ObjectOrRuntimeExpr must be a runtime expression or an object") +} + +// MarshalJSON marshals ObjectOrRuntimeExpr into JSON. +func (o *ObjectOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := o.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case map[string]interface{}: + return json.Marshal(v) + default: + return nil, fmt.Errorf("ObjectOrRuntimeExpr contains unsupported type") + } +} + +// Validate validates the ObjectOrRuntimeExpr using the custom validation logic. +func (o *ObjectOrRuntimeExpr) Validate() error { + switch v := o.Value.(type) { + case RuntimeExpression: + if !v.IsValid() { + return fmt.Errorf("invalid runtime expression: %s", v.Value) + } + case map[string]interface{}: + if len(v) == 0 { + return fmt.Errorf("object cannot be empty") + } + default: + return fmt.Errorf("unsupported value type for ObjectOrRuntimeExpr") + } + return nil +} + +// StringOrRuntimeExpr is a type that can hold either a RuntimeExpression or a string. +type StringOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"string_or_runtime_expr"` // Custom validation tag. +} + +// UnmarshalJSON unmarshals data into either a RuntimeExpression or a string. +func (s *StringOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as a RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + s.Value = runtimeExpr + return nil + } + + // Attempt to decode as a string + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + s.Value = asString + return nil + } + + // If neither succeeds, return an error + return fmt.Errorf("StringOrRuntimeExpr must be a runtime expression or a string") +} + +// MarshalJSON marshals StringOrRuntimeExpr into JSON. +func (s *StringOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := s.Value.(type) { + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + return json.Marshal(v) + default: + return nil, fmt.Errorf("StringOrRuntimeExpr contains unsupported type") + } +} + +func (s *StringOrRuntimeExpr) String() string { + switch v := s.Value.(type) { + case RuntimeExpression: + return v.String() + case string: + return v + default: + return "" + } +} + +// URITemplateOrRuntimeExpr represents a type that can be a URITemplate or a RuntimeExpression. +type URITemplateOrRuntimeExpr struct { + Value interface{} `json:"-" validate:"uri_template_or_runtime_expr"` // Custom validation. +} + +func NewUriTemplate(uriTemplate string) *URITemplateOrRuntimeExpr { + return &URITemplateOrRuntimeExpr{ + Value: uriTemplate, + } +} + +// UnmarshalJSON unmarshals data into either a URITemplate or a RuntimeExpression. 
+func (u *URITemplateOrRuntimeExpr) UnmarshalJSON(data []byte) error { + // Attempt to decode as URITemplate + uriTemplate, err := UnmarshalURITemplate(data) + if err == nil { + u.Value = uriTemplate + return nil + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + u.Value = runtimeExpr + return nil + } + + // Return an error if neither succeeds + return fmt.Errorf("URITemplateOrRuntimeExpr must be a valid URITemplate or RuntimeExpression") +} + +// MarshalJSON marshals URITemplateOrRuntimeExpr into JSON. +func (u *URITemplateOrRuntimeExpr) MarshalJSON() ([]byte, error) { + switch v := u.Value.(type) { + case URITemplate: + return json.Marshal(v.String()) + case RuntimeExpression: + return json.Marshal(v.String()) + case string: + // Attempt to marshal as RuntimeExpression + runtimeExpr := RuntimeExpression{Value: v} + if runtimeExpr.IsValid() { + return json.Marshal(runtimeExpr.String()) + } + // Otherwise, treat as a Literal URI + uriTemplate, err := UnmarshalURITemplate([]byte(fmt.Sprintf(`"%s"`, v))) + if err == nil { + return json.Marshal(uriTemplate.String()) + } + return nil, fmt.Errorf("invalid string for URITemplateOrRuntimeExpr: %s", v) + default: + return nil, fmt.Errorf("unsupported type for URITemplateOrRuntimeExpr: %T", v) + } +} + +func (u *URITemplateOrRuntimeExpr) String() string { + switch v := u.Value.(type) { + case URITemplate: + return v.String() + case RuntimeExpression: + return v.String() + } + return "" +} + +// JsonPointerOrRuntimeExpression represents a type that can be a JSON Pointer or a RuntimeExpression. +type JsonPointerOrRuntimeExpression struct { + Value interface{} `json:"-" validate:"json_pointer_or_runtime_expr"` // Custom validation tag. +} + +// JSONPointerPattern validates JSON Pointers as per RFC 6901. +var JSONPointerPattern = regexp.MustCompile(`^(/([^/~]|~[01])*)*$`) + +// UnmarshalJSON unmarshals data into either a JSON Pointer or a RuntimeExpression. +func (j *JsonPointerOrRuntimeExpression) UnmarshalJSON(data []byte) error { + // Attempt to decode as a JSON Pointer + var jsonPointer string + if err := json.Unmarshal(data, &jsonPointer); err == nil { + if JSONPointerPattern.MatchString(jsonPointer) { + j.Value = jsonPointer + return nil + } + } + + // Attempt to decode as RuntimeExpression + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(data, &runtimeExpr); err == nil { + if runtimeExpr.IsValid() { + j.Value = runtimeExpr + return nil + } + } + + // If neither succeeds, return an error + return fmt.Errorf("JsonPointerOrRuntimeExpression must be a valid JSON Pointer or RuntimeExpression") +} + +// MarshalJSON marshals JsonPointerOrRuntimeExpression into JSON. +func (j *JsonPointerOrRuntimeExpression) MarshalJSON() ([]byte, error) { + switch v := j.Value.(type) { + case string: // JSON Pointer + return json.Marshal(v) + case RuntimeExpression: + return json.Marshal(v.String()) + default: + return nil, fmt.Errorf("JsonPointerOrRuntimeExpression contains unsupported type") + } +} diff --git a/model/objects_test.go b/model/objects_test.go new file mode 100644 index 0000000..c77d3bb --- /dev/null +++ b/model/objects_test.go @@ -0,0 +1,190 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestObjectOrRuntimeExpr_UnmarshalJSON(t *testing.T) { + cases := []struct { + Name string + JSON string + Expected interface{} + ShouldErr bool + }{ + { + Name: "Unmarshal valid string", + JSON: `"${ expression }"`, + Expected: RuntimeExpression{Value: "${ expression }"}, + ShouldErr: false, + }, + { + Name: "Unmarshal valid object", + JSON: `{ + "key": "value" + }`, + Expected: map[string]interface{}{ + "key": "value", + }, + ShouldErr: false, + }, + { + Name: "Unmarshal invalid type", + JSON: `123`, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + var obj ObjectOrRuntimeExpr + err := json.Unmarshal([]byte(tc.JSON), &obj) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + assert.Equal(t, tc.Expected, obj.Value, "unexpected unmarshalled value") + } + }) + } +} + +func TestURITemplateOrRuntimeExprValidation(t *testing.T) { + cases := []struct { + Name string + Input *URITemplateOrRuntimeExpr + ShouldErr bool + }{ + { + Name: "Valid URI template", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUriTemplate{Value: "http://example.com/{id}"}, + }, + ShouldErr: false, + }, + { + Name: "Valid URI", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "http://example.com"}, + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid runtime expression", + Input: &URITemplateOrRuntimeExpr{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Invalid URI format", + Input: &URITemplateOrRuntimeExpr{ + Value: &LiteralUri{Value: "invalid-uri"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: &URITemplateOrRuntimeExpr{ + Value: 123, + }, + ShouldErr: true, + }, + { + Name: "Valid URI as string", + Input: &URITemplateOrRuntimeExpr{ + Value: "http://example.com", + }, + ShouldErr: false, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "uri_template_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} + +func TestJsonPointerOrRuntimeExpressionValidation(t *testing.T) { + cases := []struct { + Name string + Input JsonPointerOrRuntimeExpression + ShouldErr bool + }{ + { + Name: "Valid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "/valid/json/pointer", + }, + ShouldErr: false, + }, + { + Name: "Valid runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "${expression}"}, + }, + ShouldErr: false, + }, + { + Name: "Invalid JSON Pointer", + Input: JsonPointerOrRuntimeExpression{ + Value: "invalid-json-pointer", + }, + ShouldErr: true, + }, + { + Name: "Invalid 
runtime expression", + Input: JsonPointerOrRuntimeExpression{ + Value: RuntimeExpression{Value: "123invalid-expression"}, + }, + ShouldErr: true, + }, + { + Name: "Unsupported type", + Input: JsonPointerOrRuntimeExpression{ + Value: 123, + }, + ShouldErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Var(tc.Input, "json_pointer_or_runtime_expr") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } + }) + } +} diff --git a/model/operation_state.go b/model/operation_state.go deleted file mode 100644 index c530ad8..0000000 --- a/model/operation_state.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// OperationState defines a set of actions to be performed in sequence or in parallel. -// +builder-gen:new-call=ApplyDefault -type OperationState struct { - // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions" validate:"min=0,dive"` - // State specific timeouts - // +optional - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` -} - -func (a *OperationState) MarshalJSON() ([]byte, error) { - type Alias OperationState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(a), - Timeouts: a.Timeouts, - }) - return custom, err -} - -type operationStateUnmarshal OperationState - -// UnmarshalJSON unmarshal OperationState object from json bytes -func (o *OperationState) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) -} - -// ApplyDefault set the default values for Operation State -func (o *OperationState) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// OperationStateTimeout defines the specific timeout settings for operation state -type OperationStateTimeout struct { - // Defines workflow state execution timeout. 
- // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/operation_state_test.go b/model/operation_state_test.go deleted file mode 100644 index 4939797..0000000 --- a/model/operation_state_test.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestOperationStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OperationState - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"actionMode": "parallel"}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "actionMode unset", - data: `{}`, - expect: OperationState{ - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `{"actionMode": parallel}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: `invalid character 'p' looking for beginning of value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OperationState{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/operation_state_validator_test.go b/model/operation_state_validator_test.go deleted file mode 100644 index 5da6dba..0000000 --- a/model/operation_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildOperationState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: ActionModeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOperationStateTimeout(state *State) *OperationStateTimeout { - state.OperationState.Timeouts = &OperationStateTimeout{ - ActionExecTimeout: "PT5S", - } - return state.OperationState.Timeouts -} - -func TestOperationStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions = []Action{} - return *model - }, - Err: ``, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].actionMode need by one of [sequential parallel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOperationStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - operationStateTimeout := buildOperationStateTimeout(operationState) - buildStateExecTimeoutByOperationStateTimeout(operationStateTimeout) - - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/parallel_state.go b/model/parallel_state.go deleted file mode 100644 index f65b7a1..0000000 --- a/model/parallel_state.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// CompletionType define on how to complete branch execution. -type CompletionType string - -func (i CompletionType) KindValues() []string { - return []string{ - string(CompletionTypeAllOf), - string(CompletionTypeAtLeast), - } -} - -func (i CompletionType) String() string { - return string(i) -} - -const ( - // CompletionTypeAllOf defines all branches must complete execution before the state can transition/end. - CompletionTypeAllOf CompletionType = "allOf" - // CompletionTypeAtLeast defines state can transition/end once at least the specified number of branches - // have completed execution. - CompletionTypeAtLeast CompletionType = "atLeast" -) - -// ParallelState Consists of a number of states that are executed in parallel -// +builder-gen:new-call=ApplyDefault -type ParallelState struct { - // List of branches for this parallel state. - // +kubebuilder:validation:MinItems=1 - Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. Defaults to `allOf`. - // +kubebuilder:validation:Enum=allOf;atLeast - // +kubebuilder:default=allOf - CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneofkind"` - // Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete - // in order for the state to transition/end. - // +optional - // TODO: change this field to unmarshal result as int - NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` - // State specific timeouts - // +optional - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` -} - -func (p *ParallelState) MarshalJSON() ([]byte, error) { - type Alias ParallelState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(p), - Timeouts: p.Timeouts, - }) - return custom, err -} - -type parallelStateUnmarshal ParallelState - -// UnmarshalJSON unmarshal ParallelState object from json bytes -func (ps *ParallelState) UnmarshalJSON(data []byte) error { - ps.ApplyDefault() - return util.UnmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps)) -} - -// ApplyDefault set the default values for Parallel State -func (ps *ParallelState) ApplyDefault() { - ps.CompletionType = CompletionTypeAllOf -} - -// Branch Definition -type Branch struct { - // Branch name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Actions to be executed in this branch - // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions" validate:"required,min=1,dive"` - // Branch specific timeout settings - // +optional - Timeouts *BranchTimeouts `json:"timeouts,omitempty"` -} - -// BranchTimeouts defines the specific timeout settings for branch -type BranchTimeouts struct { - // Single actions definition execution timeout duration (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Single branch execution timeout duration (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// ParallelStateTimeout defines the specific timeout settings for parallel state -type ParallelStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 
duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single branch execution timeout (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go deleted file mode 100644 index b95cc69..0000000 --- a/model/parallel_state_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestParallelStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ParallelState - err string - } - testCases := []testCase{ - { - desp: "all field set", - data: `{"completionType": "allOf", "numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - { - desp: "all optional field not set", - data: `{"numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ParallelState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/parallel_state_validator.go b/model/parallel_state_validator.go deleted file mode 100644 index 5999071..0000000 --- a/model/parallel_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(parallelStateStructLevelValidation, ParallelState{}) -} - -// ParallelStateStructLevelValidation custom validator for ParallelState -func parallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - parallelStateObj := structLevel.Current().Interface().(ParallelState) - - if parallelStateObj.CompletionType == CompletionTypeAtLeast { - if !val.ValidateGt0IntStr(&parallelStateObj.NumCompleted) { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "NumCompleted", "gt0", "") - } - } -} diff --git a/model/parallel_state_validator_test.go b/model/parallel_state_validator_test.go deleted file mode 100644 index d1acea9..0000000 --- a/model/parallel_state_validator_test.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildParallelState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeParallel, - }, - ParallelState: &ParallelState{ - CompletionType: CompletionTypeAllOf, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildBranch(state *State, name string) *Branch { - branch := Branch{ - Name: name, - } - - state.ParallelState.Branches = append(state.ParallelState.Branches, branch) - return &state.ParallelState.Branches[len(state.ParallelState.Branches)-1] -} - -func buildBranchTimeouts(branch *Branch) *BranchTimeouts { - branch.Timeouts = &BranchTimeouts{} - return branch.Timeouts -} - -func buildParallelStateTimeout(state *State) *ParallelStateTimeout { - state.ParallelState.Timeouts = &ParallelStateTimeout{ - BranchExecTimeout: "PT5S", - } - return state.ParallelState.Timeouts -} - -func TestParallelStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success completionTypeAllOf", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "success completionTypeAtLeast", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - model.States[0].ParallelState.NumCompleted = intstr.FromInt(1) - return *model - }, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() -
model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + " invalid" - return *model - }, - Err: `workflow.states[0].parallelState.completionType need by one of [allOf atLeast]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = nil - model.States[0].ParallelState.CompletionType = "" - return *model - }, - Err: `workflow.states[0].parallelState.branches is required -workflow.states[0].parallelState.completionType is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = []Branch{} - return *model - }, - Err: `workflow.states[0].parallelState.branches must have the minimum 1`, - }, - { - Desp: "required numCompleted", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - return *model - }, - Err: `workflow.states[0].parallelState.numCompleted must be greater than 0`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Name = "" - model.States[0].ParallelState.Branches[0].Actions = nil - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].name is required -workflow.states[0].parallelState.branches[0].actions is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Actions = []Action{} - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].actions must have the minimum 1`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchTimeoutsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - buildBranchTimeouts(branch) - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "PT5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "PT5S" - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "P5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: 
`workflow.states[0].parallelState.branches[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].parallelState.branches[0].timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestParallelStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildParallelStateTimeout(parallelState) - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].parallelState.timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/retry.go b/model/retry.go deleted file mode 100644 index 9fe6e78..0000000 --- a/model/retry.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -// Retry ... -// +builder-gen:new-call=ApplyDefault -type Retry struct { - // Unique retry strategy name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Time delay between retry attempts (ISO 8601 duration format) - Delay string `json:"delay,omitempty" validate:"omitempty,iso8601duration"` - // Maximum time delay between retry attempts (ISO 8601 duration format) - MaxDelay string `json:"maxDelay,omitempty" validate:"omitempty,iso8601duration"` - // Static value by which the delay increases during each attempt (ISO 8601 time format) - Increment string `json:"increment,omitempty" validate:"omitempty,iso8601duration"` - // Numeric value, if specified the delay between retries is multiplied by this value. - // +optional - Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` - // Maximum number of retry attempts. - // +kubebuilder:validation:Required - MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` - // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) - // TODO: make iso8601duration compatible this type - Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` -} - -type retryUnmarshal Retry - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retry) UnmarshalJSON(data []byte) error { - r.ApplyDefault() - return util.UnmarshalObject("retry", data, (*retryUnmarshal)(r)) -} - -func (r *Retry) ApplyDefault() { - r.MaxAttempts = intstr.FromInt32(1) -} diff --git a/model/retry_test.go b/model/retry_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/retry_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/retry_validator.go b/model/retry_validator.go deleted file mode 100644 index bd2e755..0000000 --- a/model/retry_validator.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidation(retryStructLevelValidation, Retry{}) - val.GetValidator().RegisterStructValidation(floatstr.ValidateFloat32OrString, Retry{}) -} - -// RetryStructLevelValidation custom validator for Retry Struct -func retryStructLevelValidation(structLevel validator.StructLevel) { - retryObj := structLevel.Current().Interface().(Retry) - - if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { - err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") - } - } -} diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go deleted file mode 100644 index 8b73243..0000000 --- a/model/retry_validator_test.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -func TestRetryStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildRetryRef(baseWorkflow, action1, "retry 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "PT5S" - model.Retries[0].MaxDelay = "PT5S" - model.Retries[0].Increment = "PT5S" - model.Retries[0].Jitter = floatstr.FromString("0.5") - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Name = "" - model.States[0].OperationState.Actions[0].RetryRef = "" - return *model - }, - Err: `workflow.retries[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries = append(model.Retries, model.Retries[0]) - return *model - }, - Err: `workflow.retries has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].RetryRef = "invalid retry" - return *model - }, - Err: `workflow.states[0].actions[0].retryRef don't exist "invalid retry"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "P5S" - model.Retries[0].MaxDelay = "P5S" - model.Retries[0].Increment = "P5S" - model.Retries[0].Jitter = floatstr.FromString("P5S") - - return *model - }, - Err: `workflow.retries[0].delay invalid iso8601 duration "P5S" -workflow.retries[0].maxDelay invalid iso8601 duration "P5S" -workflow.retries[0].increment invalid iso8601 duration "P5S"`, - }, - { - Desp: "multiplier less than zero", - Model: func() Workflow { - multiplierZero := floatstr.FromString("0") - model := baseWorkflow.DeepCopy() - model.Retries[0].Multiplier = &multiplierZero - - return *model - }, - Err: `workflow.retries[0].multiplier must have the minimum `, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/runtime_expression.go b/model/runtime_expression.go new file mode 100644 index 0000000..c67a3ef --- /dev/null +++ b/model/runtime_expression.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "github.com/itchyny/gojq" + "strings" +) + +// RuntimeExpression represents a runtime expression. +type RuntimeExpression struct { + Value string `json:"-" validate:"required"` +} + +// NewRuntimeExpression is an alias for NewExpr +var NewRuntimeExpression = NewExpr + +// NewExpr creates a new RuntimeExpression instance +func NewExpr(runtimeExpression string) *RuntimeExpression { + return &RuntimeExpression{Value: runtimeExpression} +} + +// preprocessExpression removes `${}` if present and returns the inner content. +func preprocessExpression(expression string) string { + if strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") { + return strings.TrimSpace(expression[2 : len(expression)-1]) + } + return expression // Return the expression as-is if `${}` are not present +} + +// IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. +func (r *RuntimeExpression) IsValid() bool { + // Preprocess to extract content inside `${}` if present + processedExpr := preprocessExpression(r.Value) + + // Validate the processed expression using gojq + _, err := gojq.Parse(processedExpr) + return err == nil +} + +// UnmarshalJSON implements custom unmarshalling for RuntimeExpression. +func (r *RuntimeExpression) UnmarshalJSON(data []byte) error { + // Decode the input as a string + var raw string + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RuntimeExpression: %w", err) + } + + // Assign the value + r.Value = raw + + // Validate the runtime expression + if !r.IsValid() { + return fmt.Errorf("invalid runtime expression format: %s", raw) + } + + return nil +} + +// MarshalJSON implements custom marshalling for RuntimeExpression. +func (r *RuntimeExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(r.Value) +} + +func (r *RuntimeExpression) String() string { + return r.Value +} diff --git a/model/runtime_expression_test.go b/model/runtime_expression_test.go new file mode 100644 index 0000000..296e1de --- /dev/null +++ b/model/runtime_expression_test.go @@ -0,0 +1,70 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
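Editor's note (not part of the patch): the RuntimeExpression type added in model/runtime_expression.go above is easiest to read alongside a short usage sketch. The snippet below is illustrative only; the import path "github.com/serverlessworkflow/sdk-go/v2/model" is an assumption (the module path this patch series targets may differ), and the example relies solely on the NewExpr, IsValid, and UnmarshalJSON behavior visible in the diff.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model" // assumed import path; adjust to the module this patch targets
)

func main() {
	// Both the "${ ... }" wrapped form and a bare jq expression validate,
	// because IsValid strips the ${} wrapper before handing the body to gojq.Parse.
	expr := model.NewExpr("${ .order.total > 100 }")
	fmt.Println(expr.IsValid()) // expected: true

	// Unmarshalling a JSON string runs the same validation, so input that
	// gojq cannot parse is rejected with an error.
	var bad model.RuntimeExpression
	err := json.Unmarshal([]byte(`"1234invalid_runtime"`), &bad)
	fmt.Println(err != nil) // expected: true
}

The test file that follows exercises the same round trip (unmarshal, validate, marshal) through a small wrapper struct.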
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRuntimeExpressionUnmarshalJSON(t *testing.T) { + tests := []struct { + Name string + JSONInput string + Expected string + ExpectErr bool + }{ + { + Name: "Valid RuntimeExpression", + JSONInput: `{ "expression": "${runtime.value}" }`, + Expected: "${runtime.value}", + ExpectErr: false, + }, + { + Name: "Invalid RuntimeExpression", + JSONInput: `{ "expression": "1234invalid_runtime" }`, + Expected: "", + ExpectErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.Name, func(t *testing.T) { + var acme *RuntimeExpressionAcme + err := json.Unmarshal([]byte(tc.JSONInput), &acme) + + if tc.ExpectErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, acme.Expression.Value) + } + + // Test marshalling + if !tc.ExpectErr { + output, err := json.Marshal(acme) + assert.NoError(t, err) + assert.JSONEq(t, tc.JSONInput, string(output)) + } + }) + } +} + +// RuntimeExpressionAcme represents a test struct wrapping a RuntimeExpression field. +type RuntimeExpressionAcme struct { + Expression RuntimeExpression `json:"expression"` +} diff --git a/model/sleep_state.go b/model/sleep_state.go deleted file mode 100644 index 5d144c5..0000000 --- a/model/sleep_state.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// SleepState suspends workflow execution for a given time duration. -type SleepState struct { - // Duration (ISO 8601 duration format) to sleep - // +kubebuilder:validation:Required - Duration string `json:"duration" validate:"required,iso8601duration"` - // Timeouts State specific timeouts - // +optional - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SleepState) MarshalJSON() ([]byte, error) { - type Alias SleepState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - return custom, err -} - -// SleepStateTimeout defines timeout settings for sleep state -type SleepStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/sleep_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/sleep_state_validator_test.go b/model/sleep_state_validator_test.go deleted file mode 100644 index 057d6b3..0000000 --- a/model/sleep_state_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func buildSleepState(workflow *Workflow, name, duration string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSleep, - }, - SleepState: &SleepState{ - Duration: duration, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildSleepStateTimeout(state *State) *SleepStateTimeout { - state.SleepState.Timeouts = &SleepStateTimeout{} - return state.SleepState.Timeouts -} - -func TestSleepStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "" - return *model - }, - Err: `workflow.states[0].sleepState.duration is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "P5S" - return *model - }, - Err: `workflow.states[0].sleepState.duration invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - sleepStateTimeout := buildSleepStateTimeout(sleepState) - buildStateExecTimeoutBySleepStateTimeout(sleepStateTimeout) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go deleted file mode 100644 index 0a53fd8..0000000 --- a/model/state_exec_timeout.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in 
compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// StateExecTimeout defines workflow state execution timeout -type StateExecTimeout struct { - // Single state execution timeout, not including retries (ISO 8601 duration format) - // +optional - Single string `json:"single,omitempty" validate:"omitempty,iso8601duration"` - // Total state execution timeout, including retries (ISO 8601 duration format) - // +kubebuilder:validation:Required - Total string `json:"total" validate:"required,iso8601duration"` -} - -type stateExecTimeoutUnmarshal StateExecTimeout - -// UnmarshalJSON unmarshal StateExecTimeout object from json bytes -func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) -} diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go deleted file mode 100644 index 6030395..0000000 --- a/model/state_exec_timeout_test.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - - expect *StateExecTimeout - err string - } - testCases := []testCase{ - { - desp: "normal string", - data: `"PT10S"`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total", - data: `{ - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total & single", - data: `{ - "single": "PT1S", - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "PT1S", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "invalid string or object", - data: `PT10S`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout has a syntax error "invalid character 'P' looking for beginning of value"`, - }, - { - desp: "invalid total type", - data: `{ - "single": "PT1S", - "total": 10 - }`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout.total must be string`, - }, - { - desp: "invalid single type", - data: `{ - "single": 1, - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: `stateExecTimeout.single must be string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - actual := &StateExecTimeout{} - err := actual.UnmarshalJSON([]byte(tc.data)) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, actual) - }) - } -} diff --git a/model/state_exec_timeout_validator_test.go b/model/state_exec_timeout_validator_test.go deleted file mode 100644 index 5a2f794..0000000 --- a/model/state_exec_timeout_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildStateExecTimeoutByTimeouts(timeouts *Timeouts) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutBySleepStateTimeout(timeouts *SleepStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutByOperationStateTimeout(timeouts *OperationStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.ActionExecTimeout = "PT5S" - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func TestStateExecTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - timeouts := buildTimeouts(baseWorkflow) - buildStateExecTimeoutByTimeouts(timeouts) - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "" - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.total is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "P5S" - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "P5S" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.single invalid iso8601 duration "P5S" -workflow.timeouts.stateExecTimeout.total invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/states.go b/model/states.go deleted file mode 100644 index a19429d..0000000 --- a/model/states.go +++ /dev/null @@ -1,283 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// StateType ... 
-type StateType string - -func (s StateType) KindValues() []string { - return []string{ - string(StateTypeDelay), - string(StateTypeEvent), - string(StateTypeOperation), - string(StateTypeParallel), - string(StateTypeSwitch), - string(StateTypeForEach), - string(StateTypeInject), - string(StateTypeCallback), - string(StateTypeSleep), - } -} - -func (s StateType) String() string { - return string(s) -} - -const ( - // StateTypeDelay ... - StateTypeDelay StateType = "delay" - // StateTypeEvent ... - StateTypeEvent StateType = "event" - // StateTypeOperation ... - StateTypeOperation StateType = "operation" - // StateTypeParallel ... - StateTypeParallel StateType = "parallel" - // StateTypeSwitch ... - StateTypeSwitch StateType = "switch" - // StateTypeForEach ... - StateTypeForEach StateType = "foreach" - // StateTypeInject ... - StateTypeInject StateType = "inject" - // StateTypeCallback ... - StateTypeCallback StateType = "callback" - // StateTypeSleep ... - StateTypeSleep StateType = "sleep" -) - -// BaseState ... -type BaseState struct { - // Unique State id. - // +optional - ID string `json:"id,omitempty"` - // State name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // stateType can be any of delay, callback, event, foreach, inject, operation, parallel, sleep, switch - // +kubebuilder:validation:Enum:=delay;callback;event;foreach;inject;operation;parallel;sleep;switch - // +kubebuilder:validation:Required - Type StateType `json:"type" validate:"required,oneofkind"` - // States error handling and retries definitions. - // +optional - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` - // Next transition of the workflow after the time delay. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // State data filter. - // +optional - StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` - // Unique Name of a workflow state which is responsible for compensation of this state. - // +optional - CompensatedBy string `json:"compensatedBy,omitempty"` - // If true, this state is used to compensate another state. Default is false. - // +optional - UsedForCompensation bool `json:"usedForCompensation,omitempty"` - // State end definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` - // Metadata information. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata *Metadata `json:"metadata,omitempty"` -} - -func (b *BaseState) MarshalJSON() ([]byte, error) { - type Alias BaseState - if b == nil { - return nil, nil - } - cus, err := json.Marshal(struct { - *Alias - }{ - Alias: (*Alias)(b), - }) - return cus, err -} - -// +builder-gen:embedded-ignore-method=BaseState -type State struct { - BaseState `json:",inline"` - // delayState Causes the workflow execution to delay for a specified duration. - // +optional - *DelayState `json:"delayState,omitempty"` - // event states await one or more events and perform actions when they are received. If defined as the - // workflow starting state, the event state definition controls when the workflow instances should be created. - // +optional - *EventState `json:"eventState,omitempty"` - // operationState defines a set of actions to be performed in sequence or in parallel. 
- // +optional - *OperationState `json:"operationState,omitempty"` - // parallelState Consists of a number of states that are executed in parallel. - // +optional - *ParallelState `json:"parallelState,omitempty"` - // switchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. - // +optional - *SwitchState `json:"switchState,omitempty"` - // forEachState used to execute actions for each element of a data set. - // +optional - *ForEachState `json:"forEachState,omitempty"` - // injectState used to inject static data into state data input. - // +optional - *InjectState `json:"injectState,omitempty"` - // callbackState executes a function and waits for callback event that indicates completion of the task. - // +optional - *CallbackState `json:"callbackState,omitempty"` - // sleepState suspends workflow execution for a given time duration. - // +optional - *SleepState `json:"sleepState,omitempty"` -} - -func (s *State) MarshalJSON() ([]byte, error) { - if s == nil { - return nil, nil - } - r := []byte("") - var errs error - - if s.DelayState != nil { - r, errs = s.DelayState.MarshalJSON() - } - - if s.EventState != nil { - r, errs = s.EventState.MarshalJSON() - } - - if s.OperationState != nil { - r, errs = s.OperationState.MarshalJSON() - } - - if s.ParallelState != nil { - r, errs = s.ParallelState.MarshalJSON() - } - - if s.SwitchState != nil { - r, errs = s.SwitchState.MarshalJSON() - } - - if s.ForEachState != nil { - r, errs = s.ForEachState.MarshalJSON() - } - - if s.InjectState != nil { - r, errs = s.InjectState.MarshalJSON() - } - - if s.CallbackState != nil { - r, errs = s.CallbackState.MarshalJSON() - } - - if s.SleepState != nil { - r, errs = s.SleepState.MarshalJSON() - } - - b, err := s.BaseState.MarshalJSON() - if err != nil { - return nil, err - } - - //remove }{ as BaseState and the State Type needs to be merged together - partialResult := append(b, r...) 
- result := strings.Replace(string(partialResult), "}{", ",", 1) - return []byte(result), errs -} - -type unmarshalState State - -// UnmarshalJSON implements json.Unmarshaler -func (s *State) UnmarshalJSON(data []byte) error { - if err := util.UnmarshalObject("state", data, (*unmarshalState)(s)); err != nil { - return err - } - - switch s.Type { - case StateTypeDelay: - state := &DelayState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.DelayState = state - - case StateTypeEvent: - state := &EventState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.EventState = state - - case StateTypeOperation: - state := &OperationState{} - if err := util.UnmarshalObject("states", data, state); err != nil { - return err - } - s.OperationState = state - - case StateTypeParallel: - state := &ParallelState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ParallelState = state - - case StateTypeSwitch: - state := &SwitchState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SwitchState = state - - case StateTypeForEach: - state := &ForEachState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ForEachState = state - - case StateTypeInject: - state := &InjectState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.InjectState = state - - case StateTypeCallback: - state := &CallbackState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.CallbackState = state - - case StateTypeSleep: - state := &SleepState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SleepState = state - default: - return fmt.Errorf("states type %q not supported", s.Type.String()) - } - return nil -} diff --git a/model/states_validator.go b/model/states_validator.go deleted file mode 100644 index 1bb58e5..0000000 --- a/model/states_validator.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(baseStateStructLevelValidationCtx), BaseState{}) -} - -func baseStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - baseState := structLevel.Current().Interface().(BaseState) - if baseState.Type != StateTypeSwitch && !baseState.UsedForCompensation { - validTransitionAndEnd(structLevel, baseState, baseState.Transition, baseState.End) - } - - if baseState.CompensatedBy != "" { - if baseState.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagRecursiveCompensation, "") - } - - if ctx.ExistState(baseState.CompensatedBy) { - value := ctx.States[baseState.CompensatedBy].BaseState - if value.UsedForCompensation && value.Type == StateTypeEvent { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedbyEventState, "") - - } else if !value.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedby, "") - } - - } else { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagExists, "") - } - } -} diff --git a/model/states_validator_test.go b/model/states_validator_test.go deleted file mode 100644 index 8766d87..0000000 --- a/model/states_validator_test.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func TestBaseStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 3) - - operationState := buildOperationState(baseWorkflow, "start state 1") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "state 2") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - eventState := buildEventState(baseWorkflow, "state 3") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat name", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States = []State{model.States[0], model.States[0]} - return *model - }, - Err: `workflow.states has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = "invalid state compensate by" - return *model - }, - Err: `workflow.states[0].compensatedBy don't exist "invalid state compensate by"`, - }, - { - Desp: "tagcompensatedby", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = model.States[1].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 2" is not defined as usedForCompensation`, - }, - { - Desp: "compensatedbyeventstate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[2].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[2].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 3" is defined as usedForCompensation and cannot be an event state`, - }, - { - Desp: "recursivecompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "start state 1" is defined as usedForCompensation (cannot themselves set their compensatedBy)`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End = nil - return *model - }, - Err: `workflow.states[0].transition is required`, - }, - { - Desp: "exclusive", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByState(&model.States[0], &model.States[1], false) - - return *model - }, - Err: `workflow.states[0].transition exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Type = StateTypeOperation + "invalid" - return *model - }, - Err: `workflow.states[0].type need by one of [delay event operation parallel switch foreach inject callback sleep]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/switch_state.go b/model/switch_state.go deleted file mode 100644 index 88d0c83..0000000 --- a/model/switch_state.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -type EventConditions []EventCondition - -// SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. -type SwitchState struct { - // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. - - // Default transition of the workflow if there is no matching data conditions. Can include a transition or - // end definition. - DefaultCondition DefaultCondition `json:"defaultCondition"` - // Defines conditions evaluated against events. - // +optional - EventConditions EventConditions `json:"eventConditions" validate:"dive"` - // Defines conditions evaluated against data - // +optional - DataConditions []DataCondition `json:"dataConditions" validate:"dive"` - // SwitchState specific timeouts - // +optional - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SwitchState) MarshalJSON() ([]byte, error) { - type Alias SwitchState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - - // Avoid marshal empty objects as null. - st := strings.Replace(string(custom), "\"eventConditions\":null,", "", 1) - st = strings.Replace(st, "\"dataConditions\":null,", "", 1) - st = strings.Replace(st, "\"end\":null,", "", -1) - return []byte(st), err -} - -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - // Each state can define a transition definition that is used to determine which state to transition to next. 
- // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // If this state an end state - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` -} - -type defaultConditionUnmarshal DefaultCondition - -// UnmarshalJSON implements json.Unmarshaler -func (e *DefaultCondition) UnmarshalJSON(data []byte) error { - var nextState string - err := util.UnmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) - if err != nil { - return err - } - - if nextState != "" { - e.Transition = &Transition{NextState: nextState} - } - - return err -} - -// SwitchStateTimeout defines the specific timeout settings for switch state -type SwitchStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. - // NOTE: this is only available for EventConditions - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// EventCondition specify events which the switch state must wait for. -type EventCondition struct { - // Event condition name. - // +optional - Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition. - // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` - // Metadata information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. - // Explicit transition to end - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition" validate:"omitempty"` -} - -// DataCondition specify a data-based condition statement which causes a transition to another workflow state -// if evaluated to true. -type DataCondition struct { - // Data condition name. - // +optional - Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false. - // +kubebuilder:validation:Required - Condition string `json:"condition" validate:"required"` - // Metadata information. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. 
- // Explicit transition to end - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - Transition *Transition `json:"transition,omitempty" validate:"omitempty"` -} diff --git a/model/switch_state_test.go b/model/switch_state_test.go deleted file mode 100644 index e2f5c51..0000000 --- a/model/switch_state_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDefaultConditionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect DefaultCondition - err string - } - - testCases := []testCase{ - { - desp: "json nextState success", - data: `{"transition": {"nextState": "next state"}}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json nextState", - data: `{"transition": {"nextState": "next state}}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json nextState type", - data: `{"transition": {"nextState": true}}`, - err: `transition.nextState must be string`, - }, - { - desp: "transition json success", - data: `{"transition": "next state"}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json transition", - data: `{"transition": "next state}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json transition type", - data: `{"transition": true}`, - err: `transition must be string or object`, - }, - { - desp: "string success", - data: `"next state"`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid string syntax", - data: `"next state`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid type", - data: `123`, - err: `defaultCondition must be string or object`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DefaultCondition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/switch_state_validator.go b/model/switch_state_validator.go deleted file mode 100644 index 5738104..0000000 --- a/model/switch_state_validator.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(switchStateStructLevelValidation), SwitchState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(defaultConditionStructLevelValidation), DefaultCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventConditionStructLevelValidationCtx), EventCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(dataConditionStructLevelValidation), DataCondition{}) -} - -// SwitchStateStructLevelValidation custom validator for SwitchState -func switchStateStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - switchState := structLevel.Current().Interface().(SwitchState) - - switch { - case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagRequired, "") - case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagExclusive, "") - } -} - -// DefaultConditionStructLevelValidation custom validator for DefaultCondition -func defaultConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - defaultCondition := structLevel.Current().Interface().(DefaultCondition) - validTransitionAndEnd(structLevel, defaultCondition, defaultCondition.Transition, defaultCondition.End) -} - -// EventConditionStructLevelValidation custom validator for EventCondition -func eventConditionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - eventCondition := structLevel.Current().Interface().(EventCondition) - validTransitionAndEnd(structLevel, eventCondition, eventCondition.Transition, eventCondition.End) - - if eventCondition.EventRef != "" && !ctx.ExistEvent(eventCondition.EventRef) { - structLevel.ReportError(eventCondition.EventRef, "eventRef", "EventRef", val.TagExists, "") - } -} - -// DataConditionStructLevelValidation custom validator for DataCondition -func dataConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - dataCondition := structLevel.Current().Interface().(DataCondition) - validTransitionAndEnd(structLevel, dataCondition, dataCondition.Transition, dataCondition.End) -} diff --git a/model/switch_state_validator_test.go b/model/switch_state_validator_test.go deleted file mode 100644 index 9c40462..0000000 --- a/model/switch_state_validator_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" -) - -func buildSwitchState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSwitch, - }, - SwitchState: &SwitchState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildDefaultCondition(state *State) *DefaultCondition { - state.SwitchState.DefaultCondition = DefaultCondition{} - return &state.SwitchState.DefaultCondition -} - -func buildDataCondition(state *State, name, condition string) *DataCondition { - if state.SwitchState.DataConditions == nil { - state.SwitchState.DataConditions = []DataCondition{} - } - - dataCondition := DataCondition{ - Name: name, - Condition: condition, - } - - state.SwitchState.DataConditions = append(state.SwitchState.DataConditions, dataCondition) - return &state.SwitchState.DataConditions[len(state.SwitchState.DataConditions)-1] -} - -func buildEventCondition(workflow *Workflow, state *State, name, eventRef string) (*Event, *EventCondition) { - workflow.Events = append(workflow.Events, Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindConsumed, - }) - - eventCondition := EventCondition{ - Name: name, - EventRef: eventRef, - } - - state.SwitchState.EventConditions = append(state.SwitchState.EventConditions, eventCondition) - return &workflow.Events[len(workflow.Events)-1], &state.SwitchState.EventConditions[len(state.SwitchState.EventConditions)-1] -} - -func TestSwitchStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - swithState := buildSwitchState(baseWorkflow, "start state") - defaultCondition := buildDefaultCondition(swithState) - buildEndByDefaultCondition(defaultCondition, true, false) - - dataCondition := buildDataCondition(swithState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventCondition(model, &model.States[0], "event condition", "event 1") - buildEndByEventCondition(&model.States[0].SwitchState.EventConditions[0], true, false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDefaultConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - buildSwitchState(baseWorkflow, "start state") - buildDefaultCondition(&baseWorkflow.States[0]) - - buildDataCondition(&baseWorkflow.States[0], "data condition 1", "1=1") - buildEndByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[0], true, false) - buildDataCondition(&baseWorkflow.States[0], "data condition 2", "1=1") - - 
buildOperationState(baseWorkflow, "end state") - buildEndByState(&baseWorkflow.States[1], true, false) - buildActionByOperationState(&baseWorkflow.States[1], "action 1") - buildFunctionRef(baseWorkflow, &baseWorkflow.States[1].OperationState.Actions[0], "function 1") - - buildTransitionByDefaultCondition(&baseWorkflow.States[0].SwitchState.DefaultCondition, &baseWorkflow.States[1]) - buildTransitionByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[1], &baseWorkflow.States[1], false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSwitchStateTimeoutStructLevelValidation(t *testing.T) { -} - -func TestEventConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - // switch state - switchState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(switchState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // event condition 1 - _, eventCondition := buildEventCondition(baseWorkflow, switchState, "data condition 1", "event 1") - buildEndByEventCondition(eventCondition, true, false) - - // event condition 2 - _, eventCondition2 := buildEventCondition(baseWorkflow, switchState, "data condition 2", "event 2") - buildEndByEventCondition(eventCondition2, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - // trasition switch state to operation state - buildTransitionByEventCondition(eventCondition, operationState, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].EventRef = "event not found" - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].eventRef don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByEventCondition(&model.States[0].SwitchState.EventConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - // switch 
state - swithcState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(swithcState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // data condition - dataCondition := buildDataCondition(swithcState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/task.go b/model/task.go new file mode 100644 index 0000000..3bbeb4d --- /dev/null +++ b/model/task.go @@ -0,0 +1,418 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TaskBase struct { + // A runtime expression, if any, used to determine whether or not the task should be run. + If *RuntimeExpression `json:"if,omitempty" validate:"omitempty"` + // Configure the task's input. + Input *Input `json:"input,omitempty" validate:"omitempty"` + // Configure the task's output. + Output *Output `json:"output,omitempty" validate:"omitempty"` + // Export task output to context. + Export *Export `json:"export,omitempty" validate:"omitempty"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" validate:"omitempty"` + // The flow directive to be performed upon completion of the task. + Then *FlowDirective `json:"then,omitempty" validate:"omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` +} + +// Task represents a discrete unit of work in a workflow. +type Task interface{} + +// TaskItem represents a named task and its associated definition. +type TaskItem struct { + Key string `json:"-" validate:"required"` + Task Task `json:"-" validate:"required"` +} + +// MarshalJSON for TaskItem to ensure proper serialization as a key-value pair. 
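+// For example (illustrative), a TaskItem with Key "getPet" holding a CallHTTP
+// task is serialized as {"getPet": {"call": "http", "with": {...}}} rather than
+// as an object with explicit "key" and "task" fields.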
+func (ti *TaskItem) MarshalJSON() ([]byte, error) { + if ti == nil { + return nil, fmt.Errorf("cannot marshal a nil TaskItem") + } + + // Serialize the Task + taskJSON, err := json.Marshal(ti.Task) + if err != nil { + return nil, fmt.Errorf("failed to marshal task: %w", err) + } + + // Create a map with the Key and Task + taskEntry := map[string]json.RawMessage{ + ti.Key: taskJSON, + } + + // Marshal the map into JSON + return json.Marshal(taskEntry) +} + +type NamedTaskMap map[string]Task + +// UnmarshalJSON for NamedTaskMap to ensure proper deserialization. +func (ntm *NamedTaskMap) UnmarshalJSON(data []byte) error { + var rawTasks map[string]json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for name, raw := range rawTasks { + task, err := unmarshalTask(name, raw) + if err != nil { + return err + } + + if *ntm == nil { + *ntm = make(map[string]Task) + } + (*ntm)[name] = task + } + + return nil +} + +// TaskList represents a list of named tasks to perform. +type TaskList []*TaskItem + +// UnmarshalJSON for TaskList to ensure proper deserialization. +func (tl *TaskList) UnmarshalJSON(data []byte) error { + var rawTasks []json.RawMessage + if err := json.Unmarshal(data, &rawTasks); err != nil { + return err + } + + for _, raw := range rawTasks { + var taskItemRaw map[string]json.RawMessage + if err := json.Unmarshal(raw, &taskItemRaw); err != nil { + return err + } + + if len(taskItemRaw) != 1 { + return errors.New("each TaskItem must have exactly one key") + } + + for key, taskRaw := range taskItemRaw { + task, err := unmarshalTask(key, taskRaw) + if err != nil { + return err + } + *tl = append(*tl, &TaskItem{Key: key, Task: task}) + } + } + + return nil +} + +var taskTypeRegistry = map[string]func() Task{ + "call_http": func() Task { return &CallHTTP{} }, + "call_openapi": func() Task { return &CallOpenAPI{} }, + "call_grpc": func() Task { return &CallGRPC{} }, + "call_asyncapi": func() Task { return &CallAsyncAPI{} }, + "call": func() Task { return &CallFunction{} }, + "do": func() Task { return &DoTask{} }, + "fork": func() Task { return &ForkTask{} }, + "emit": func() Task { return &EmitTask{} }, + "for": func() Task { return &ForTask{} }, + "listen": func() Task { return &ListenTask{} }, + "raise": func() Task { return &RaiseTask{} }, + "run": func() Task { return &RunTask{} }, + "set": func() Task { return &SetTask{} }, + "switch": func() Task { return &SwitchTask{} }, + "try": func() Task { return &TryTask{} }, + "wait": func() Task { return &WaitTask{} }, +} + +func unmarshalTask(key string, taskRaw json.RawMessage) (Task, error) { + var taskType map[string]interface{} + if err := json.Unmarshal(taskRaw, &taskType); err != nil { + return nil, fmt.Errorf("failed to parse task type for key '%s': %w", key, err) + } + + // Determine task type + var task Task + if callValue, hasCall := taskType["call"].(string); hasCall { + // Form composite key and check if it's in the registry + registryKey := fmt.Sprintf("call_%s", callValue) + if constructor, exists := taskTypeRegistry[registryKey]; exists { + task = constructor() + } else { + // Default to CallFunction for unrecognized call values + task = &CallFunction{} + } + } else { + // Handle non-call tasks (e.g., "do", "fork") + for typeKey := range taskType { + if constructor, exists := taskTypeRegistry[typeKey]; exists { + task = constructor() + break + } + } + } + + if task == nil { + return nil, fmt.Errorf("unknown task type for key '%s'", key) + } + + // Populate the task with raw data + if 
err := json.Unmarshal(taskRaw, task); err != nil { + return nil, fmt.Errorf("failed to unmarshal task '%s': %w", key, err) + } + + return task, nil +} + +// MarshalJSON for TaskList to ensure proper serialization. +func (tl *TaskList) MarshalJSON() ([]byte, error) { + return json.Marshal([]*TaskItem(*tl)) +} + +// Key retrieves a TaskItem by its key. +func (tl *TaskList) Key(key string) *TaskItem { + for _, item := range *tl { + if item.Key == key { + return item + } + } + return nil +} + +// AsTask extracts the TaskBase from the Task if the Task embeds TaskBase. +// Returns nil if the Task does not embed TaskBase. +func (ti *TaskItem) AsTask() *TaskBase { + if ti == nil || ti.Task == nil { + return nil + } + + // Use type assertions to check for TaskBase + switch task := ti.Task.(type) { + case *CallHTTP: + return &task.TaskBase + case *CallOpenAPI: + return &task.TaskBase + case *CallGRPC: + return &task.TaskBase + case *CallAsyncAPI: + return &task.TaskBase + case *CallFunction: + return &task.TaskBase + case *DoTask: + return &task.TaskBase + case *ForkTask: + return &task.TaskBase + case *EmitTask: + return &task.TaskBase + case *ForTask: + return &task.TaskBase + case *ListenTask: + return &task.TaskBase + case *RaiseTask: + return &task.TaskBase + case *RunTask: + return &task.TaskBase + case *SetTask: + return &task.TaskBase + case *SwitchTask: + return &task.TaskBase + case *TryTask: + return &task.TaskBase + case *WaitTask: + return &task.TaskBase + default: + // If the type does not embed TaskBase, return nil + return nil + } +} + +// AsCallHTTPTask casts the Task to a CallTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallHTTPTask() *CallHTTP { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallHTTP); ok { + return task + } + return nil +} + +// AsCallOpenAPITask casts the Task to a CallOpenAPI task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallOpenAPITask() *CallOpenAPI { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallOpenAPI); ok { + return task + } + return nil +} + +// AsCallGRPCTask casts the Task to a CallGRPC task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallGRPCTask() *CallGRPC { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallGRPC); ok { + return task + } + return nil +} + +// AsCallAsyncAPITask casts the Task to a CallAsyncAPI task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallAsyncAPITask() *CallAsyncAPI { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallAsyncAPI); ok { + return task + } + return nil +} + +// AsCallFunctionTask casts the Task to a CallFunction task if possible, returning nil if the cast fails. +func (ti *TaskItem) AsCallFunctionTask() *CallFunction { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*CallFunction); ok { + return task + } + return nil +} + +// AsDoTask casts the Task to a DoTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsDoTask() *DoTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*DoTask); ok { + return task + } + return nil +} + +// AsForkTask casts the Task to a ForkTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsForkTask() *ForkTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ForkTask); ok { + return task + } + return nil +} + +// AsEmitTask casts the Task to an EmitTask if possible, returning nil if the cast fails. 
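+// As with the other As* helpers, callers are expected to nil-check the result,
+// e.g. (illustrative):
+//
+//	if emit := item.AsEmitTask(); emit != nil {
+//		// work with the emit task configuration
+//	}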
+func (ti *TaskItem) AsEmitTask() *EmitTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*EmitTask); ok { + return task + } + return nil +} + +// AsForTask casts the Task to a ForTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsForTask() *ForTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ForTask); ok { + return task + } + return nil +} + +// AsListenTask casts the Task to a ListenTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsListenTask() *ListenTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*ListenTask); ok { + return task + } + return nil +} + +// AsRaiseTask casts the Task to a RaiseTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsRaiseTask() *RaiseTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RaiseTask); ok { + return task + } + return nil +} + +// AsRunTask casts the Task to a RunTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsRunTask() *RunTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*RunTask); ok { + return task + } + return nil +} + +// AsSetTask casts the Task to a SetTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSetTask() *SetTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SetTask); ok { + return task + } + return nil +} + +// AsSwitchTask casts the Task to a SwitchTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsSwitchTask() *SwitchTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*SwitchTask); ok { + return task + } + return nil +} + +// AsTryTask casts the Task to a TryTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsTryTask() *TryTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*TryTask); ok { + return task + } + return nil +} + +// AsWaitTask casts the Task to a WaitTask if possible, returning nil if the cast fails. +func (ti *TaskItem) AsWaitTask() *WaitTask { + if ti == nil { + return nil + } + if task, ok := ti.Task.(*WaitTask); ok { + return task + } + return nil +} diff --git a/model/task_call.go b/model/task_call.go new file mode 100644 index 0000000..82412b0 --- /dev/null +++ b/model/task_call.go @@ -0,0 +1,112 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import "encoding/json" + +type CallHTTP struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=http"` + With HTTPArguments `json:"with" validate:"required"` +} + +type HTTPArguments struct { + Method string `json:"method" validate:"required,oneofci=GET POST PUT DELETE PATCH"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` + Headers map[string]string `json:"headers,omitempty"` + Body json.RawMessage `json:"body,omitempty"` + Query map[string]interface{} `json:"query,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallOpenAPI struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required,eq=openapi"` + With OpenAPIArguments `json:"with" validate:"required"` +} + +type OpenAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + OperationID string `json:"operationId" validate:"required"` + Parameters map[string]interface{} `json:"parameters,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` + Output string `json:"output,omitempty" validate:"omitempty,oneof=raw content response"` +} + +type CallGRPC struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=grpc"` + With GRPCArguments `json:"with" validate:"required"` +} + +type GRPCArguments struct { + Proto *ExternalResource `json:"proto" validate:"required"` + Service GRPCService `json:"service" validate:"required"` + Method string `json:"method" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type GRPCService struct { + Name string `json:"name" validate:"required"` + Host string `json:"host" validate:"required,hostname_rfc1123"` + Port int `json:"port" validate:"required,min=0,max=65535"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` +} + +type CallAsyncAPI struct { + TaskBase `json:",inline"` + Call string `json:"call" validate:"required,eq=asyncapi"` + With AsyncAPIArguments `json:"with" validate:"required"` +} + +type AsyncAPIArguments struct { + Document *ExternalResource `json:"document" validate:"required"` + Channel string `json:"channel,omitempty"` + Operation string `json:"operation,omitempty"` + Server *AsyncAPIServer `json:"server,omitempty"` + Protocol string `json:"protocol,omitempty" validate:"omitempty,oneof=amqp amqp1 anypointmq googlepubsub http ibmmq jms kafka mercure mqtt mqtt5 nats pulsar redis sns solace sqs stomp ws"` + Message *AsyncAPIOutboundMessage `json:"message,omitempty"` + Subscription *AsyncAPISubscription `json:"subscription,omitempty"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty" validate:"omitempty"` +} + +type AsyncAPIServer struct { + Name string `json:"name" validate:"required"` + Variables map[string]interface{} `json:"variables,omitempty"` +} + +type AsyncAPIOutboundMessage struct { + Payload map[string]interface{} `json:"payload,omitempty" validate:"omitempty"` + Headers map[string]interface{} `json:"headers,omitempty" validate:"omitempty"` +} + +type AsyncAPISubscription struct { + Filter *RuntimeExpression `json:"filter,omitempty"` + Consume *AsyncAPIMessageConsumptionPolicy `json:"consume" validate:"required"` +} + +type AsyncAPIMessageConsumptionPolicy struct { + For 
*Duration `json:"for,omitempty"` + Amount int `json:"amount,omitempty" validate:"required_without_all=While Until"` + While *RuntimeExpression `json:"while,omitempty" validate:"required_without_all=Amount Until"` + Until *RuntimeExpression `json:"until,omitempty" validate:"required_without_all=Amount While"` +} + +type CallFunction struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Call string `json:"call" validate:"required"` + With map[string]interface{} `json:"with,omitempty"` +} diff --git a/model/task_call_test.go b/model/task_call_test.go new file mode 100644 index 0000000..0d10e69 --- /dev/null +++ b/model/task_call_test.go @@ -0,0 +1,480 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCallHTTP_MarshalJSON(t *testing.T) { + callHTTP := CallHTTP{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: string(FlowDirectiveContinue)}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com"}, + }, + Headers: map[string]string{ + "Authorization": "Bearer token", + }, + Query: map[string]interface{}{ + "q": "search", + }, + Output: "content", + }, + } + + data, err := json.Marshal(callHTTP) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }`, string(data)) +} + +func TestCallHTTP_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com", + "headers": {"Authorization": "Bearer token"}, + "query": {"q": "search"}, + "output": "content" + } + }` + + var callHTTP CallHTTP + err := json.Unmarshal([]byte(jsonData), &callHTTP) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${condition}"}, callHTTP.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callHTTP.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: 
map[string]interface{}{"result": "output"}}}, callHTTP.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callHTTP.Timeout) + assert.Equal(t, &FlowDirective{Value: string(FlowDirectiveContinue)}, callHTTP.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callHTTP.Metadata) + assert.Equal(t, "http", callHTTP.Call) + assert.Equal(t, "GET", callHTTP.With.Method) + assert.Equal(t, "http://example.com", callHTTP.With.Endpoint.String()) + assert.Equal(t, map[string]string{"Authorization": "Bearer token"}, callHTTP.With.Headers) + assert.Equal(t, map[string]interface{}{"q": "search"}, callHTTP.With.Query) + assert.Equal(t, "content", callHTTP.With.Output) +} + +func TestCallOpenAPI_MarshalJSON(t *testing.T) { + authPolicy := "my-auth" + callOpenAPI := CallOpenAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{ + Name: "MyOpenAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/openapi.json"}, + }, + }, + OperationID: "getUsers", + Parameters: map[string]interface{}{ + "param1": "value1", + "param2": "value2", + }, + Authentication: &ReferenceableAuthenticationPolicy{ + Use: &authPolicy, + }, + Output: "content", + }, + } + + data, err := json.Marshal(callOpenAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": "http://example.com/openapi.json" + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }`, string(data)) +} + +func TestCallOpenAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "openapi", + "with": { + "document": { + "name": "MyOpenAPIDoc", + "endpoint": { "uri": "http://example.com/openapi.json" } + }, + "operationId": "getUsers", + "parameters": { + "param1": "value1", + "param2": "value2" + }, + "authentication": { + "use": "my-auth" + }, + "output": "content" + } + }` + + var callOpenAPI CallOpenAPI + err := json.Unmarshal([]byte(jsonData), &callOpenAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callOpenAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callOpenAPI.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callOpenAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callOpenAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callOpenAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": 
"data"}, callOpenAPI.Metadata) + assert.Equal(t, "openapi", callOpenAPI.Call) + assert.Equal(t, "MyOpenAPIDoc", callOpenAPI.With.Document.Name) + assert.Equal(t, "http://example.com/openapi.json", callOpenAPI.With.Document.Endpoint.EndpointConfig.URI.String()) + assert.Equal(t, "getUsers", callOpenAPI.With.OperationID) + assert.Equal(t, map[string]interface{}{"param1": "value1", "param2": "value2"}, callOpenAPI.With.Parameters) + assert.Equal(t, "my-auth", *callOpenAPI.With.Authentication.Use) + assert.Equal(t, "content", callOpenAPI.With.Output) +} + +func TestCallGRPC_MarshalJSON(t *testing.T) { + callGRPC := CallGRPC{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "grpc", + With: GRPCArguments{ + Proto: &ExternalResource{ + Name: "MyProtoFile", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/protofile"}, + }, + }, + Service: GRPCService{ + Name: "UserService", + Host: "example.com", + Port: 50051, + }, + Method: "GetUser", + Arguments: map[string]interface{}{"userId": "12345"}, + }, + } + + data, err := json.Marshal(callGRPC) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }`, string(data)) +} + +func TestCallGRPC_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "grpc", + "with": { + "proto": { + "name": "MyProtoFile", + "endpoint": "http://example.com/protofile" + }, + "service": { + "name": "UserService", + "host": "example.com", + "port": 50051 + }, + "method": "GetUser", + "arguments": { + "userId": "12345" + } + } + }` + + var callGRPC CallGRPC + err := json.Unmarshal([]byte(jsonData), &callGRPC) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callGRPC.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callGRPC.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callGRPC.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callGRPC.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callGRPC.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callGRPC.Metadata) + assert.Equal(t, "grpc", callGRPC.Call) + assert.Equal(t, "MyProtoFile", callGRPC.With.Proto.Name) + assert.Equal(t, "http://example.com/protofile", callGRPC.With.Proto.Endpoint.String()) + assert.Equal(t, "UserService", callGRPC.With.Service.Name) + assert.Equal(t, "example.com", callGRPC.With.Service.Host) + 
assert.Equal(t, 50051, callGRPC.With.Service.Port) + assert.Equal(t, "GetUser", callGRPC.With.Method) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callGRPC.With.Arguments) +} + +func TestCallAsyncAPI_MarshalJSON(t *testing.T) { + callAsyncAPI := CallAsyncAPI{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "asyncapi", + With: AsyncAPIArguments{ + Document: &ExternalResource{ + Name: "MyAsyncAPIDoc", + Endpoint: &Endpoint{ + URITemplate: &LiteralUri{Value: "http://example.com/asyncapi.json"}, + }, + }, + Operation: "user.signup", + Server: &AsyncAPIServer{Name: "default-server"}, + Message: &AsyncAPIOutboundMessage{Payload: map[string]interface{}{"userId": "12345"}}, + Protocol: "http", + }, + } + + data, err := json.Marshal(callAsyncAPI) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server" }, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + } + } + }`, string(data)) +} + +func TestCallAsyncAPI_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "asyncapi", + "with": { + "document": { + "name": "MyAsyncAPIDoc", + "endpoint": "http://example.com/asyncapi.json" + }, + "operation": "user.signup", + "server": { "name": "default-server"}, + "protocol": "http", + "message": { + "payload": { "userId": "12345" } + }, + "authentication": { + "use": "asyncapi-auth-policy" + } + } + }` + + var callAsyncAPI CallAsyncAPI + err := json.Unmarshal([]byte(jsonData), &callAsyncAPI) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callAsyncAPI.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callAsyncAPI.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callAsyncAPI.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callAsyncAPI.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callAsyncAPI.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callAsyncAPI.Metadata) + assert.Equal(t, "asyncapi", callAsyncAPI.Call) + assert.Equal(t, "MyAsyncAPIDoc", callAsyncAPI.With.Document.Name) + assert.Equal(t, "http://example.com/asyncapi.json", callAsyncAPI.With.Document.Endpoint.String()) + assert.Equal(t, "user.signup", callAsyncAPI.With.Operation) + assert.Equal(t, "default-server", callAsyncAPI.With.Server.Name) + assert.Equal(t, "http", callAsyncAPI.With.Protocol) + assert.Equal(t, map[string]interface{}{"userId": "12345"}, callAsyncAPI.With.Message.Payload) + assert.Equal(t, 
"asyncapi-auth-policy", *callAsyncAPI.With.Authentication.Use) +} + +func TestCallFunction_MarshalJSON(t *testing.T) { + callFunction := CallFunction{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Call: "myFunction", + With: map[string]interface{}{ + "param1": "value1", + "param2": 42, + }, + } + + data, err := json.Marshal(callFunction) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }`, string(data)) +} + +func TestCallFunction_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "call": "myFunction", + "with": { + "param1": "value1", + "param2": 42 + } + }` + + var callFunction CallFunction + err := json.Unmarshal([]byte(jsonData), &callFunction) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, callFunction.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, callFunction.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, callFunction.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, callFunction.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, callFunction.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, callFunction.Metadata) + assert.Equal(t, "myFunction", callFunction.Call) + + // Adjust numeric values for comparison + expectedWith := map[string]interface{}{ + "param1": "value1", + "param2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedWith, callFunction.With) +} diff --git a/model/action_data_filter_validator_test.go b/model/task_do.go similarity index 61% rename from model/action_data_filter_validator_test.go rename to model/task_do.go index df52da0..0b2673d 100644 --- a/model/action_data_filter_validator_test.go +++ b/model/task_do.go @@ -1,10 +1,10 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,8 @@ package model -import "testing" - -func TestActionDataFilterStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) +// DoTask represents a task configuration to execute tasks sequentially. 
+type DoTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Do *TaskList `json:"do" validate:"required,dive"` } diff --git a/model/task_do_test.go b/model/task_do_test.go new file mode 100644 index 0000000..4a337fe --- /dev/null +++ b/model/task_do_test.go @@ -0,0 +1,103 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDoTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var doTask DoTask + err := json.Unmarshal([]byte(jsonData), &doTask) + assert.NoError(t, err) + + task1 := doTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := doTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestDoTask_MarshalJSON(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(doTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestDoTask_Validation(t *testing.T) { + doTask := DoTask{ + TaskBase: TaskBase{}, + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(doTask) + assert.Error(t, err) +} diff --git a/model/task_event.go b/model/task_event.go new file mode 100644 index 0000000..8b97388 --- /dev/null +++ b/model/task_event.go @@ -0,0 +1,282 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +// EmitTask represents the configuration for emitting events. +type EmitTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Emit EmitTaskConfiguration `json:"emit" validate:"required"` +} + +func (e *EmitTask) MarshalJSON() ([]byte, error) { + type Alias EmitTask // Prevent recursion + return json.Marshal((*Alias)(e)) +} + +// ListenTask represents a task configuration to listen to events. +type ListenTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Listen ListenTaskConfiguration `json:"listen" validate:"required"` +} + +type ListenTaskConfiguration struct { + To *EventConsumptionStrategy `json:"to" validate:"required"` +} + +// MarshalJSON for ListenTask to ensure proper serialization. +func (lt *ListenTask) MarshalJSON() ([]byte, error) { + type Alias ListenTask + return json.Marshal((*Alias)(lt)) +} + +// UnmarshalJSON for ListenTask to ensure proper deserialization. +func (lt *ListenTask) UnmarshalJSON(data []byte) error { + type Alias ListenTask + alias := (*Alias)(lt) + return json.Unmarshal(data, alias) +} + +type EmitTaskConfiguration struct { + Event EmitEventDefinition `json:"event" validate:"required"` +} + +type EmitEventDefinition struct { + With *EventProperties `json:"with" validate:"required"` +} + +type EventProperties struct { + ID string `json:"id,omitempty"` + Source *URITemplateOrRuntimeExpr `json:"source,omitempty" validate:"omitempty"` // URI template or runtime expression + Type string `json:"type,omitempty"` + Time *StringOrRuntimeExpr `json:"time,omitempty" validate:"omitempty,string_or_runtime_expr"` // ISO 8601 date-time string or runtime expression + Subject string `json:"subject,omitempty"` + DataContentType string `json:"datacontenttype,omitempty"` + DataSchema *URITemplateOrRuntimeExpr `json:"dataschema,omitempty" validate:"omitempty"` // URI template or runtime expression + Additional map[string]interface{} `json:"-"` +} + +// UnmarshalJSON implements custom unmarshaling for EventProperties. 
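+// The known attributes (id, source, type, time, subject, datacontenttype,
+// dataschema) are decoded into their struct fields; any remaining keys are
+// collected into the Additional map.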
+func (e *EventProperties) UnmarshalJSON(data []byte) error { + type Alias EventProperties // Prevent recursion + alias := &struct { + Additional map[string]interface{} `json:"-"` // Inline the additional properties + *Alias + }{ + Alias: (*Alias)(e), + } + + // Decode the entire JSON into a map to capture additional properties + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties: %w", err) + } + + // Unmarshal known fields into the alias + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal EventProperties fields: %w", err) + } + + // Validate fields requiring custom unmarshaling + if e.Source != nil && e.Source.Value == nil { + return fmt.Errorf("invalid Source: must be a valid URI template or runtime expression") + } + + if e.DataSchema != nil && e.DataSchema.Value == nil { + return fmt.Errorf("invalid DataSchema: must be a valid URI template or runtime expression") + } + + // Extract additional properties by removing known keys + for key := range raw { + switch key { + case "id", "source", "type", "time", "subject", "datacontenttype", "dataschema": + delete(raw, key) + } + } + + e.Additional = raw + return nil +} + +// MarshalJSON implements custom marshaling for EventProperties. +func (e *EventProperties) MarshalJSON() ([]byte, error) { + // Create a map for known fields + known := make(map[string]interface{}) + + if e.ID != "" { + known["id"] = e.ID + } + if e.Source != nil { + known["source"] = e.Source + } + if e.Type != "" { + known["type"] = e.Type + } + if e.Time != nil { + known["time"] = e.Time + } + if e.Subject != "" { + known["subject"] = e.Subject + } + if e.DataContentType != "" { + known["datacontenttype"] = e.DataContentType + } + if e.DataSchema != nil { + known["dataschema"] = e.DataSchema + } + + // Merge additional properties + for key, value := range e.Additional { + known[key] = value + } + + return json.Marshal(known) +} + +// EventFilter defines a mechanism to filter events based on predefined criteria. +type EventFilter struct { + With *EventProperties `json:"with" validate:"required"` + Correlate map[string]Correlation `json:"correlate,omitempty" validate:"omitempty,dive"` // Keyed correlation filters +} + +// Correlation defines the mapping of event attributes for correlation. +type Correlation struct { + From string `json:"from" validate:"required"` // Runtime expression to extract the correlation value + Expect string `json:"expect,omitempty"` // Expected value or runtime expression +} + +// EventConsumptionStrategy defines the consumption strategy for events. +type EventConsumptionStrategy struct { + All []*EventFilter `json:"all,omitempty" validate:"omitempty,dive"` + Any []*EventFilter `json:"any,omitempty" validate:"omitempty,dive"` + One *EventFilter `json:"one,omitempty" validate:"omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty" validate:"omitempty"` +} + +// EventConsumptionUntil handles the complex conditions of the "until" field. +type EventConsumptionUntil struct { + Condition *RuntimeExpression `json:"-" validate:"omitempty"` + Strategy *EventConsumptionStrategy `json:"-" validate:"omitempty"` + IsDisabled bool `json:"-"` // True when "until: false" +} + +// UnmarshalJSON for EventConsumptionUntil to handle the "oneOf" behavior. 
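+// The "until" value may be the boolean false (marks the strategy as disabled),
+// a string holding a runtime expression, or an object describing a nested
+// event consumption strategy; true and any other JSON value are rejected.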
+func (ecu *EventConsumptionUntil) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal EventConsumptionUntil: %w", err) + } + + switch v := raw.(type) { + case bool: + if !v { + ecu.IsDisabled = true + } else { + return fmt.Errorf("invalid value for 'until': true is not supported") + } + case string: + ecu.Condition = &RuntimeExpression{Value: v} + case map[string]interface{}: + strategyData, err := json.Marshal(v) + if err != nil { + return fmt.Errorf("failed to marshal 'until' strategy: %w", err) + } + var strategy EventConsumptionStrategy + if err := json.Unmarshal(strategyData, &strategy); err != nil { + return fmt.Errorf("failed to unmarshal 'until' strategy: %w", err) + } + ecu.Strategy = &strategy + default: + return fmt.Errorf("invalid type for 'until'") + } + + return nil +} + +// MarshalJSON for EventConsumptionUntil to handle proper serialization. +func (ecu *EventConsumptionUntil) MarshalJSON() ([]byte, error) { + if ecu.IsDisabled { + return json.Marshal(false) + } + if ecu.Condition != nil { + // Serialize the condition directly + return json.Marshal(ecu.Condition.Value) + } + if ecu.Strategy != nil { + // Serialize the nested strategy + return json.Marshal(ecu.Strategy) + } + // Return null if nothing is set + return json.Marshal(nil) +} + +// UnmarshalJSON for EventConsumptionStrategy to enforce "oneOf" behavior and handle edge cases. +func (ecs *EventConsumptionStrategy) UnmarshalJSON(data []byte) error { + temp := struct { + All []*EventFilter `json:"all"` + Any []*EventFilter `json:"any"` + One *EventFilter `json:"one"` + Until *EventConsumptionUntil `json:"until"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields (ignoring empty lists for `all` and `any`) + count := 0 + if len(temp.All) > 0 { + count++ + ecs.All = temp.All + } + if len(temp.Any) > 0 || temp.Until != nil { + count++ + ecs.Any = temp.Any + ecs.Until = temp.Until + } + if temp.One != nil { + count++ + ecs.One = temp.One + } + + // Ensure only one primary field (all, any, one) is set + if count > 1 { + return errors.New("invalid EventConsumptionStrategy: only one primary strategy type (all, any, or one) must be specified") + } + + return nil +} + +// MarshalJSON for EventConsumptionStrategy to ensure proper serialization. +func (ecs *EventConsumptionStrategy) MarshalJSON() ([]byte, error) { + temp := struct { + All []*EventFilter `json:"all,omitempty"` + Any []*EventFilter `json:"any,omitempty"` + One *EventFilter `json:"one,omitempty"` + Until *EventConsumptionUntil `json:"until,omitempty"` + }{ + All: ecs.All, + Any: ecs.Any, + One: ecs.One, + Until: ecs.Until, + } + + return json.Marshal(temp) +} diff --git a/model/task_event_test.go b/model/task_event_test.go new file mode 100644 index 0000000..45c92a7 --- /dev/null +++ b/model/task_event_test.go @@ -0,0 +1,231 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEmitTask_MarshalJSON(t *testing.T) { + emitTask := &EmitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Emit: EmitTaskConfiguration{ + Event: EmitEventDefinition{ + With: &EventProperties{ + ID: "event-id", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + Type: "example.event.type", + Time: &StringOrRuntimeExpr{Value: "2023-01-01T00:00:00Z"}, + Subject: "example.subject", + DataContentType: "application/json", + DataSchema: &URITemplateOrRuntimeExpr{Value: "http://example.com/schema"}, + Additional: map[string]interface{}{ + "extra": "value", + }, + }, + }, + }, + } + + data, err := json.Marshal(emitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }`, string(data)) +} + +func TestEmitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "emit": { + "event": { + "with": { + "id": "event-id", + "source": "http://example.com/source", + "type": "example.event.type", + "time": "2023-01-01T00:00:00Z", + "subject": "example.subject", + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "extra": "value" + } + } + } + }` + + var emitTask EmitTask + err := json.Unmarshal([]byte(jsonData), &emitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, emitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, emitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, emitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, emitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, emitTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, emitTask.Metadata) + assert.Equal(t, "event-id", emitTask.Emit.Event.With.ID) + assert.Equal(t, "http://example.com/source", emitTask.Emit.Event.With.Source.String()) + assert.Equal(t, "example.event.type", emitTask.Emit.Event.With.Type) + assert.Equal(t, "2023-01-01T00:00:00Z", emitTask.Emit.Event.With.Time.String()) + assert.Equal(t, "example.subject", emitTask.Emit.Event.With.Subject) + assert.Equal(t, "application/json", emitTask.Emit.Event.With.DataContentType) + 
assert.Equal(t, "http://example.com/schema", emitTask.Emit.Event.With.DataSchema.String()) + assert.Equal(t, map[string]interface{}{"extra": "value"}, emitTask.Emit.Event.With.Additional) +} + +func TestListenTask_MarshalJSON_WithUntilCondition(t *testing.T) { + listenTask := ListenTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Listen: ListenTaskConfiguration{ + To: &EventConsumptionStrategy{ + Any: []*EventFilter{ + { + With: &EventProperties{ + Type: "example.event.type", + Source: &URITemplateOrRuntimeExpr{Value: "http://example.com/source"}, + }, + }, + }, + Until: &EventConsumptionUntil{ + Condition: NewRuntimeExpression("workflow.data.condition == true"), + }, + }, + }, + } + + data, err := json.Marshal(listenTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "listen": { + "to": { + "any": [ + { + "with": { + "type": "example.event.type", + "source": "http://example.com/source" + } + } + ], + "until": "workflow.data.condition == true" + } + } + }`, string(data)) +} + +func TestEventConsumptionUntil_MarshalJSON(t *testing.T) { + tests := []struct { + name string + until *EventConsumptionUntil + expected string + shouldErr bool + }{ + { + name: "Until Disabled", + until: &EventConsumptionUntil{ + IsDisabled: true, + }, + expected: `false`, + shouldErr: false, + }, + { + name: "Until Condition Set", + until: &EventConsumptionUntil{ + Condition: &RuntimeExpression{Value: "workflow.data.condition == true"}, + }, + expected: `"workflow.data.condition == true"`, + shouldErr: false, + }, + { + name: "Until Nested Strategy", + until: &EventConsumptionUntil{ + Strategy: &EventConsumptionStrategy{ + One: &EventFilter{ + With: &EventProperties{Type: "example.event.type"}, + }, + }, + }, + expected: `{"one":{"with":{"type":"example.event.type"}}}`, + shouldErr: false, + }, + { + name: "Until Nil", + until: &EventConsumptionUntil{}, + expected: `null`, + shouldErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.until) + if test.shouldErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expected, string(data)) + } + }) + } +} diff --git a/model/task_for.go b/model/task_for.go new file mode 100644 index 0000000..0e6811b --- /dev/null +++ b/model/task_for.go @@ -0,0 +1,30 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +// ForTask represents a task configuration to iterate over a collection. +type ForTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + For ForTaskConfiguration `json:"for" validate:"required"` + While string `json:"while,omitempty"` + Do *TaskList `json:"do" validate:"required,dive"` +} + +// ForTaskConfiguration defines the loop configuration for iterating over a collection. +type ForTaskConfiguration struct { + Each string `json:"each,omitempty"` // Variable name for the current item + In string `json:"in" validate:"required"` // Runtime expression for the collection + At string `json:"at,omitempty"` // Variable name for the current index +} diff --git a/model/task_for_test.go b/model/task_for_test.go new file mode 100644 index 0000000..e24bf3b --- /dev/null +++ b/model/task_for_test.go @@ -0,0 +1,150 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "sigs.k8s.io/yaml" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestForTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ] + }` + + var forTask ForTask + err := json.Unmarshal([]byte(jsonData), &forTask) + assert.NoError(t, err) + assert.Equal(t, "item", forTask.For.Each) + assert.Equal(t, "${items}", forTask.For.In) + assert.Equal(t, "index", forTask.For.At) + assert.Equal(t, "${condition}", forTask.While) + + task1 := forTask.Do.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.String()) + + task2 := forTask.Do.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForTask_MarshalJSON(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + } + + data, err := json.Marshal(forTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "for": {"each": "item", "in": "${items}", "at": "index"}, + "while": "${condition}", + "do": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": 
"http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ] + }`, string(data)) +} + +func TestForTask_Validation(t *testing.T) { + forTask := ForTask{ + TaskBase: TaskBase{}, + For: ForTaskConfiguration{ + Each: "item", + In: "${items}", + At: "index", + }, + While: "${condition}", + Do: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + } + + err := validate.Struct(forTask) + assert.Error(t, err) +} + +func TestForTaskValidation(t *testing.T) { + rawYaml := ` +for: + each: pet + in: .pets + at: index +while: .vet != null +do: + - waitForCheckup: + listen: + to: + one: + with: + type: com.fake.petclinic.pets.checkup.completed.v2 + output: + as: '.pets + [{ "id": $pet.id }]' +` + + var forTask ForTask + err := yaml.Unmarshal([]byte(rawYaml), &forTask) + assert.NoError(t, err, "Failed to unmarshal ForTask") + + err = validate.Struct(forTask) + assert.NoError(t, err, "Failed to validate ForTask") +} diff --git a/model/task_fork.go b/model/task_fork.go new file mode 100644 index 0000000..3019d06 --- /dev/null +++ b/model/task_fork.go @@ -0,0 +1,27 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +// ForkTask represents a task configuration to execute multiple tasks concurrently. +type ForkTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Fork ForkTaskConfiguration `json:"fork" validate:"required"` +} + +// ForkTaskConfiguration defines the configuration for the branches to perform concurrently. +type ForkTaskConfiguration struct { + Branches *TaskList `json:"branches" validate:"required,dive"` + Compete bool `json:"compete,omitempty"` +} diff --git a/model/task_fork_test.go b/model/task_fork_test.go new file mode 100644 index 0000000..04b4f19 --- /dev/null +++ b/model/task_fork_test.go @@ -0,0 +1,116 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestForkTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}} + ], + "compete": true + } + }` + + var forkTask ForkTask + err := json.Unmarshal([]byte(jsonData), &forkTask) + assert.NoError(t, err) + assert.Equal(t, true, forkTask.Fork.Compete) + + task1 := forkTask.Fork.Branches.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := forkTask.Fork.Branches.Key("task2").AsCallOpenAPITask() + assert.NotNil(t, task2) + assert.Equal(t, "openapi", task2.Call) + assert.Equal(t, "doc1", task2.With.Document.Name) + assert.Equal(t, "op1", task2.With.OperationID) +} + +func TestForkTask_MarshalJSON(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + data, err := json.Marshal(forkTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "fork": { + "branches": [ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}} + ], + "compete": true + } + }`, string(data)) +} + +func TestForkTask_Validation(t *testing.T) { + forkTask := ForkTask{ + TaskBase: TaskBase{}, + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1"}, //missing endpoint + OperationID: "op1", + }, + }}, + }, + Compete: true, + }, + } + + err := validate.Struct(forkTask) + assert.Error(t, err) +} diff --git a/model/task_raise.go b/model/task_raise.go new file mode 100644 index 0000000..b0c7499 --- /dev/null +++ b/model/task_raise.go @@ -0,0 +1,84 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
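Like the `ForTask` YAML case earlier, a `ForkTask` can also be loaded from YAML, since `sigs.k8s.io/yaml` routes through the same JSON unmarshalers. A rough sketch under that assumption, in the same `model` package (the branch name is made up):

```go
package model

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"sigs.k8s.io/yaml"
)

func TestForkTask_FromYAML_Sketch(t *testing.T) {
	rawYaml := `
fork:
  compete: true
  branches:
    - callExample:
        call: http
        with:
          method: GET
          endpoint: http://example.com
`

	var forkTask ForkTask
	assert.NoError(t, yaml.Unmarshal([]byte(rawYaml), &forkTask))
	assert.True(t, forkTask.Fork.Compete)
	assert.Equal(t, "GET", forkTask.Fork.Branches.Key("callExample").AsCallHTTPTask().With.Method)
}
```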
+ +package model + +import ( + "encoding/json" + "errors" +) + +type Error struct { + Type *URITemplateOrRuntimeExpr `json:"type" validate:"required"` + Status int `json:"status" validate:"required"` + Title string `json:"title,omitempty"` + Detail string `json:"detail,omitempty"` + Instance *JsonPointerOrRuntimeExpression `json:"instance,omitempty" validate:"omitempty"` +} + +type ErrorFilter struct { + Type string `json:"type,omitempty"` + Status int `json:"status,omitempty"` + Instance string `json:"instance,omitempty"` + Title string `json:"title,omitempty"` + Details string `json:"details,omitempty"` +} + +// RaiseTask represents a task configuration to raise errors. +type RaiseTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Raise RaiseTaskConfiguration `json:"raise" validate:"required"` +} + +type RaiseTaskConfiguration struct { + Error RaiseTaskError `json:"error" validate:"required"` +} + +type RaiseTaskError struct { + Definition *Error `json:"-"` + Ref *string `json:"-"` +} + +// UnmarshalJSON for RaiseTaskError to enforce "oneOf" behavior. +func (rte *RaiseTaskError) UnmarshalJSON(data []byte) error { + // Try to unmarshal into a string (Ref) + var ref string + if err := json.Unmarshal(data, &ref); err == nil { + rte.Ref = &ref + rte.Definition = nil + return nil + } + + // Try to unmarshal into an Error (Definition) + var def Error + if err := json.Unmarshal(data, &def); err == nil { + rte.Definition = &def + rte.Ref = nil + return nil + } + + // If neither worked, return an error + return errors.New("invalid RaiseTaskError: data must be either a string (reference) or an object (definition)") +} + +// MarshalJSON for RaiseTaskError to ensure proper serialization. +func (rte *RaiseTaskError) MarshalJSON() ([]byte, error) { + if rte.Definition != nil { + return json.Marshal(rte.Definition) + } + if rte.Ref != nil { + return json.Marshal(*rte.Ref) + } + return nil, errors.New("invalid RaiseTaskError: neither 'definition' nor 'reference' is set") +} diff --git a/model/task_raise_test.go b/model/task_raise_test.go new file mode 100644 index 0000000..49ede54 --- /dev/null +++ b/model/task_raise_test.go @@ -0,0 +1,99 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
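`RaiseTaskError` above enforces a oneOf: a plain JSON string is kept as a reference to a reusable error, while an object is decoded as an inline `Error` definition. A short illustrative sketch of both shapes, assuming the same `model` package as the tests in this change (the reference name is made up):

```go
package model

import (
	"encoding/json"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestRaiseTaskError_OneOf_Sketch(t *testing.T) {
	// A plain string is stored as a reference and leaves the definition empty.
	// ("notFound" is a hypothetical reusable error name.)
	var byRef RaiseTaskError
	assert.NoError(t, json.Unmarshal([]byte(`"notFound"`), &byRef))
	assert.Equal(t, "notFound", *byRef.Ref)
	assert.Nil(t, byRef.Definition)

	// An object is stored as an inline error definition.
	var inline RaiseTaskError
	assert.NoError(t, json.Unmarshal([]byte(`{"type": "http://example.com/error", "status": 500}`), &inline))
	assert.Nil(t, inline.Ref)
	assert.Equal(t, 500, inline.Definition.Status)
}
```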
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRaiseTask_MarshalJSON(t *testing.T) { + raiseTask := &RaiseTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Raise: RaiseTaskConfiguration{ + Error: RaiseTaskError{ + Definition: &Error{ + Type: &URITemplateOrRuntimeExpr{Value: "http://example.com/error"}, + Status: 500, + Title: "Internal Server Error", + Detail: "An unexpected error occurred.", + }, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }`, string(data)) +} + +func TestRaiseTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "raise": { + "error": { + "type": "http://example.com/error", + "status": 500, + "title": "Internal Server Error", + "detail": "An unexpected error occurred." + } + } + }` + + var raiseTask *RaiseTask + err := json.Unmarshal([]byte(jsonData), &raiseTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, raiseTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, raiseTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, raiseTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, raiseTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, raiseTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, raiseTask.Metadata) + assert.Equal(t, "http://example.com/error", raiseTask.Raise.Error.Definition.Type.String()) + assert.Equal(t, 500, raiseTask.Raise.Error.Definition.Status) + assert.Equal(t, "Internal Server Error", raiseTask.Raise.Error.Definition.Title) + assert.Equal(t, "An unexpected error occurred.", raiseTask.Raise.Error.Definition.Detail) +} diff --git a/model/task_run.go b/model/task_run.go new file mode 100644 index 0000000..6942013 --- /dev/null +++ b/model/task_run.go @@ -0,0 +1,124 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" +) + +// RunTask represents a task configuration to execute external processes. +type RunTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Run RunTaskConfiguration `json:"run" validate:"required"` +} + +type RunTaskConfiguration struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` +} + +type Container struct { + Image string `json:"image" validate:"required"` + Command string `json:"command,omitempty"` + Ports map[string]interface{} `json:"ports,omitempty"` + Volumes map[string]interface{} `json:"volumes,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type Script struct { + Language string `json:"language" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` + InlineCode *string `json:"code,omitempty"` + External *ExternalResource `json:"source,omitempty"` +} + +type Shell struct { + Command string `json:"command" validate:"required"` + Arguments map[string]interface{} `json:"arguments,omitempty"` + Environment map[string]string `json:"environment,omitempty"` +} + +type RunWorkflow struct { + Namespace string `json:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" validate:"required,semver_pattern"` + Input map[string]interface{} `json:"input,omitempty"` +} + +// UnmarshalJSON for RunTaskConfiguration to enforce "oneOf" behavior. +func (rtc *RunTaskConfiguration) UnmarshalJSON(data []byte) error { + temp := struct { + Await *bool `json:"await"` + Container *Container `json:"container"` + Script *Script `json:"script"` + Shell *Shell `json:"shell"` + Workflow *RunWorkflow `json:"workflow"` + }{} + + if err := json.Unmarshal(data, &temp); err != nil { + return err + } + + // Count non-nil fields + count := 0 + if temp.Container != nil { + count++ + rtc.Container = temp.Container + } + if temp.Script != nil { + count++ + rtc.Script = temp.Script + } + if temp.Shell != nil { + count++ + rtc.Shell = temp.Shell + } + if temp.Workflow != nil { + count++ + rtc.Workflow = temp.Workflow + } + + // Ensure only one of the options is set + if count != 1 { + return errors.New("invalid RunTaskConfiguration: only one of 'container', 'script', 'shell', or 'workflow' must be specified") + } + + rtc.Await = temp.Await + return nil +} + +// MarshalJSON for RunTaskConfiguration to ensure proper serialization. 
+func (rtc *RunTaskConfiguration) MarshalJSON() ([]byte, error) { + temp := struct { + Await *bool `json:"await,omitempty"` + Container *Container `json:"container,omitempty"` + Script *Script `json:"script,omitempty"` + Shell *Shell `json:"shell,omitempty"` + Workflow *RunWorkflow `json:"workflow,omitempty"` + }{ + Await: rtc.Await, + Container: rtc.Container, + Script: rtc.Script, + Shell: rtc.Shell, + Workflow: rtc.Workflow, + } + + return json.Marshal(temp) +} diff --git a/model/task_run_test.go b/model/task_run_test.go new file mode 100644 index 0000000..026b9c8 --- /dev/null +++ b/model/task_run_test.go @@ -0,0 +1,196 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRunTask_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Container: &Container{ + Image: "example-image", + Command: "example-command", + Ports: map[string]interface{}{ + "8080": "80", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }`, string(data)) +} + +func TestRunTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "container": { + "image": "example-image", + "command": "example-command", + "ports": {"8080": "80"}, + "environment": {"ENV_VAR": "value"} + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: 
NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "example-image", runTask.Run.Container.Image) + assert.Equal(t, "example-command", runTask.Run.Container.Command) + assert.Equal(t, map[string]interface{}{"8080": "80"}, runTask.Run.Container.Ports) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Container.Environment) +} + +func TestRunTaskScript_MarshalJSON(t *testing.T) { + runTask := RunTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Run: RunTaskConfiguration{ + Await: boolPtr(true), + Script: &Script{ + Language: "python", + Arguments: map[string]interface{}{ + "arg1": "value1", + }, + Environment: map[string]string{ + "ENV_VAR": "value", + }, + InlineCode: stringPtr("print('Hello, World!')"), + }, + }, + } + + data, err := json.Marshal(runTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }`, string(data)) +} + +func TestRunTaskScript_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "run": { + "await": true, + "script": { + "language": "python", + "arguments": {"arg1": "value1"}, + "environment": {"ENV_VAR": "value"}, + "code": "print('Hello, World!')" + } + } + }` + + var runTask RunTask + err := json.Unmarshal([]byte(jsonData), &runTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, runTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, runTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, runTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, runTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, runTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, runTask.Metadata) + assert.Equal(t, true, *runTask.Run.Await) + assert.Equal(t, "python", runTask.Run.Script.Language) + assert.Equal(t, map[string]interface{}{"arg1": "value1"}, runTask.Run.Script.Arguments) + assert.Equal(t, map[string]string{"ENV_VAR": "value"}, runTask.Run.Script.Environment) + assert.Equal(t, "print('Hello, World!')", *runTask.Run.Script.InlineCode) +} + +func boolPtr(b bool) *bool { + return &b +} + +func stringPtr(s string) *string { + return &s +} diff --git a/model/task_set.go b/model/task_set.go new file mode 100644 index 0000000..654c48f --- /dev/null +++ b/model/task_set.go @@ -0,0 
+1,36 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SetTask represents a task used to set data. +type SetTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Set map[string]interface{} `json:"set" validate:"required,min=1,dive"` +} + +// MarshalJSON for SetTask to ensure proper serialization. +func (st *SetTask) MarshalJSON() ([]byte, error) { + type Alias SetTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SetTask to ensure proper deserialization. +func (st *SetTask) UnmarshalJSON(data []byte) error { + type Alias SetTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_set_test.go b/model/task_set_test.go new file mode 100644 index 0000000..49781af --- /dev/null +++ b/model/task_set_test.go @@ -0,0 +1,104 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
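`SetTask.Set` is an open map, so any mapping round-trips and expression strings are kept verbatim for the runtime to evaluate. A small sketch, illustrative only, assuming the `model` package and the `sigs.k8s.io/yaml` helper used elsewhere in this change (the keys are made up):

```go
package model

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"sigs.k8s.io/yaml"
)

func TestSetTask_FromYAML_Sketch(t *testing.T) {
	rawYaml := `
set:
  shape: circle
  size: "${ .configuration.size }"
`

	var setTask SetTask
	assert.NoError(t, yaml.Unmarshal([]byte(rawYaml), &setTask))
	assert.Equal(t, "circle", setTask.Set["shape"])
	assert.Equal(t, "${ .configuration.size }", setTask.Set["size"])
}
```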
+ +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSetTask_MarshalJSON(t *testing.T) { + setTask := SetTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Set: map[string]interface{}{ + "key1": "value1", + "key2": 42, + }, + } + + data, err := json.Marshal(setTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }`, string(data)) +} + +func TestSetTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "set": { + "key1": "value1", + "key2": 42 + } + }` + + var setTask SetTask + err := json.Unmarshal([]byte(jsonData), &setTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, setTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, setTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, setTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, setTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, setTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, setTask.Metadata) + expectedSet := map[string]interface{}{ + "key1": "value1", + "key2": float64(42), // Match JSON unmarshaling behavior + } + assert.Equal(t, expectedSet, setTask.Set) +} + +func TestSetTask_Validation(t *testing.T) { + // Valid SetTask + setTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{ + "key": "value", + }, + } + assert.NoError(t, validate.Struct(setTask)) + + // Invalid SetTask (empty set) + invalidSetTask := SetTask{ + TaskBase: TaskBase{}, + Set: map[string]interface{}{}, + } + assert.Error(t, validate.Struct(invalidSetTask)) +} diff --git a/model/task_switch.go b/model/task_switch.go new file mode 100644 index 0000000..d63b2e7 --- /dev/null +++ b/model/task_switch.go @@ -0,0 +1,44 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import "encoding/json" + +// SwitchTask represents a task configuration for conditional branching. 
+type SwitchTask struct { + TaskBase `json:",inline"` // Inline TaskBase fields + Switch []SwitchItem `json:"switch" validate:"required,min=1,dive,switch_item"` +} + +type SwitchItem map[string]SwitchCase + +// SwitchCase defines a condition and the corresponding outcome for a switch task. +type SwitchCase struct { + When *RuntimeExpression `json:"when,omitempty"` + Then *FlowDirective `json:"then" validate:"required"` +} + +// MarshalJSON for SwitchTask to ensure proper serialization. +func (st *SwitchTask) MarshalJSON() ([]byte, error) { + type Alias SwitchTask + return json.Marshal((*Alias)(st)) +} + +// UnmarshalJSON for SwitchTask to ensure proper deserialization. +func (st *SwitchTask) UnmarshalJSON(data []byte) error { + type Alias SwitchTask + alias := (*Alias)(st) + return json.Unmarshal(data, alias) +} diff --git a/model/task_switch_test.go b/model/task_switch_test.go new file mode 100644 index 0000000..3c40b5a --- /dev/null +++ b/model/task_switch_test.go @@ -0,0 +1,151 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSwitchTask_MarshalJSON(t *testing.T) { + switchTask := &SwitchTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + { + "case2": SwitchCase{ + When: &RuntimeExpression{Value: "${condition2}"}, + Then: &FlowDirective{Value: "end"}, + }, + }, + }, + } + + data, err := json.Marshal(switchTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }`, string(data)) +} + +func TestSwitchTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "switch": [ + { + "case1": { + "when": "${condition1}", + "then": "next" + } + }, + { + "case2": { + "when": "${condition2}", + "then": "end" + } + } + ] + }` + + var switchTask SwitchTask + err := json.Unmarshal([]byte(jsonData), &switchTask) + assert.NoError(t, err) + 
assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, switchTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, switchTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, switchTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, switchTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, switchTask.Then) + assert.Equal(t, map[string]interface{}{"meta": "data"}, switchTask.Metadata) + assert.Equal(t, 2, len(switchTask.Switch)) + assert.Equal(t, &RuntimeExpression{Value: "${condition1}"}, switchTask.Switch[0]["case1"].When) + assert.Equal(t, &FlowDirective{Value: "next"}, switchTask.Switch[0]["case1"].Then) + assert.Equal(t, &RuntimeExpression{Value: "${condition2}"}, switchTask.Switch[1]["case2"].When) + assert.Equal(t, &FlowDirective{Value: "end"}, switchTask.Switch[1]["case2"].Then) +} + +func TestSwitchTask_Validation(t *testing.T) { + // Valid SwitchTask + switchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{ + When: &RuntimeExpression{Value: "${condition1}"}, + Then: &FlowDirective{Value: "next"}, + }, + }, + }, + } + assert.NoError(t, validate.Struct(switchTask)) + + // Invalid SwitchTask (empty switch) + invalidSwitchTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{}, + } + assert.Error(t, validate.Struct(invalidSwitchTask)) + + // Invalid SwitchTask (SwitchItem with multiple keys) + invalidSwitchItemTask := SwitchTask{ + TaskBase: TaskBase{}, + Switch: []SwitchItem{ + { + "case1": SwitchCase{When: &RuntimeExpression{Value: "${condition1}"}, Then: &FlowDirective{Value: "next"}}, + "case2": SwitchCase{When: &RuntimeExpression{Value: "${condition2}"}, Then: &FlowDirective{Value: "end"}}, + }, + }, + } + assert.Error(t, validate.Struct(invalidSwitchItemTask)) +} diff --git a/model/task_test.go b/model/task_test.go new file mode 100644 index 0000000..6fa5019 --- /dev/null +++ b/model/task_test.go @@ -0,0 +1,121 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
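Because `SwitchCase.When` is optional while `Then` is required, a case carrying only `then` can serve as the default branch. A hedged YAML sketch under that reading, assuming the same `model` package and YAML helper used elsewhere in this change (case and task names are made up):

```go
package model

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"sigs.k8s.io/yaml"
)

func TestSwitchTask_DefaultCase_Sketch(t *testing.T) {
	rawYaml := `
switch:
  - highPriority:
      when: '.priority == "high"'
      then: escalate
  - anythingElse:
      then: archive
`

	var switchTask SwitchTask
	assert.NoError(t, yaml.Unmarshal([]byte(rawYaml), &switchTask))
	assert.Len(t, switchTask.Switch, 2)
	assert.Nil(t, switchTask.Switch[1]["anythingElse"].When)
	assert.Equal(t, "archive", switchTask.Switch[1]["anythingElse"].Then.Value)
}
```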
+ +package model + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" +) + +func TestTaskList_UnmarshalJSON(t *testing.T) { + jsonData := `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1"}, "operationId": "op1"}}}]}} + ]` + + var taskList TaskList + err := json.Unmarshal([]byte(jsonData), &taskList) + assert.NoError(t, err) + assert.Equal(t, 2, len(taskList)) + + task1 := taskList.Key("task1").AsCallHTTPTask() + assert.NotNil(t, task1) + assert.Equal(t, "http", task1.Call) + assert.Equal(t, "GET", task1.With.Method) + assert.Equal(t, "http://example.com", task1.With.Endpoint.URITemplate.String()) + + task2 := taskList.Key("task2").AsDoTask() + assert.NotNil(t, task2) + assert.Equal(t, 1, len(*task2.Do)) + + task3 := task2.Do.Key("task3").AsCallOpenAPITask() + assert.NotNil(t, task3) + assert.Equal(t, "openapi", task3.Call) + assert.Equal(t, "doc1", task3.With.Document.Name) + assert.Equal(t, "op1", task3.With.OperationID) +} + +func TestTaskList_MarshalJSON(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: &Endpoint{URITemplate: &LiteralUri{Value: "http://example.com"}}, + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + data, err := json.Marshal(taskList) + assert.NoError(t, err) + assert.JSONEq(t, `[ + {"task1": {"call": "http", "with": {"method": "GET", "endpoint": "http://example.com"}}}, + {"task2": {"do": [{"task3": {"call": "openapi", "with": {"document": {"name": "doc1", "endpoint": "http://example.com"}, "operationId": "op1"}}}]}} + ]`, string(data)) +} + +func TestTaskList_Validation(t *testing.T) { + taskList := TaskList{ + {Key: "task1", Task: &CallHTTP{ + Call: "http", + With: HTTPArguments{ + Method: "GET", + Endpoint: NewEndpoint("http://example.com"), + }, + }}, + {Key: "task2", Task: &DoTask{ + Do: &TaskList{ + {Key: "task3", Task: &CallOpenAPI{ + Call: "openapi", + With: OpenAPIArguments{ + Document: &ExternalResource{Name: "doc1", Endpoint: NewEndpoint("http://example.com")}, + OperationID: "op1", + }, + }}, + }, + }}, + } + + // Validate each TaskItem explicitly + for _, taskItem := range taskList { + err := validate.Struct(taskItem) + if err != nil { + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Errorf("Validation failed on field '%s' with tag '%s'", validationErr.Field(), validationErr.Tag()) + } + } else { + t.Errorf("Unexpected error: %v", err) + } + } + } + +} diff --git a/model/task_try.go b/model/task_try.go new file mode 100644 index 0000000..91d3797 --- /dev/null +++ b/model/task_try.go @@ -0,0 +1,202 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" +) + +type TryTask struct { + TaskBase `json:",inline"` + Try *TaskList `json:"try" validate:"required,dive"` + Catch *TryTaskCatch `json:"catch" validate:"required"` +} + +type TryTaskCatch struct { + Errors struct { + With *ErrorFilter `json:"with,omitempty"` + } `json:"errors,omitempty"` + As string `json:"as,omitempty"` + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Retry *RetryPolicy `json:"retry,omitempty"` + Do *TaskList `json:"do,omitempty" validate:"omitempty,dive"` +} + +// RetryPolicy defines a retry policy. +type RetryPolicy struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + Ref string `json:"-"` // Reference to a reusable retry policy +} + +// MarshalJSON for RetryPolicy to ensure proper serialization. +func (rp *RetryPolicy) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + When *RuntimeExpression `json:"when,omitempty"` + ExceptWhen *RuntimeExpression `json:"exceptWhen,omitempty"` + Delay *Duration `json:"delay,omitempty"` + Backoff *RetryBackoff `json:"backoff,omitempty"` + Limit RetryLimit `json:"limit,omitempty"` + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + }{ + When: rp.When, + ExceptWhen: rp.ExceptWhen, + Delay: rp.Delay, + Backoff: rp.Backoff, + Limit: rp.Limit, + Jitter: rp.Jitter, + }) +} + +// UnmarshalJSON for RetryPolicy to ensure proper deserialization. 
+func (rp *RetryPolicy) UnmarshalJSON(data []byte) error { + var raw interface{} + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy: %w", err) + } + + switch v := raw.(type) { + case string: + // If it's a string, treat it as a reference + rp.Ref = v + case map[string]interface{}: + // If it's an object, unmarshal into the struct + type Alias RetryPolicy + alias := &struct { + *Alias + }{ + Alias: (*Alias)(rp), + } + if err := json.Unmarshal(data, alias); err != nil { + return fmt.Errorf("failed to unmarshal RetryPolicy object: %w", err) + } + default: + return fmt.Errorf("invalid RetryPolicy type: %T", v) + } + + return nil +} + +func (rp *RetryPolicy) ResolveReference(retries map[string]*RetryPolicy) error { + if rp.Ref == "" { + // No reference to resolve + return nil + } + + resolved, exists := retries[rp.Ref] + if !exists { + return fmt.Errorf("retry policy reference %q not found", rp.Ref) + } + + // Copy resolved policy fields into the current RetryPolicy + *rp = *resolved + rp.Ref = "" // Clear the reference to avoid confusion + + return nil +} + +func ResolveRetryPolicies(tasks []TryTaskCatch, retries map[string]*RetryPolicy) error { + for i := range tasks { + if tasks[i].Retry != nil { + if err := tasks[i].Retry.ResolveReference(retries); err != nil { + return fmt.Errorf("failed to resolve retry policy for task %q: %w", tasks[i].As, err) + } + } + } + return nil +} + +// RetryBackoff defines the retry backoff strategies. +type RetryBackoff struct { + Constant *BackoffDefinition `json:"constant,omitempty"` + Exponential *BackoffDefinition `json:"exponential,omitempty"` + Linear *BackoffDefinition `json:"linear,omitempty"` +} + +// MarshalJSON for RetryBackoff to ensure oneOf behavior. +func (rb *RetryBackoff) MarshalJSON() ([]byte, error) { + switch { + case rb.Constant != nil: + return json.Marshal(map[string]interface{}{"constant": rb.Constant.Definition}) + case rb.Exponential != nil: + return json.Marshal(map[string]interface{}{"exponential": rb.Exponential.Definition}) + case rb.Linear != nil: + return json.Marshal(map[string]interface{}{"linear": rb.Linear.Definition}) + default: + return nil, errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") + } +} + +func (rb *RetryBackoff) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return fmt.Errorf("failed to unmarshal RetryBackoff: %w", err) + } + + if rawConstant, ok := raw["constant"]; ok { + rb.Constant = &BackoffDefinition{} + if err := json.Unmarshal(rawConstant, &rb.Constant.Definition); err != nil { + return fmt.Errorf("failed to unmarshal constant backoff: %w", err) + } + return nil + } + + if rawExponential, ok := raw["exponential"]; ok { + rb.Exponential = &BackoffDefinition{} + if err := json.Unmarshal(rawExponential, &rb.Exponential.Definition); err != nil { + return fmt.Errorf("failed to unmarshal exponential backoff: %w", err) + } + return nil + } + + if rawLinear, ok := raw["linear"]; ok { + rb.Linear = &BackoffDefinition{} + if err := json.Unmarshal(rawLinear, &rb.Linear.Definition); err != nil { + return fmt.Errorf("failed to unmarshal linear backoff: %w", err) + } + return nil + } + + return errors.New("RetryBackoff must have one of 'constant', 'exponential', or 'linear' defined") +} + +type BackoffDefinition struct { + Definition map[string]interface{} `json:"definition,omitempty"` +} + +// RetryLimit defines the retry limit configurations. 
+type RetryLimit struct { + Attempt *RetryLimitAttempt `json:"attempt,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryLimitAttempt defines the limit for each retry attempt. +type RetryLimitAttempt struct { + Count int `json:"count,omitempty"` + Duration *Duration `json:"duration,omitempty"` +} + +// RetryPolicyJitter defines the randomness or variability of retry delays. +type RetryPolicyJitter struct { + From *Duration `json:"from" validate:"required"` + To *Duration `json:"to" validate:"required"` +} diff --git a/model/task_try_test.go b/model/task_try_test.go new file mode 100644 index 0000000..4daf839 --- /dev/null +++ b/model/task_try_test.go @@ -0,0 +1,171 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRetryPolicy_MarshalJSON(t *testing.T) { + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{ + Definition: map[string]interface{}{"factor": 2}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + + data, err := json.Marshal(retryPolicy) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }`, string(data)) +} + +func TestRetryPolicy_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "when": "${someCondition}", + "exceptWhen": "${someOtherCondition}", + "delay": "PT5S", + "backoff": {"exponential": {"factor": 2}}, + "limit": { + "attempt": {"count": 3, "duration": "PT1M"}, + "duration": "PT10M" + }, + "jitter": {"from": "PT1S", "to": "PT3S"} + }` + + var retryPolicy RetryPolicy + err := json.Unmarshal([]byte(jsonData), &retryPolicy) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{"${someCondition}"}, retryPolicy.When) + assert.Equal(t, &RuntimeExpression{"${someOtherCondition}"}, retryPolicy.ExceptWhen) + assert.Equal(t, NewDurationExpr("PT5S"), retryPolicy.Delay) + assert.NotNil(t, retryPolicy.Backoff.Exponential) + assert.Equal(t, map[string]interface{}{"factor": float64(2)}, retryPolicy.Backoff.Exponential.Definition) + assert.Equal(t, 3, retryPolicy.Limit.Attempt.Count) + assert.Equal(t, NewDurationExpr("PT1M"), retryPolicy.Limit.Attempt.Duration) + assert.Equal(t, NewDurationExpr("PT10M"), retryPolicy.Limit.Duration) + assert.Equal(t, NewDurationExpr("PT1S"), retryPolicy.Jitter.From) + assert.Equal(t, 
NewDurationExpr("PT3S"), retryPolicy.Jitter.To) +} + +func TestRetryPolicy_Validation(t *testing.T) { + // Valid RetryPolicy + retryPolicy := RetryPolicy{ + When: &RuntimeExpression{"${someCondition}"}, + ExceptWhen: &RuntimeExpression{"${someOtherCondition}"}, + Delay: NewDurationExpr("PT5S"), + Backoff: &RetryBackoff{ + Constant: &BackoffDefinition{ + Definition: map[string]interface{}{"delay": 5}, + }, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{ + Count: 3, + Duration: NewDurationExpr("PT1M"), + }, + Duration: NewDurationExpr("PT10M"), + }, + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + To: NewDurationExpr("PT3S"), + }, + } + assert.NoError(t, validate.Struct(retryPolicy)) + + // Invalid RetryPolicy (missing required fields in Jitter) + invalidRetryPolicy := RetryPolicy{ + Jitter: &RetryPolicyJitter{ + From: NewDurationExpr("PT1S"), + }, + } + assert.Error(t, validate.Struct(invalidRetryPolicy)) +} + +func TestRetryPolicy_UnmarshalJSON_WithReference(t *testing.T) { + retries := map[string]*RetryPolicy{ + "default": { + Delay: &Duration{DurationInline{Seconds: 3}}, + Backoff: &RetryBackoff{ + Exponential: &BackoffDefinition{}, + }, + Limit: RetryLimit{ + Attempt: &RetryLimitAttempt{Count: 5}, + }, + }, + } + + jsonData := `{ + "retry": "default" + }` + + var task TryTaskCatch + err := json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + // Resolve the reference + err = task.Retry.ResolveReference(retries) + assert.NoError(t, err) + + assert.Equal(t, retries["default"].Delay, task.Retry.Delay) + assert.Equal(t, retries["default"].Backoff, task.Retry.Backoff) + assert.Equal(t, retries["default"].Limit, task.Retry.Limit) +} + +func TestRetryPolicy_UnmarshalJSON_Inline(t *testing.T) { + jsonData := `{ + "retry": { + "delay": { "seconds": 3 }, + "backoff": { "exponential": {} }, + "limit": { "attempt": { "count": 5 } } + } + }` + + var task TryTaskCatch + err := json.Unmarshal([]byte(jsonData), &task) + assert.NoError(t, err) + + assert.NotNil(t, task.Retry) + assert.Equal(t, int32(3), task.Retry.Delay.AsInline().Seconds) + assert.NotNil(t, task.Retry.Backoff.Exponential) + assert.Equal(t, 5, task.Retry.Limit.Attempt.Count) +} diff --git a/model/task_wait.go b/model/task_wait.go new file mode 100644 index 0000000..41b5cc5 --- /dev/null +++ b/model/task_wait.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// WaitTask represents a task configuration to delay execution for a specified duration. +type WaitTask struct { + TaskBase `json:",inline"` + Wait *Duration `json:"wait" validate:"required"` +} + +// MarshalJSON for WaitTask to ensure proper serialization. 
+func (wt *WaitTask) MarshalJSON() ([]byte, error) { + type Alias WaitTask + waitData, err := json.Marshal(wt.Wait) + if err != nil { + return nil, err + } + + alias := struct { + Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (Alias)(*wt), + Wait: waitData, + } + + return json.Marshal(alias) +} + +// UnmarshalJSON for WaitTask to ensure proper deserialization. +func (wt *WaitTask) UnmarshalJSON(data []byte) error { + type Alias WaitTask + alias := struct { + *Alias + Wait json.RawMessage `json:"wait"` + }{ + Alias: (*Alias)(wt), + } + + // Unmarshal data into alias + if err := json.Unmarshal(data, &alias); err != nil { + return fmt.Errorf("failed to unmarshal WaitTask: %w", err) + } + + // Unmarshal Wait field + if err := json.Unmarshal(alias.Wait, &wt.Wait); err != nil { + return fmt.Errorf("failed to unmarshal Wait field: %w", err) + } + + return nil +} diff --git a/model/task_wait_test.go b/model/task_wait_test.go new file mode 100644 index 0000000..6dda965 --- /dev/null +++ b/model/task_wait_test.go @@ -0,0 +1,88 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestWaitTask_MarshalJSON(t *testing.T) { + waitTask := &WaitTask{ + TaskBase: TaskBase{ + If: &RuntimeExpression{Value: "${condition}"}, + Input: &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, + Output: &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, + Timeout: &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, + Then: &FlowDirective{Value: "continue"}, + Metadata: map[string]interface{}{ + "meta": "data", + }, + }, + Wait: NewDurationExpr("P1DT1H"), + } + + data, err := json.Marshal(waitTask) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }`, string(data)) +} + +func TestWaitTask_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "if": "${condition}", + "input": { "from": {"key": "value"} }, + "output": { "as": {"result": "output"} }, + "timeout": { "after": "10s" }, + "then": "continue", + "metadata": {"meta": "data"}, + "wait": "P1DT1H" + }` + + waitTask := &WaitTask{} + err := json.Unmarshal([]byte(jsonData), waitTask) + assert.NoError(t, err) + assert.Equal(t, &RuntimeExpression{Value: "${condition}"}, waitTask.If) + assert.Equal(t, &Input{From: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"key": "value"}}}, waitTask.Input) + assert.Equal(t, &Output{As: &ObjectOrRuntimeExpr{Value: map[string]interface{}{"result": "output"}}}, waitTask.Output) + assert.Equal(t, &TimeoutOrReference{Timeout: &Timeout{After: NewDurationExpr("10s")}}, waitTask.Timeout) + assert.Equal(t, &FlowDirective{Value: "continue"}, waitTask.Then) + 
	assert.Equal(t, map[string]interface{}{"meta": "data"}, waitTask.Metadata)
+	assert.Equal(t, NewDurationExpr("P1DT1H"), waitTask.Wait)
+}
+
+func TestWaitTask_Validation(t *testing.T) {
+	// Valid WaitTask
+	waitTask := &WaitTask{
+		TaskBase: TaskBase{},
+		Wait:     NewDurationExpr("P1DT1H"),
+	}
+	assert.NoError(t, validate.Struct(waitTask))
+
+	// Invalid WaitTask (empty wait)
+	invalidWaitTask := &WaitTask{
+		TaskBase: TaskBase{},
+	}
+	assert.Error(t, validate.Struct(invalidWaitTask))
+}
diff --git a/model/timeout.go b/model/timeout.go
new file mode 100644
index 0000000..dd63af8
--- /dev/null
+++ b/model/timeout.go
@@ -0,0 +1,232 @@
+// Copyright 2025 The Serverless Workflow Specification Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package model
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+)
+
+// Timeout specifies a time limit for tasks or workflows.
+type Timeout struct {
+	// After is the duration after which to time out.
+	After *Duration `json:"after" validate:"required"`
+}
+
+// UnmarshalJSON implements custom unmarshalling for Timeout.
+func (t *Timeout) UnmarshalJSON(data []byte) error {
+	var raw map[string]json.RawMessage
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+
+	// Check if "after" key exists
+	afterData, ok := raw["after"]
+	if !ok {
+		return errors.New("missing 'after' key in Timeout JSON")
+	}
+
+	// Unmarshal "after" using the Duration type
+	if err := json.Unmarshal(afterData, &t.After); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+// MarshalJSON implements custom marshalling for Timeout.
+func (t *Timeout) MarshalJSON() ([]byte, error) {
+	// Check the type of t.After.Value
+	switch v := t.After.Value.(type) {
+	case DurationInline:
+		// Serialize inline duration
+		return json.Marshal(map[string]interface{}{
+			"after": v,
+		})
+	case DurationExpression:
+		// Serialize expression as a simple string
+		return json.Marshal(map[string]string{
+			"after": v.Expression,
+		})
+	case string:
+		// Handle direct string values as DurationExpression
+		return json.Marshal(map[string]string{
+			"after": v,
+		})
+	default:
+		return nil, errors.New("unknown Duration type in Timeout")
+	}
+}
+
+// TimeoutOrReference handles either a Timeout definition or a reference (string).
+type TimeoutOrReference struct {
+	Timeout   *Timeout `json:"-" validate:"required_without=Reference"`
+	Reference *string  `json:"-" validate:"required_without=Timeout"`
+}
+
+// UnmarshalJSON implements custom unmarshalling for TimeoutOrReference.
+func (tr *TimeoutOrReference) UnmarshalJSON(data []byte) error { + // Attempt to unmarshal as a Timeout + var asTimeout Timeout + if err := json.Unmarshal(data, &asTimeout); err == nil { + tr.Timeout = &asTimeout + tr.Reference = nil + return nil + } + + // Attempt to unmarshal as a string (reference) + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + tr.Reference = &asString + tr.Timeout = nil + return nil + } + + // If neither works, return an error + return errors.New("invalid TimeoutOrReference: must be a Timeout or a string reference") +} + +// MarshalJSON implements custom marshalling for TimeoutOrReference. +func (tr *TimeoutOrReference) MarshalJSON() ([]byte, error) { + // Marshal as a Timeout if present + if tr.Timeout != nil { + return json.Marshal(tr.Timeout) + } + + // Marshal as a string reference if present + if tr.Reference != nil { + return json.Marshal(tr.Reference) + } + + return nil, errors.New("invalid TimeoutOrReference: neither Timeout nor Ref is set") +} + +// Duration represents a flexible duration that can be either inline or an ISO 8601 expression. +type Duration struct { + Value interface{} `json:"-"` +} + +// NewDurationExpr accessor to create a Duration object from a string +func NewDurationExpr(durationExpression string) *Duration { + return &Duration{DurationExpression{durationExpression}} +} + +func (d *Duration) AsExpression() string { + switch v := d.Value.(type) { + case string: + return v + case DurationExpression: + return v.String() + default: + return "" + } +} + +func (d *Duration) AsInline() *DurationInline { + switch v := d.Value.(type) { + case DurationInline: + return &v + default: + return nil + } +} + +// UnmarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) UnmarshalJSON(data []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err == nil { + validKeys := map[string]bool{"days": true, "hours": true, "minutes": true, "seconds": true, "milliseconds": true} + for key := range raw { + if !validKeys[key] { + return fmt.Errorf("unexpected key '%s' in duration object", key) + } + } + + inline := DurationInline{} + if err := json.Unmarshal(data, &inline); err != nil { + return fmt.Errorf("failed to unmarshal DurationInline: %w", err) + } + d.Value = inline + return nil + } + + var asString string + if err := json.Unmarshal(data, &asString); err == nil { + d.Value = DurationExpression{Expression: asString} + return nil + } + + return errors.New("data must be a valid duration string or object") +} + +// MarshalJSON for Duration to handle both inline and expression durations. +func (d *Duration) MarshalJSON() ([]byte, error) { + switch v := d.Value.(type) { + case DurationInline: + return json.Marshal(v) + case DurationExpression: + return json.Marshal(v.Expression) + case string: + durationExpression := &DurationExpression{Expression: v} + return json.Marshal(durationExpression) + default: + return nil, errors.New("unknown Duration type") + } +} + +// DurationInline represents the inline definition of a duration. +type DurationInline struct { + Days int32 `json:"days,omitempty"` + Hours int32 `json:"hours,omitempty"` + Minutes int32 `json:"minutes,omitempty"` + Seconds int32 `json:"seconds,omitempty"` + Milliseconds int32 `json:"milliseconds,omitempty"` +} + +// MarshalJSON for DurationInline. 
+func (d *DurationInline) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "days": d.Days, + "hours": d.Hours, + "minutes": d.Minutes, + "seconds": d.Seconds, + "milliseconds": d.Milliseconds, + }) +} + +// DurationExpression represents the ISO 8601 expression of a duration. +type DurationExpression struct { + Expression string `json:"-" validate:"required,iso8601_duration"` +} + +func (d *DurationExpression) String() string { + return d.Expression +} + +// MarshalJSON for DurationExpression. +func (d *DurationExpression) MarshalJSON() ([]byte, error) { + return json.Marshal(d.Expression) +} + +// UnmarshalJSON for DurationExpression to handle ISO 8601 strings. +func (d *DurationExpression) UnmarshalJSON(data []byte) error { + var asString string + if err := json.Unmarshal(data, &asString); err != nil { + return err + } + d.Expression = asString + return nil +} diff --git a/model/timeout_test.go b/model/timeout_test.go new file mode 100644 index 0000000..ae17555 --- /dev/null +++ b/model/timeout_test.go @@ -0,0 +1,228 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTimeout_UnmarshalJSON(t *testing.T) { + // Test cases for Timeout unmarshalling + tests := []struct { + name string + jsonStr string + expect *Timeout + err bool + }{ + { + name: "Valid inline duration", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + err: false, + }, + { + name: "Valid ISO 8601 duration", + jsonStr: `{"after": "P1Y2M3DT4H5M6S"}`, + expect: &Timeout{ + After: NewDurationExpr("P1Y2M3DT4H5M6S"), + }, + err: false, + }, + { + name: "Invalid duration type", + jsonStr: `{"after": {"unknown": "value"}}`, + expect: nil, + err: true, + }, + { + name: "Missing after key", + jsonStr: `{}`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var timeout Timeout + err := json.Unmarshal([]byte(test.jsonStr), &timeout) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &timeout) + } + }) + } +} + +func TestTimeout_MarshalJSON(t *testing.T) { + tests := []struct { + name string + input *Timeout + expected string + wantErr bool + }{ + { + name: "ISO 8601 Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationExpression{Expression: "PT1H"}, + }, + }, + expected: `{"after":"PT1H"}`, + wantErr: false, + }, + { + name: "Inline Duration", + input: &Timeout{ + After: &Duration{ + Value: DurationInline{ + Days: 1, + Hours: 2, + Minutes: 30, + }, + }, + }, + expected: `{"after":{"days":1,"hours":2,"minutes":30}}`, + wantErr: false, + }, + { + name: "Invalid Duration", + input: &Timeout{After: &Duration{Value: 123}}, + expected: "", + wantErr: true, + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + data, err := json.Marshal(tt.input) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, tt.expected, string(data)) + } + }) + } +} + +func TestTimeoutOrReference_UnmarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference unmarshalling + tests := []struct { + name string + jsonStr string + expect *TimeoutOrReference + err bool + }{ + { + name: "Valid Timeout", + jsonStr: `{"after": {"days": 1, "hours": 2}}`, + expect: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + err: false, + }, + { + name: "Valid Ref", + jsonStr: `"some-timeout-reference"`, + expect: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + err: false, + }, + { + name: "Invalid JSON", + jsonStr: `{"invalid": }`, + expect: nil, + err: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var tor TimeoutOrReference + err := json.Unmarshal([]byte(test.jsonStr), &tor) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expect, &tor) + } + }) + } +} + +func ptrString(s string) *string { + return &s +} + +func TestTimeoutOrReference_MarshalJSON(t *testing.T) { + // Test cases for TimeoutOrReference marshalling + tests := []struct { + name string + input *TimeoutOrReference + expect string + err bool + }{ + { + name: "Valid Timeout", + input: &TimeoutOrReference{ + Timeout: &Timeout{ + After: &Duration{DurationInline{ + Days: 1, + Hours: 2, + }}, + }, + }, + expect: `{"after":{"days":1,"hours":2}}`, + err: false, + }, + { + name: "Valid Ref", + input: &TimeoutOrReference{ + Reference: ptrString("some-timeout-reference"), + }, + expect: `"some-timeout-reference"`, + err: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + data, err := json.Marshal(test.input) + if test.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.JSONEq(t, test.expect, string(data)) + } + }) + } +} diff --git a/model/validator.go b/model/validator.go new file mode 100644 index 0000000..91c34b9 --- /dev/null +++ b/model/validator.go @@ -0,0 +1,389 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
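+
+// Editor's note (illustrative sketch, not part of the original patch): the
+// custom validators registered in this file are consumed through the exported
+// GetValidator() accessor defined below, typically as:
+//
+//	if err := model.GetValidator().Struct(workflow); err != nil {
+//		// handle validation errors
+//	}
+//
+// The variable name "workflow" above is hypothetical; only GetValidator() and
+// the registered tags come from this file.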
+ +package model + +import ( + "errors" + "fmt" + "github.com/go-playground/validator/v10" + "regexp" + "strings" +) + +var ( + iso8601DurationPattern = regexp.MustCompile(`^P(\d+Y)?(\d+M)?(\d+D)?(T(\d+H)?(\d+M)?(\d+S)?)?$`) + semanticVersionPattern = regexp.MustCompile(`^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) + hostnameRFC1123Pattern = regexp.MustCompile(`^(([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)*[a-zA-Z]{2,63}|[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)$`) +) + +var validate *validator.Validate + +func registerValidator(tag string, fn validator.Func) { + + if err := validate.RegisterValidation(tag, fn); err != nil { + panic(fmt.Sprintf("Failed to register validator '%s': %v", tag, err)) + } +} + +func init() { + validate = validator.New() + + registerValidator("basic_policy", validateBasicPolicy) + registerValidator("bearer_policy", validateBearerPolicy) + registerValidator("digest_policy", validateDigestPolicy) + registerValidator("oauth2_policy", validateOAuth2Policy) + registerValidator("client_auth_type", validateOptionalOAuthClientAuthentication) + registerValidator("encoding_type", validateOptionalOAuth2TokenRequestEncoding) + + registerValidator("hostname_rfc1123", func(fl validator.FieldLevel) bool { + return isHostnameValid(fl.Field().String()) + }) + registerValidator("uri_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriPattern.MatchString(value) + }) + registerValidator("uri_template_pattern", func(fl validator.FieldLevel) bool { + value, ok := fl.Field().Interface().(string) + if !ok { + return false + } + return LiteralUriTemplatePattern.MatchString(value) + }) + registerValidator("semver_pattern", validateSemanticVersion) + registerValidator("iso8601_duration", validateISO8601Duration) + + registerValidator("object_or_string", validateObjectOrString) + registerValidator("object_or_runtime_expr", validateObjectOrRuntimeExpr) + registerValidator("string_or_runtime_expr", validateStringOrRuntimeExpr) + registerValidator("uri_template_or_runtime_expr", validateURITemplateOrRuntimeExpr) + registerValidator("json_pointer_or_runtime_expr", validateJsonPointerOrRuntimeExpr) + + registerValidator("switch_item", validateSwitchItem) + validate.RegisterStructValidation(validateTaskItem, TaskItem{}) +} + +func GetValidator() *validator.Validate { + return validate +} + +// validateTaskItem is a struct-level validation function for TaskItem. 
+func validateTaskItem(sl validator.StructLevel) {
+	taskItem := sl.Current().Interface().(TaskItem)
+
+	// Validate Key
+	if taskItem.Key == "" {
+		sl.ReportError(taskItem.Key, "Key", "Key", "required", "")
+		return
+	}
+
+	// Validate Task is not nil
+	if taskItem.Task == nil {
+		sl.ReportError(taskItem.Task, "Task", "Task", "required", "")
+		return
+	}
+
+	// Validate the concrete type of Task and capture nested errors
+	switch t := taskItem.Task.(type) {
+	case *CallHTTP:
+		validateConcreteTask(sl, t, "Task")
+	case *CallOpenAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallGRPC:
+		validateConcreteTask(sl, t, "Task")
+	case *CallAsyncAPI:
+		validateConcreteTask(sl, t, "Task")
+	case *CallFunction:
+		validateConcreteTask(sl, t, "Task")
+	case *DoTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForkTask:
+		validateConcreteTask(sl, t, "Task")
+	case *EmitTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ForTask:
+		validateConcreteTask(sl, t, "Task")
+	case *ListenTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RaiseTask:
+		validateConcreteTask(sl, t, "Task")
+	case *RunTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SetTask:
+		validateConcreteTask(sl, t, "Task")
+	case *SwitchTask:
+		validateConcreteTask(sl, t, "Task")
+	case *TryTask:
+		validateConcreteTask(sl, t, "Task")
+	case *WaitTask:
+		validateConcreteTask(sl, t, "Task")
+	default:
+		sl.ReportError(taskItem.Task, "Task", "Task", "unknown_task", "unrecognized task type")
+	}
+}
+
+// validateConcreteTask validates a concrete Task type and reports nested errors.
+func validateConcreteTask(sl validator.StructLevel, task interface{}, fieldName string) {
+	err := validate.Struct(task)
+	if err != nil {
+		var validationErrors validator.ValidationErrors
+		if errors.As(err, &validationErrors) {
+			for _, ve := range validationErrors {
+				// Report only nested fields to avoid duplicates
+				if ve.Namespace() != fieldName {
+					sl.ReportError(ve.Value(), fieldName+"."+ve.StructNamespace(), ve.StructField(), ve.Tag(), ve.Param())
+				}
+			}
+		}
+	}
+}
+
+// validateSwitchItem is a custom validation function for SwitchItem.
+func validateSwitchItem(fl validator.FieldLevel) bool {
+	switchItem, ok := fl.Field().Interface().(SwitchItem)
+	if !ok {
+		return false
+	}
+	return len(switchItem) == 1
+}
+
+// validateBasicPolicy ensures BasicAuthenticationPolicy sets either inline credentials or 'use', but not both.
+func validateBasicPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BasicAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if (policy.Username != "" || policy.Password != "") && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateBearerPolicy ensures BearerAuthenticationPolicy sets either an inline token or 'use', but not both.
+func validateBearerPolicy(fl validator.FieldLevel) bool {
+	policy, ok := fl.Parent().Interface().(BearerAuthenticationPolicy)
+	if !ok {
+		return false
+	}
+	if policy.Token != "" && policy.Use != "" {
+		return false
+	}
+	return true
+}
+
+// validateDigestPolicy ensures DigestAuthenticationPolicy sets either inline credentials or 'use', but not both.
+func validateDigestPolicy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(DigestAuthenticationPolicy) + if !ok { + return false + } + if (policy.Username != "" || policy.Password != "") && policy.Use != "" { + return false + } + return true +} + +func validateOAuth2Policy(fl validator.FieldLevel) bool { + policy, ok := fl.Parent().Interface().(OAuth2AuthenticationPolicy) + if !ok { + return false + } + + if (policy.Properties != nil || policy.Endpoints != nil) && policy.Use != "" { + return false // Both fields are set, invalid + } + if policy.Properties == nil && policy.Use == "" { + return false // Neither field is set, invalid + } + return true +} + +// validateOptionalOAuthClientAuthentication checks if the given value is a valid OAuthClientAuthenticationType. +func validateOptionalOAuthClientAuthentication(fl validator.FieldLevel) bool { + value := fl.Field().String() + + if len(value) == 0 { + return true + } + switch OAuthClientAuthenticationType(value) { + case + OAuthClientAuthClientSecretBasic, + OAuthClientAuthClientSecretPost, + OAuthClientAuthClientSecretJWT, + OAuthClientAuthPrivateKeyJWT, + OAuthClientAuthNone: + return true + default: + return false + } +} + +func validateOptionalOAuth2TokenRequestEncoding(fl validator.FieldLevel) bool { + value := fl.Field().String() + + // Allow empty fields (optional case) + if value == "" { + return true + } + + // Validate against allowed constants + switch OAuth2TokenRequestEncodingType(value) { + case + EncodingTypeFormUrlEncoded, + EncodingTypeApplicationJson: + return true + default: + return false + } +} + +func validateObjectOrString(fl validator.FieldLevel) bool { + // Access the "Value" field + value := fl.Field().Interface() + + // Validate based on the type of "Value" + switch v := value.(type) { + case string: + return v != "" // Validate non-empty strings. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Reject unsupported types. + } +} + +func validateObjectOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case map[string]interface{}: + return len(v) > 0 // Validate non-empty objects. + default: + return false // Unsupported types. + } +} + +func validateStringOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case RuntimeExpression: + return v.IsValid() // Validate runtime expression format. + case string: + return v != "" // Validate non-empty strings. + default: + return false // Unsupported types. 
+ } +} + +func validateURITemplateOrRuntimeExpr(fl validator.FieldLevel) bool { + value := fl.Field().Interface() + + // Handle nil or empty values when 'omitempty' is used + if value == nil { + return true + } + + switch v := value.(type) { + case LiteralUri: + return LiteralUriPattern.MatchString(v.String()) + case LiteralUriTemplate: + return LiteralUriTemplatePattern.MatchString(v.String()) + case RuntimeExpression: + return v.IsValid() + case string: + // Check if the string is a valid URI + if LiteralUriPattern.MatchString(v) { + return true + } + + // Check if the string is a valid URI Template + if LiteralUriTemplatePattern.MatchString(v) { + return true + } + + // Check if the string is a valid RuntimeExpression + expression := RuntimeExpression{Value: v} + return expression.IsValid() + default: + fmt.Printf("Unsupported type in URITemplateOrRuntimeExpr.Value: %T\n", v) + return false + } +} + +func validateJsonPointerOrRuntimeExpr(fl validator.FieldLevel) bool { + // Retrieve the field value using reflection + value := fl.Field().Interface() + + // Validate based on the type + switch v := value.(type) { + case string: // JSON Pointer + return JSONPointerPattern.MatchString(v) + case RuntimeExpression: + return v.IsValid() + default: + return false // Unsupported types. + } +} + +func validateISO8601Duration(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isISO8601DurationValid(input) +} + +func validateSemanticVersion(fl validator.FieldLevel) bool { + input, ok := fl.Field().Interface().(string) + if !ok { + return false + } + + return isSemanticVersionValid(input) +} + +// isISO8601DurationValid validates if a string is a valid ISO 8601 duration. +func isISO8601DurationValid(input string) bool { + if !iso8601DurationPattern.MatchString(input) { + return false + } + + trimmed := strings.TrimPrefix(input, "P") + if trimmed == "" || trimmed == "T" { + return false + } + + return true +} + +// isSemanticVersionValid validates if a string is a valid semantic version. +func isSemanticVersionValid(input string) bool { + return semanticVersionPattern.MatchString(input) +} + +// isHostnameValid validates if a string is a valid RFC 1123 hostname. +func isHostnameValid(input string) bool { + return hostnameRFC1123Pattern.MatchString(input) +} diff --git a/model/validator_test.go b/model/validator_test.go new file mode 100644 index 0000000..6607369 --- /dev/null +++ b/model/validator_test.go @@ -0,0 +1,68 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
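+
+// Editor's note (illustrative, not part of the original patch): these table
+// tests exercise the unexported regex helpers from validator.go. Assuming the
+// standard module layout of this repository, they can be run in isolation with:
+//
+//	go test ./model -run TestRegexValidators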
+
+package model
+
+import (
+	"testing"
+)
+
+func TestRegexValidators(t *testing.T) {
+	testCases := []struct {
+		name     string
+		validate func(string) bool
+		input    string
+		expected bool
+	}{
+		// ISO 8601 Duration Tests
+		{"ISO 8601 Duration Valid 1", isISO8601DurationValid, "P2Y", true},
+		{"ISO 8601 Duration Valid 2", isISO8601DurationValid, "P1DT12H30M", true},
+		{"ISO 8601 Duration Valid 3", isISO8601DurationValid, "P1Y2M3D", true},
+		{"ISO 8601 Duration Invalid 1", isISO8601DurationValid, "P1Y2M3D4H", false},
+		{"ISO 8601 Duration Valid 4", isISO8601DurationValid, "P1Y", true},
+		{"ISO 8601 Duration Valid 5", isISO8601DurationValid, "PT1H", true},
+		{"ISO 8601 Duration Invalid 2", isISO8601DurationValid, "P1Y2M3D4H5M6S", false},
+		{"ISO 8601 Duration Invalid 3", isISO8601DurationValid, "P", false},
+		{"ISO 8601 Duration Invalid 4", isISO8601DurationValid, "P1Y2M3D4H5M6S7", false},
+		{"ISO 8601 Duration Invalid 5", isISO8601DurationValid, "1Y", false},
+
+		// Semantic Versioning Tests
+		{"Semantic Version Valid 1", isSemanticVersionValid, "1.0.0", true},
+		{"Semantic Version Valid 2", isSemanticVersionValid, "1.2.3", true},
+		{"Semantic Version Valid 3", isSemanticVersionValid, "1.2.3-beta", true},
+		{"Semantic Version Valid 4", isSemanticVersionValid, "1.2.3-beta.1", true},
+		{"Semantic Version Valid 5", isSemanticVersionValid, "1.2.3-beta.1+build.123", true},
+		{"Semantic Version Invalid 1", isSemanticVersionValid, "v1.2.3", false},
+		{"Semantic Version Invalid 2", isSemanticVersionValid, "1.2", false},
+		{"Semantic Version Valid 6", isSemanticVersionValid, "1.2.3-beta.x", true},
+
+		// RFC 1123 Hostname Tests
+		{"RFC 1123 Hostname Valid 1", isHostnameValid, "example.com", true},
+		{"RFC 1123 Hostname Valid 2", isHostnameValid, "my-hostname", true},
+		{"RFC 1123 Hostname Valid 3", isHostnameValid, "subdomain.example.com", true},
+		{"RFC 1123 Hostname Invalid 1", isHostnameValid, "127.0.0.1", false},
+		{"RFC 1123 Hostname Invalid 2", isHostnameValid, "example.com.", false},
+		{"RFC 1123 Hostname Invalid 3", isHostnameValid, "example..com", false},
+		{"RFC 1123 Hostname Invalid 4", isHostnameValid, "example.com-", false},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			result := tc.validate(tc.input)
+			if result != tc.expected {
+				t.Errorf("Validation failed for '%s': input='%s', expected=%v, got=%v", tc.name, tc.input, tc.expected, result)
+			}
+		})
+	}
+}
diff --git a/model/workflow.go b/model/workflow.go
index 54723bb..17973e1 100644
--- a/model/workflow.go
+++ b/model/workflow.go
@@ -1,10 +1,10 @@
-// Copyright 2021 The Serverless Workflow Specification Authors
+// Copyright 2025 The Serverless Workflow Specification Authors
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
 //
-// http://www.apache.org/licenses/LICENSE-2.0
+// http://www.apache.org/licenses/LICENSE-2.0
 //
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,583 +15,221 @@
 package model
 
 import (
-	"bytes"
 	"encoding/json"
 	"errors"
-
-	"github.com/serverlessworkflow/sdk-go/v2/util"
+	"fmt"
 )
 
-// InvokeKind defines how the target is invoked.
-type InvokeKind string
-
-func (i InvokeKind) KindValues() []string {
-	return []string{
-		string(InvokeKindSync),
-		string(InvokeKindAsync),
+// Workflow represents the root structure of a workflow.
+type Workflow struct { + Document Document `json:"document" yaml:"document" validate:"required"` + Input *Input `json:"input,omitempty" yaml:"input,omitempty"` + Use *Use `json:"use,omitempty" yaml:"use"` + Do *TaskList `json:"do" yaml:"do" validate:"required,dive"` + Timeout *TimeoutOrReference `json:"timeout,omitempty" yaml:"timeout,omitempty"` + Output *Output `json:"output,omitempty" yaml:"output,omitempty"` + Schedule *Schedule `json:"schedule,omitempty" yaml:"schedule,omitempty"` +} + +func (w *Workflow) MarshalYAML() (interface{}, error) { + // Create a map to hold fields + data := map[string]interface{}{ + "document": w.Document, } -} -func (i InvokeKind) String() string { - return string(i) -} - -const ( - // InvokeKindSync meaning that worfklow execution should wait until the target completes. - InvokeKindSync InvokeKind = "sync" - // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its - // completion. - InvokeKindAsync InvokeKind = "async" -) - -// ActionMode specifies how actions are to be performed. -type ActionMode string - -func (i ActionMode) KindValues() []string { - return []string{ - string(ActionModeSequential), - string(ActionModeParallel), + // Conditionally add fields + if w.Input != nil { + data["input"] = w.Input + } + if w.Use != nil { + data["use"] = w.Use + } + data["do"] = w.Do + if w.Timeout != nil { + data["timeout"] = w.Timeout + } + if w.Output != nil { + data["output"] = w.Output + } + if w.Schedule != nil { + data["schedule"] = w.Schedule } -} -func (i ActionMode) String() string { - return string(i) + return data, nil } -const ( - // ActionModeSequential specifies actions should be performed in sequence - ActionModeSequential ActionMode = "sequential" - - // ActionModeParallel specifies actions should be performed in parallel - ActionModeParallel ActionMode = "parallel" -) - -const ( - // UnlimitedTimeout description for unlimited timeouts - UnlimitedTimeout = "unlimited" -) - -type ExpressionLangType string - -func (i ExpressionLangType) KindValues() []string { - return []string{ - string(JqExpressionLang), - string(JsonPathExpressionLang), - string(CELExpressionLang), - } +// Document holds metadata for the workflow. +type Document struct { + DSL string `json:"dsl" yaml:"dsl" validate:"required,semver_pattern"` + Namespace string `json:"namespace" yaml:"namespace" validate:"required,hostname_rfc1123"` + Name string `json:"name" yaml:"name" validate:"required,hostname_rfc1123"` + Version string `json:"version" yaml:"version" validate:"required,semver_pattern"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + Tags map[string]string `json:"tags,omitempty" yaml:"tags,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty" yaml:"metadata,omitempty"` } -func (i ExpressionLangType) String() string { - return string(i) +// Input Configures the workflow's input. +type Input struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + From *ObjectOrRuntimeExpr `json:"from,omitempty" validate:"omitempty"` } -const ( - //JqExpressionLang ... - JqExpressionLang ExpressionLangType = "jq" - - // JsonPathExpressionLang ... 
- JsonPathExpressionLang ExpressionLangType = "jsonpath" - - // CELExpressionLang - CELExpressionLang ExpressionLangType = "cel" -) - -// BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces -// to make it easy for custom unmarshalers implementations to unmarshal the common data structure. -// +builder-gen:new-call=ApplyDefault -type BaseWorkflow struct { - // Workflow unique identifier - // +optional - ID string `json:"id,omitempty" validate:"required_without=Key"` - // Key Domain-specific workflow identifier - // +optional - Key string `json:"key,omitempty" validate:"required_without=ID"` - // Workflow name - Name string `json:"name,omitempty"` - // Workflow description. - // +optional - Description string `json:"description,omitempty"` - // Workflow version. - // +optional - Version string `json:"version" validate:"omitempty,min=1"` - // Workflow start definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Start *Start `json:"start,omitempty"` - // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important - // qualities. - // +optional - Annotations []string `json:"annotations,omitempty"` - // DataInputSchema URI or Object of the JSON Schema used to validate the workflow data input - // +optional - DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` - // Serverless Workflow schema version - // +kubebuilder:validation:Required - // +kubebuilder:default="0.8" - SpecVersion string `json:"specVersion" validate:"required"` - // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, - // inside your Workflow Expressions. - // +optional - Secrets Secrets `json:"secrets,omitempty" validate:"unique"` - // Constants Workflow constants are used to define static, and immutable, data which is available to - // Workflow Expressions. - // +optional - Constants *Constants `json:"constants,omitempty"` - // Identifies the expression language used for workflow expressions. Default is 'jq'. - // +kubebuilder:validation:Enum=jq;jsonpath;cel - // +kubebuilder:default=jq - // +optional - ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"required,oneofkind"` - // Defines the workflow default timeout settings. - // +optional - Timeouts *Timeouts `json:"timeouts,omitempty"` - // Defines checked errors that can be explicitly handled during workflow execution. - // +optional - Errors Errors `json:"errors,omitempty" validate:"unique=Name,dive"` - // If "true", workflow instances is not terminated when there are no active execution paths. - // Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" - // +optional - KeepActive bool `json:"keepActive,omitempty"` - // Metadata custom information shared with the runtime. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Metadata Metadata `json:"metadata,omitempty"` - // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false - // +optional - AutoRetries bool `json:"autoRetries,omitempty"` - // Auth definitions can be used to define authentication information that should be applied to resources defined - // in the operation property of function definitions. 
It is not used as authentication information for the - // function invocation, but just to access the resource containing the function invocation information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Auth Auths `json:"auth,omitempty" validate:"unique=Name,dive"` +// Output Configures the output of a workflow or task. +type Output struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -// ApplyDefault set the default values for Workflow -func (w *BaseWorkflow) ApplyDefault() { - w.SpecVersion = "0.8" - w.ExpressionLang = JqExpressionLang +// Export Set the content of the context. +type Export struct { + Schema *Schema `json:"schema,omitempty" validate:"omitempty"` + As *ObjectOrRuntimeExpr `json:"as,omitempty" validate:"omitempty"` } -type Auths []Auth - -type authsUnmarshal Auths - -// UnmarshalJSON implements json.Unmarshaler -func (r *Auths) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) +// Schedule the workflow. +type Schedule struct { + Every *Duration `json:"every,omitempty" validate:"omitempty"` + Cron string `json:"cron,omitempty" validate:"omitempty"` + After *Duration `json:"after,omitempty" validate:"omitempty"` + On *EventConsumptionStrategy `json:"on,omitempty" validate:"omitempty"` } -type Errors []Error - -type errorsUnmarshal Errors +const DefaultSchema = "json" -// UnmarshalJSON implements json.Unmarshaler -func (e *Errors) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) +// Schema represents the definition of a schema. +type Schema struct { + Format string `json:"format,omitempty"` + Document interface{} `json:"document,omitempty" validate:"omitempty"` + Resource *ExternalResource `json:"resource,omitempty" validate:"omitempty"` } -// Workflow base definition -// +builder-gen:embedded-ignore-method=BaseWorkflow -type Workflow struct { - BaseWorkflow `json:",inline"` - // +kubebuilder:pruning:PreserveUnknownFields - States States `json:"states" validate:"min=1,unique=Name,dive"` - // +optional - Events Events `json:"events,omitempty" validate:"unique=Name,dive"` - // +optional - Functions Functions `json:"functions,omitempty" validate:"unique=Name,dive"` - // +optional - Retries Retries `json:"retries,omitempty" validate:"unique=Name,dive"` +func (s *Schema) ApplyDefaults() { + if len(s.Format) == 0 { + s.Format = DefaultSchema + } } -type workflowUnmarshal Workflow +// UnmarshalJSON for Schema enforces "oneOf" behavior. 
+func (s *Schema) UnmarshalJSON(data []byte) error { + s.ApplyDefaults() -// UnmarshalJSON implementation for json Unmarshal function for the Workflow type -func (w *Workflow) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - err := util.UnmarshalObject("workflow", data, (*workflowUnmarshal)(w)) - if err != nil { + // Parse into a temporary map for flexibility + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { return err } - if w.Start == nil && len(w.States) > 0 { - w.Start = &Start{ - StateName: w.States[0].Name, + // Check for "document" + if doc, ok := raw["document"]; ok { + // Determine if "document" is a string or an object + switch doc.(type) { + case string: + s.Document = doc + case map[string]interface{}: + s.Document = doc + default: + return errors.New("invalid Schema: 'document' must be a string or an object") } } - return nil -} - -// States ... -// +kubebuilder:validation:MinItems=1 -type States []State - -type statesUnmarshal States - -// UnmarshalJSON implements json.Unmarshaler -func (s *States) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("states", data, (*statesUnmarshal)(s)) -} - -type Events []Event - -type eventsUnmarshal Events - -// UnmarshalJSON implements json.Unmarshaler -func (e *Events) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) -} - -type Functions []Function - -type functionsUnmarshal Functions - -// UnmarshalJSON implements json.Unmarshaler -func (f *Functions) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) -} - -type Retries []Retry - -type retriesUnmarshal Retries - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retries) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) -} - -// Timeouts ... -type Timeouts struct { - // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should - // be 'unlimited'. - // +optional - WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` - // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format). - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format). - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format). - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` - // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format). - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` -} - -type timeoutsUnmarshal Timeouts - -// UnmarshalJSON implements json.Unmarshaler -func (t *Timeouts) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) -} - -// WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration -// format. If not defined, the workflow execution should be given "unlimited" amount of time to complete. 
-// +builder-gen:new-call=ApplyDefault -type WorkflowExecTimeout struct { - // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. - // +kubebuilder:default=unlimited - Duration string `json:"duration" validate:"required,min=1,iso8601duration"` - // If false, workflow instance is allowed to finish current execution. If true, current workflow execution - // is stopped immediately. Default is false. - // +optional - Interrupt bool `json:"interrupt,omitempty"` - // Name of a workflow state to be executed before workflow instance is terminated. - // +optional - RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` -} - -type workflowExecTimeoutUnmarshal WorkflowExecTimeout - -// UnmarshalJSON implements json.Unmarshaler -func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) -} - -// ApplyDefault set the default values for Workflow Exec Timeout -func (w *WorkflowExecTimeout) ApplyDefault() { - w.Duration = UnlimitedTimeout -} - -// Error declaration for workflow definitions -type Error struct { - // Name Domain-specific error name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. - // Should not be defined if error is set to '*'. - // +optional - Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // OnError description. - // +optional - Description string `json:"description,omitempty"` -} - -// Start definition -type Start struct { - // Name of the starting workflow state - // +kubebuilder:validation:Required - StateName string `json:"stateName" validate:"required"` - // Define the recurring time intervals or cron expressions at which workflow instances should be automatically - // started. - // +optional - Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` -} - -type startUnmarshal Start - -// UnmarshalJSON implements json.Unmarshaler -func (s *Start) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) -} - -// Schedule ... -type Schedule struct { - // TODO Interval is required if Cron is not set and vice-versa, make a exclusive validation - // A recurring time interval expressed in the derivative of ISO 8601 format specified below. Declares that - // workflow instances should be automatically created at the start of each time interval in the series. - // +optional - Interval string `json:"interval,omitempty"` - // Cron expression defining when workflow instances should be automatically created. - // optional - Cron *Cron `json:"cron,omitempty"` - // Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time - // w/ timezone then proper timezone conversion will be applied. (default: UTC). - // +optional - Timezone string `json:"timezone,omitempty"` -} - -type scheduleUnmarshal Schedule - -// UnmarshalJSON implements json.Unmarshaler -func (s *Schedule) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) -} - -// Cron ... -type Cron struct { - // Cron expression describing when the workflow instance should be created (automatically). 
- // +kubebuilder:validation:Required - Expression string `json:"expression" validate:"required"` - // Specific date and time (ISO 8601 format) when the cron expression is no longer valid. - // +optional - ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601datetime"` -} + // Check for "resource" + if res, ok := raw["resource"]; ok { + var resource ExternalResource + resBytes, err := json.Marshal(res) + if err != nil { + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) + } + if err := json.Unmarshal(resBytes, &resource); err != nil { + return fmt.Errorf("invalid Schema: failed to parse 'resource': %w", err) + } + s.Resource = &resource + } -type cronUnmarshal Cron + // Validate "oneOf" logic + if (s.Document != nil && s.Resource != nil) || (s.Document == nil && s.Resource == nil) { + return errors.New("invalid Schema: must specify either 'document' or 'resource', but not both") + } -// UnmarshalJSON custom unmarshal function for Cron -func (c *Cron) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) + return nil } -// Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). -// Each state can define a transition definition that is used to determine which state to transition to next. -type Transition struct { - stateParent *State `json:"-"` // used in validation - // Name of the state to transition to next. - // +kubebuilder:validation:Required - NextState string `json:"nextState" validate:"required,min=1"` - // Array of producedEvent definitions. Events to be produced before the transition takes place. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty" validate:"omitempty,dive"` - // If set to true, triggers workflow compensation before this transition is taken. Default is false. - // +kubebuilder:default=false - // +optional - Compensate bool `json:"compensate,omitempty"` -} +// MarshalJSON for Schema marshals the correct field. +func (s *Schema) MarshalJSON() ([]byte, error) { + s.ApplyDefaults() -type transitionUnmarshal Transition + if s.Document != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "document": s.Document, + }) + } + if s.Resource != nil { + return json.Marshal(map[string]interface{}{ + "format": s.Format, + "resource": s.Resource, + }) + } -// UnmarshalJSON implements json.Unmarshaler -func (t *Transition) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) + return nil, errors.New("invalid Schema: no valid field to marshal") } -// OnError ... -type OnError struct { - // ErrorRef Reference to a unique workflow error definition. Used of errorRefs is not used - ErrorRef string `json:"errorRef,omitempty"` - // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used - ErrorRefs []string `json:"errorRefs,omitempty" validate:"omitempty,unique"` - // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if - // retries were unsuccessful. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if - // retries were unsuccessful. 
- // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` +type ExternalResource struct { + Name string `json:"name,omitempty"` + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// End definition -type End struct { - // If true, completes all execution flows in the given workflow instance. - // +optional - Terminate bool `json:"terminate,omitempty"` - // Array of producedEvent definitions. Defines events that should be produced. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` - // If set to true, triggers workflow compensation before workflow execution completes. Default is false. - // +optional - Compensate bool `json:"compensate,omitempty"` - // Defines that current workflow execution should stop, and execution should continue as a new workflow - // instance of the provided id - // +optional - ContinueAs *ContinueAs `json:"continueAs,omitempty"` +type Use struct { + Authentications map[string]*AuthenticationPolicy `json:"authentications,omitempty" validate:"omitempty,dive"` + Errors map[string]*Error `json:"errors,omitempty" validate:"omitempty,dive"` + Extensions ExtensionList `json:"extensions,omitempty" validate:"omitempty,dive"` + Functions NamedTaskMap `json:"functions,omitempty" validate:"omitempty,dive"` + Retries map[string]*RetryPolicy `json:"retries,omitempty" validate:"omitempty,dive"` + Secrets []string `json:"secrets,omitempty"` + Timeouts map[string]*Timeout `json:"timeouts,omitempty" validate:"omitempty,dive"` + Catalogs map[string]*Catalog `json:"catalogs,omitempty" validate:"omitempty,dive"` } -type endUnmarshal End - -// UnmarshalJSON implements json.Unmarshaler -func (e *End) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) +type Catalog struct { + Endpoint *Endpoint `json:"endpoint" validate:"required"` } -// ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) -type ContinueAs struct { - // Unique id of the workflow to continue execution as. - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Version of the workflow to continue execution as. - // +optional - Version string `json:"version,omitempty"` - // If string type, an expression which selects parts of the states data output to become the workflow data input of - // continued execution. If object type, a custom object to become the workflow data input of the continued execution - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data Object `json:"data,omitempty"` - // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. - // Overwrites any specific settings set by that workflow - // +optional - WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` +// FlowDirective represents a directive that can be an enumerated or free-form string. +type FlowDirective struct { + Value string `json:"-" validate:"required"` // Ensure the value is non-empty. 
} -type continueAsUnmarshal ContinueAs - -// UnmarshalJSON implements json.Unmarshaler -func (c *ContinueAs) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) -} +type FlowDirectiveType string -// ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a -// workflow transitions. The eventRef property must match the name of one of the defined produced events in the -// events definition. -type ProduceEvent struct { - // Reference to a defined unique event name in the events definition - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // If String, expression which selects parts of the states data output to become the data of the produced event. - // If object a custom object to become the data of produced event. - // +optional - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Data Object `json:"data,omitempty"` - // Add additional event extension context attributes. - // +optional - ContextAttributes map[string]string `json:"contextAttributes,omitempty"` -} +const ( + FlowDirectiveContinue FlowDirectiveType = "continue" + FlowDirectiveExit FlowDirectiveType = "exit" + FlowDirectiveEnd FlowDirectiveType = "end" +) -// StateDataFilter ... -type StateDataFilter struct { - // Workflow expression to filter the state data input - Input string `json:"input,omitempty"` - // Workflow expression that filters the state data output - Output string `json:"output,omitempty"` +// Enumerated values for FlowDirective. +var validFlowDirectives = map[string]struct{}{ + "continue": {}, + "exit": {}, + "end": {}, } -// DataInputSchema Used to validate the workflow data input against a defined JSON Schema -// +builder-gen:new-call=ApplyDefault -type DataInputSchema struct { - // +kubebuilder:validation:Required - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - Schema *Object `json:"schema" validate:"required"` - // +kubebuilder:validation:Required - FailOnValidationErrors bool `json:"failOnValidationErrors"` +// IsEnum checks if the FlowDirective matches one of the enumerated values. +func (f *FlowDirective) IsEnum() bool { + _, exists := validFlowDirectives[f.Value] + return exists } -type dataInputSchemaUnmarshal DataInputSchema - -// UnmarshalJSON implements json.Unmarshaler -func (d *DataInputSchema) UnmarshalJSON(data []byte) error { - d.ApplyDefault() - - // expected: data = "{\"key\": \"value\"}" - // data = {"key": "value"} - // data = "file://..." 
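// Example (sketch): decoding the FlowDirective type defined above. An
// enumerated value (continue/exit/end) reports IsEnum() == true, while any
// other non-empty string is kept as a free-form directive. Assumes the model
// package in this diff; the directive strings are illustrative only.
func exampleFlowDirective() (bool, bool) {
	var d1, d2 FlowDirective
	_ = json.Unmarshal([]byte(`"exit"`), &d1)         // enumerated directive
	_ = json.Unmarshal([]byte(`"notify-admin"`), &d2) // free-form directive
	return d1.IsEnum(), d2.IsEnum() // true, false
}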
- // data = { "schema": "{\"key\": \"value\"}", "failOnValidationErrors": true } - // data = { "schema": {"key": "value"}, "failOnValidationErrors": true } - // data = { "schema": "file://...", "failOnValidationErrors": true } - - schemaString := "" - err := util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &schemaString, (*dataInputSchemaUnmarshal)(d)) - if err != nil { +func (f *FlowDirective) UnmarshalJSON(data []byte) error { + var value string + if err := json.Unmarshal(data, &value); err != nil { return err } - - if d.Schema != nil { - if d.Schema.Type == Map { - return nil - - } else if d.Schema.Type == String { - schemaString = d.Schema.StringValue - - } else { - return errors.New("invalid dataInputSchema must be a string or object") - } - } - - if schemaString != "" { - data = []byte(schemaString) - if bytes.TrimSpace(data)[0] != '{' { - data = []byte("\"" + schemaString + "\"") - } - } - - d.Schema = new(Object) - return util.UnmarshalObject("schema", data, &d.Schema) -} - -// ApplyDefault set the default values for Data Input Schema -func (d *DataInputSchema) ApplyDefault() { - d.FailOnValidationErrors = true -} - -// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your -// Workflow Expressions. -type Secrets []string - -type secretsUnmarshal Secrets - -// UnmarshalJSON implements json.Unmarshaler -func (s *Secrets) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) -} - -// Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. -type Constants struct { - // Data represents the generic structure of the constants value - // +optional - Data ConstantsData `json:",omitempty"` + f.Value = value + return nil } -// UnmarshalJSON implements json.Unmarshaler -func (c *Constants) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("constants", data, &c.Data) +func (f *FlowDirective) MarshalJSON() ([]byte, error) { + return json.Marshal(f.Value) } - -type ConstantsData map[string]json.RawMessage diff --git a/model/workflow_ref.go b/model/workflow_ref.go deleted file mode 100644 index c1fd1ce..0000000 --- a/model/workflow_ref.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// CompletionType define on how to complete branch execution. 
-type OnParentCompleteType string - -func (i OnParentCompleteType) KindValues() []string { - return []string{ - string(OnParentCompleteTypeTerminate), - string(OnParentCompleteTypeContinue), - } -} - -func (i OnParentCompleteType) String() string { - return string(i) -} - -const ( - OnParentCompleteTypeTerminate OnParentCompleteType = "terminate" - OnParentCompleteTypeContinue OnParentCompleteType = "continue" -) - -// WorkflowRef holds a reference for a workflow definition -// +builder-gen:new-call=ApplyDefault -type WorkflowRef struct { - // Sub-workflow unique id - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - // +optional - Version string `json:"version,omitempty"` - // Specifies if the subflow should be invoked sync or async. - // Defaults to sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - // +optional - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` - // onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - // is 'async'. Defaults to terminate. - // +kubebuilder:validation:Enum=terminate;continue - // +kubebuilder:default=terminate - OnParentComplete OnParentCompleteType `json:"onParentComplete,omitempty" validate:"required,oneofkind"` -} - -type workflowRefUnmarshal WorkflowRef - -// UnmarshalJSON implements json.Unmarshaler -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - s.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) -} - -// ApplyDefault set the default values for Workflow Ref -func (s *WorkflowRef) ApplyDefault() { - s.Invoke = InvokeKindSync - s.OnParentComplete = "terminate" -} diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go deleted file mode 100644 index 4a69fb5..0000000 --- a/model/workflow_ref_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestWorkflowRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "normal object test", - data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "normal object test & defaults", - data: `{"workflowId": "1"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "normal string test", - data: `"1"`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid string format", - data: `"1`, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{}, - err: "subFlowRef.workflowId must be string", - }, - { - desp: "invalid string or object", - data: `1`, - expect: WorkflowRef{}, - err: `subFlowRef must be string or object`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/workflow_ref_validator_test.go b/model/workflow_ref_validator_test.go deleted file mode 100644 index 96a7f9c..0000000 --- a/model/workflow_ref_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func TestWorkflowRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(&baseWorkflow.States[0], true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].OperationState.Actions[0].FunctionRef = nil - baseWorkflow.States[0].OperationState.Actions[0].SubFlowRef = &WorkflowRef{ - WorkflowID: "workflowID", - Invoke: InvokeKindSync, - OnParentComplete: OnParentCompleteTypeTerminate, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.WorkflowID = "" - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.workflowID is required -workflow.states[0].actions[0].subFlowRef.invoke is required -workflow.states[0].actions[0].subFlowRef.onParentComplete is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "invalid invoce" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "invalid parent complete" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.invoke need by one of [sync async] -workflow.states[0].actions[0].subFlowRef.onParentComplete need by one of [terminate continue]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/workflow_test.go b/model/workflow_test.go index a5aa42a..df90f1e 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -1,10 +1,10 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2025 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -16,662 +16,554 @@ package model import ( "encoding/json" - "fmt" - "net/http" - "net/http/httptest" + "errors" "testing" - "github.com/serverlessworkflow/sdk-go/v2/util" + "github.com/go-playground/validator/v10" + "github.com/stretchr/testify/assert" ) -func TestWorkflowStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Workflow - err string - } - testCases := []testCase{ - { - desp: "start string", - data: `{"start": "start state name"}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{}, - }, - err: ``, - }, - { - desp: "start empty and use the first state", - data: `{"states": [{"name": "start state name", "type": "operation"}]}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{ - { - BaseState: BaseState{ - Name: "start state name", - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - }, - }, - }, - }, - err: ``, - }, - { - desp: "start empty and states empty", - data: `{"states": []}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - }, - States: []State{}, - }, - err: ``, +func TestDocument_JSONMarshal(t *testing.T) { + doc := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", }, } - for _, tc := range testCases[1:] { - t.Run(tc.desp, func(t *testing.T) { - var v Workflow - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } + data, err := json.Marshal(doc) + assert.NoError(t, err) + + expectedJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + // Use JSON comparison to avoid formatting mismatches + var expected, actual map[string]interface{} + assert.NoError(t, json.Unmarshal([]byte(expectedJSON), &expected)) + assert.NoError(t, json.Unmarshal(data, &actual)) + assert.Equal(t, expected, actual) } -func TestContinueAsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ContinueAs - err string - } - testCases := []testCase{ - { - desp: "string", - data: `"1"`, - expect: ContinueAs{ - WorkflowID: "1", - }, - err: ``, - }, - { - desp: "object all field set", - data: `{"workflowId": "1", "version": "2", "data": "3", "workflowExecTimeout": {"duration": "PT1H", "interrupt": true, "runBefore": "4"}}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "2", - Data: FromString("3"), - WorkflowExecTimeout: 
WorkflowExecTimeout{ - Duration: "PT1H", - Interrupt: true, - RunBefore: "4", - }, - }, - err: ``, - }, - { - desp: "object optional field unset", - data: `{"workflowId": "1"}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "", - Data: Object{}, - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "", - Interrupt: false, - RunBefore: "", - }, - }, - err: ``, - }, - { - desp: "invalid string format", - data: `"{`, - expect: ContinueAs{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid object format", - data: `{"workflowId": 1}`, - expect: ContinueAs{}, - err: `continueAs.workflowId must be string`, +func TestDocument_JSONUnmarshal(t *testing.T) { + inputJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "title": "Example Workflow", + "summary": "This is a sample workflow document.", + "tags": { + "env": "prod", + "team": "workflow" + }, + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) + + expected := Document{ + DSL: "1.0.0", + Namespace: "example-namespace", + Name: "example-name", + Version: "1.0.0", + Title: "Example Workflow", + Summary: "This is a sample workflow document.", + Tags: map[string]string{ + "env": "prod", + "team": "workflow", + }, + Metadata: map[string]interface{}{ + "author": "John Doe", + "created": "2025-01-01", }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ContinueAs - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + assert.Equal(t, expected, doc) } -func TestEndUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect End - err string - } - testCases := []testCase{ - { - desp: "bool success", - data: `true`, - expect: End{ - Terminate: true, - }, - err: ``, - }, - { - desp: "string fail", - data: `"true"`, - expect: End{}, - err: `end must be bool or object`, - }, - { - desp: `object success`, - data: `{"terminate": true}`, - expect: End{ - Terminate: true, - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"terminate": "true"}`, - expect: End{ - Terminate: true, - }, - err: `end.terminate must be bool`, - }, - { - desp: `object key invalid`, - data: `{"terminate_parameter_invalid": true}`, - expect: End{}, - err: ``, +func TestDocument_JSONUnmarshal_InvalidJSON(t *testing.T) { + invalidJSON := `{ + "dsl": "1.0.0", + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0", + "tags": { + "env": "prod", + "team": "workflow" + "metadata": { + "author": "John Doe", + "created": "2025-01-01" + } + }` // Missing closing brace for "tags" + + var doc Document + err := json.Unmarshal([]byte(invalidJSON), &doc) + assert.Error(t, err) +} + +func TestDocument_Validation_MissingRequiredField(t *testing.T) { + inputJSON := `{ + "namespace": "example-namespace", + "name": "example-name", + "version": "1.0.0" + }` // Missing "dsl" + + var doc Document + err := json.Unmarshal([]byte(inputJSON), &doc) + assert.NoError(t, err) // JSON is valid for unmarshalling + + // Validate the struct + err = validate.Struct(doc) + assert.Error(t, err) + + // Assert that the error is specifically about the missing "dsl" field + assert.Contains(t, err.Error(), "Key: 'Document.DSL' Error:Field validation for 'DSL' failed on the 'required' 
tag") +} + +func TestSchemaValidation(t *testing.T) { + + tests := []struct { + name string + jsonInput string + valid bool + }{ + // Valid Cases + { + name: "Valid Inline Schema", + jsonInput: `{ + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid External Schema Without Name", + jsonInput: `{ + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + { + name: "Valid Inline Schema with Format", + jsonInput: `{ + "format": "yaml", + "document": "{\"key\":\"value\"}" + }`, + valid: true, + }, + { + name: "Valid External Schema with Format", + jsonInput: `{ + "format": "xml", + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: true, + }, + // Invalid Cases + { + name: "Invalid Both Document and Resource", + jsonInput: `{ + "document": "{\"key\":\"value\"}", + "resource": { + "endpoint": { + "uri": "http://example.com/schema" + } + } + }`, + valid: false, + }, + { + name: "Invalid Missing Both Document and Resource", + jsonInput: `{ + "format": "json" + }`, + valid: false, + }, + { + name: "Invalid Resource Without Endpoint", + jsonInput: `{ + "resource": { + "name": "external-schema" + } + }`, + valid: false, + }, + { + name: "Invalid Resource with Invalid URL", + jsonInput: `{ + "resource": { + "name": "external-schema", + "endpoint": { + "uri": "not-a-valid-url" + } + } + }`, + valid: false, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v End - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var schema Schema + err := json.Unmarshal([]byte(tt.jsonInput), &schema) + if tt.valid { + // Assert no unmarshalling error + assert.NoError(t, err) + + // Validate the struct + err = validate.Struct(schema) + assert.NoError(t, err, "Expected valid schema but got validation error: %v", err) + } else { + // Assert unmarshalling or validation error + if err == nil { + err = validate.Struct(schema) + } + assert.Error(t, err, "Expected validation error but got none") + } }) } } -func TestWorkflowExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowExecTimeout - err string - } +type InputTestCase struct { + Name string + Input Input + ShouldErr bool +} - testCases := []testCase{ - { - desp: "string success", - data: `"PT15M"`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: ``, - }, - { - desp: "string fail", - data: `PT15M`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: `invalid character 'P' looking for beginning of value`, - }, - { - desp: `object success`, - data: `{"duration": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", +func TestInputValidation(t *testing.T) { + cases := []InputTestCase{ + { + Name: "Valid Input with Schema and From (object)", + Input: Input{ + Schema: &Schema{ + Format: "json", + Document: func() *string { + doc := "example schema" + return &doc + }(), + }, + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{ + "key": "value", + }, + }, }, - err: ``, + ShouldErr: false, }, { - desp: `object 
fail`, - data: `{"duration": PT15M}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", + Name: "Invalid Input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "example input", + }, }, - err: `invalid character 'P' looking for beginning of value`, + ShouldErr: true, }, { - desp: `object key invalid`, - data: `{"duration_invalid": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "unlimited", + Name: "Valid Input with Schema and From (expr)", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, + From: &ObjectOrRuntimeExpr{ + Value: "${ expression }", + }, }, - err: ``, + ShouldErr: true, }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowExecTimeout - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Start - err string - } - - testCases := []testCase{ { - desp: "string success", - data: `"start state"`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Empty From (expr)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: "", + }, }, - err: ``, + ShouldErr: true, }, { - desp: "string fail", - data: `start state`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Empty From (object)", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: map[string]interface{}{}, + }, }, - err: `invalid character 's' looking for beginning of value`, + ShouldErr: true, }, { - desp: `object success`, - data: `{"stateName": "start state"}`, - expect: Start{ - StateName: "start state", + Name: "Invalid Input with Unsupported From Type", + Input: Input{ + From: &ObjectOrRuntimeExpr{ + Value: 123, + }, }, - err: ``, + ShouldErr: true, }, { - desp: `object fail`, - data: `{"stateName": start state}`, - expect: Start{ - StateName: "start state", + Name: "Valid Input with Schema Only", + Input: Input{ + Schema: &Schema{ + Format: "json", + }, }, - err: `invalid character 's' looking for beginning of value`, + ShouldErr: false, }, { - desp: `object key invalid`, - data: `{"stateName_invalid": "start state"}`, - expect: Start{ - StateName: "", - }, - err: ``, + Name: "Input with Neither Schema Nor From", + Input: Input{}, + ShouldErr: false, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Start - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + err := validate.Struct(tc.Input) + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") + } }) } } -func TestCronUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Cron - err string - } - - testCases := []testCase{ +func TestFlowDirectiveValidation(t *testing.T) { + cases := []struct { + Name string + Input FlowDirective + IsEnum bool // Expected result for IsEnum method. + ShouldErr bool // Expected result for validation. + }{ { - desp: "string success", - data: `"0 15,30,45 * ? * *"`, - expect: Cron{ - Expression: "0 15,30,45 * ? 
* *", - }, - err: ``, + Name: "Valid Enum: continue", + Input: FlowDirective{Value: "continue"}, + IsEnum: true, + ShouldErr: false, }, { - desp: "string fail", - data: `0 15,30,45 * ? * *`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: `invalid character '1' after top-level value`, + Name: "Valid Enum: exit", + Input: FlowDirective{Value: "exit"}, + IsEnum: true, + ShouldErr: false, }, { - desp: `object success`, - data: `{"expression": "0 15,30,45 * ? * *"}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: ``, + Name: "Valid Enum: end", + Input: FlowDirective{Value: "end"}, + IsEnum: true, + ShouldErr: false, }, { - desp: `object fail`, - data: `{"expression": "0 15,30,45 * ? * *}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: `unexpected end of JSON input`, + Name: "Valid Free-form String", + Input: FlowDirective{Value: "custom-directive"}, + IsEnum: false, + ShouldErr: false, }, { - desp: `object key invalid`, - data: `{"expression_invalid": "0 15,30,45 * ? * *"}`, - expect: Cron{}, - err: ``, + Name: "Invalid Empty String", + Input: FlowDirective{Value: ""}, + IsEnum: false, + ShouldErr: true, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Cron - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + // Validate the struct + err := validate.Var(tc.Input.Value, "required") + if tc.ShouldErr { + assert.Error(t, err, "expected an error, but got none") + } else { + assert.NoError(t, err, "expected no error, but got one") } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) + // Check IsEnum result + assert.Equal(t, tc.IsEnum, tc.Input.IsEnum(), "unexpected IsEnum result") }) } } -func TestTransitionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Transition - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"next state"`, - expect: Transition{ - NextState: "next state", - }, - err: ``, +func TestUse_MarshalJSON(t *testing.T) { + use := Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), }, - { - desp: `object success`, - data: `{"nextState": "next state"}`, - expect: Transition{ - NextState: "next state", - }, - err: ``, + Errors: map[string]*Error{ + "error1": {Type: NewUriTemplate("http://example.com/errors"), Status: 404}, }, - { - desp: `object fail`, - data: `{"nextState": "next state}`, - expect: Transition{ - NextState: "next state", + Extensions: ExtensionList{ + {Key: "ext1", Extension: &Extension{Extend: "call"}}, + {Key: "ext2", Extension: &Extension{Extend: "emit"}}, + {Key: "ext3", Extension: &Extension{Extend: "for"}}, + }, + Functions: NamedTaskMap{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com/"), Method: "GET"}}, + }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, }, - err: `unexpected end of JSON input`, }, - { - desp: `object key invalid`, - data: `{"nextState_invalid": "next state"}`, - expect: Transition{}, - err: ``, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, }, } - for _, tc := range 
testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Transition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } + data, err := json.Marshal(use) + assert.NoError(t, err) + assert.JSONEq(t, `{ + "authentications": {"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [ + {"ext1": {"extend": "call"}}, + {"ext2": {"extend": "emit"}}, + {"ext3": {"extend": "for"}} + ], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com/", "method": "GET"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }`, string(data)) } -func TestDataInputSchemaUnmarshalJSON(t *testing.T) { +func TestUse_UnmarshalJSON(t *testing.T) { + jsonData := `{ + "authentications": {"auth1": { "basic": {"username": "alice", "password": "secret"}}}, + "errors": {"error1": {"type": "http://example.com/errors", "status": 404}}, + "extensions": [{"ext1": {"extend": "call"}}], + "functions": {"func1": {"call": "http", "with": {"endpoint": "http://example.com"}}}, + "retries": {"retry1": {"delay": "PT5S", "limit": {"attempt": {"count": 3}}}}, + "secrets": ["secret1", "secret2"], + "timeouts": {"timeout1": {"after": "PT1M"}}, + "catalogs": {"catalog1": {"endpoint": "http://example.com"}} + }` - var schemaName Object - err := json.Unmarshal([]byte("{\"key\": \"value\"}"), &schemaName) - if !assert.NoError(t, err) { - return - } + var use Use + err := json.Unmarshal([]byte(jsonData), &use) + assert.NoError(t, err) - type testCase struct { - desp string - data string - expect DataInputSchema - err string - } + assert.NotNil(t, use.Authentications["auth1"]) + assert.Equal(t, "alice", use.Authentications["auth1"].Basic.Username) + assert.Equal(t, "secret", use.Authentications["auth1"].Basic.Password) - testCases := []testCase{ - { - desp: "string success", - data: "{\"key\": \"value\"}", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: "string fail", - data: "{\"key\": }", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `invalid character '}' looking for beginning of value`, - }, - { - desp: `object success (without quotes)`, - data: `{"key": "value"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success`, - data: `{"schema": "{\"key\": \"value\"}"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success (without quotes)`, - data: `{"schema": {"key": "value"}}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object fail`, - data: `{"schema": "schema name}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `unexpected end of JSON input`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DataInputSchema - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err, tc.desp) - assert.Regexp(t, tc.err, err, tc.desp) - 
return - } + assert.NotNil(t, use.Errors["error1"]) + assert.Equal(t, "http://example.com/errors", use.Errors["error1"].Type.String()) + assert.Equal(t, 404, use.Errors["error1"].Status) - assert.NoError(t, err, tc.desp) - assert.Equal(t, tc.expect.Schema, v.Schema, tc.desp) - assert.Equal(t, tc.expect.FailOnValidationErrors, v.FailOnValidationErrors, tc.desp) - }) - } + assert.NotNil(t, use.Extensions.Key("ext1")) + assert.Equal(t, "call", use.Extensions.Key("ext1").Extend) + + assert.NotNil(t, use.Functions["func1"]) + assert.IsType(t, &CallHTTP{With: HTTPArguments{Endpoint: NewEndpoint("http://example.com")}}, use.Functions["func1"]) + + assert.NotNil(t, use.Retries["retry1"]) + assert.Equal(t, "PT5S", use.Retries["retry1"].Delay.AsExpression()) + assert.Equal(t, 3, use.Retries["retry1"].Limit.Attempt.Count) + + assert.Equal(t, []string{"secret1", "secret2"}, use.Secrets) + + assert.NotNil(t, use.Timeouts["timeout1"]) + assert.Equal(t, "PT1M", use.Timeouts["timeout1"].After.AsExpression()) + + assert.NotNil(t, use.Catalogs["catalog1"]) + assert.Equal(t, "http://example.com", use.Catalogs["catalog1"].Endpoint.URITemplate.String()) } -func TestConstantsUnmarshalJSON(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"testkey":"testvalue"}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - util.HttpClient = *server.Client() - - type testCase struct { - desp string - data string - expect Constants - err string - } - testCases := []testCase{ - { - desp: "object success", - data: `{"testkey":"testvalue}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `unexpected end of JSON input`, +func TestUse_Validation(t *testing.T) { + use := &Use{ + Authentications: map[string]*AuthenticationPolicy{ + "auth1": NewBasicAuth("alice", "secret"), }, - { - desp: "object success", - data: `[]`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - // TODO: improve message: field is empty - err: `constants must be string or object`, + Errors: map[string]*Error{ + "error1": {Type: &URITemplateOrRuntimeExpr{&LiteralUri{"http://example.com/errors"}}, Status: 404}, }, - { - desp: "object success", - data: `{"testkey":"testvalue"}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: ``, + Extensions: ExtensionList{}, + Functions: map[string]Task{ + "func1": &CallHTTP{Call: "http", With: HTTPArguments{Endpoint: NewEndpoint("http://example.com"), Method: "GET"}}, }, - { - desp: "file success", - data: fmt.Sprintf(`"%s/test.json"`, server.URL), - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, + Retries: map[string]*RetryPolicy{ + "retry1": { + Delay: NewDurationExpr("PT5S"), + Limit: RetryLimit{Attempt: &RetryLimitAttempt{Count: 3}}, }, - err: ``, }, - { - desp: "file success", - data: `"uri_invalid"`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `file not found: "uri_invalid"`, + Secrets: []string{"secret1", "secret2"}, + Timeouts: map[string]*Timeout{"timeout1": {After: NewDurationExpr("PT1M")}}, + Catalogs: map[string]*Catalog{ + "catalog1": {Endpoint: NewEndpoint("http://example.com")}, }, } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Constants - err := json.Unmarshal([]byte(tc.data), &v) + 
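	// validate.Struct descends into each map through the "dive" tags declared on
	// Use, so the nested authentication, retry, timeout and catalog entries are
	// validated together with the top-level struct.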
err := validate.Struct(use) + assert.NoError(t, err) - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } + // Test with missing required fields + use.Catalogs["catalog1"].Endpoint = nil + err = validate.Struct(use) + assert.Error(t, err) - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) + var validationErrors validator.ValidationErrors + if errors.As(err, &validationErrors) { + for _, validationErr := range validationErrors { + t.Logf("Validation failed on field '%s' with tag '%s'", validationErr.Namespace(), validationErr.Tag()) + } + + assert.Contains(t, validationErrors.Error(), "Catalogs[catalog1].Endpoint") + assert.Contains(t, validationErrors.Error(), "required") } } diff --git a/model/workflow_validator.go b/model/workflow_validator.go deleted file mode 100644 index dd9d1e7..0000000 --- a/model/workflow_validator.go +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "context" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -type contextValueKey string - -const ValidatorContextValue contextValueKey = "value" - -type WorkflowValidator func(mapValues ValidatorContext, sl validator.StructLevel) - -func ValidationWrap(fnCtx WorkflowValidator) validator.StructLevelFuncCtx { - return func(ctx context.Context, structLevel validator.StructLevel) { - if fnCtx != nil { - if mapValues, ok := ctx.Value(ValidatorContextValue).(ValidatorContext); ok { - fnCtx(mapValues, structLevel) - } - } - } -} - -// +builder-gen:ignore=true -type ValidatorContext struct { - States map[string]State - Functions map[string]Function - Events map[string]Event - Retries map[string]Retry - Errors map[string]Error -} - -func (c *ValidatorContext) init(workflow *Workflow) { - c.States = make(map[string]State, len(workflow.States)) - for _, state := range workflow.States { - c.States[state.BaseState.Name] = state - } - - c.Functions = make(map[string]Function, len(workflow.Functions)) - for _, function := range workflow.Functions { - c.Functions[function.Name] = function - } - - c.Events = make(map[string]Event, len(workflow.Events)) - for _, event := range workflow.Events { - c.Events[event.Name] = event - } - - c.Retries = make(map[string]Retry, len(workflow.Retries)) - for _, retry := range workflow.Retries { - c.Retries[retry.Name] = retry - } - - c.Errors = make(map[string]Error, len(workflow.Errors)) - for _, error := range workflow.Errors { - c.Errors[error.Name] = error - } -} - -func (c *ValidatorContext) ExistState(name string) bool { - if c.States == nil { - return true - } - _, ok := c.States[name] - return ok -} - -func (c *ValidatorContext) ExistFunction(name string) bool { - if c.Functions == nil { - return true - } - _, ok := c.Functions[name] - return ok -} - -func (c *ValidatorContext) ExistEvent(name string) bool { - if c.Events == nil { - 
return true - } - _, ok := c.Events[name] - return ok -} - -func (c *ValidatorContext) ExistRetry(name string) bool { - if c.Retries == nil { - return true - } - _, ok := c.Retries[name] - return ok -} - -func (c *ValidatorContext) ExistError(name string) bool { - if c.Errors == nil { - return true - } - _, ok := c.Errors[name] - return ok -} - -func NewValidatorContext(object any) context.Context { - contextValue := ValidatorContext{} - - if workflow, ok := object.(*Workflow); ok { - for i := range workflow.States { - s := &workflow.States[i] - if s.BaseState.Transition != nil { - s.BaseState.Transition.stateParent = s - } - for _, onError := range s.BaseState.OnErrors { - if onError.Transition != nil { - onError.Transition.stateParent = s - } - } - if s.Type == StateTypeSwitch { - if s.SwitchState.DefaultCondition.Transition != nil { - s.SwitchState.DefaultCondition.Transition.stateParent = s - } - for _, e := range s.SwitchState.EventConditions { - if e.Transition != nil { - e.Transition.stateParent = s - } - } - for _, d := range s.SwitchState.DataConditions { - if d.Transition != nil { - d.Transition.stateParent = s - } - } - } - } - contextValue.init(workflow) - } - - return context.WithValue(context.Background(), ValidatorContextValue, contextValue) -} - -func init() { - // TODO: create states graph to complex check - - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onErrorStructLevelValidationCtx), OnError{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(transitionStructLevelValidationCtx), Transition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(startStructLevelValidationCtx), Start{}) -} - -func startStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - start := structLevel.Current().Interface().(Start) - if start.StateName != "" && !ctx.ExistState(start.StateName) { - structLevel.ReportError(start.StateName, "StateName", "stateName", val.TagExists, "") - return - } -} - -func onErrorStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - onError := structLevel.Current().Interface().(OnError) - hasErrorRef := onError.ErrorRef != "" - hasErrorRefs := len(onError.ErrorRefs) > 0 - - if !hasErrorRef && !hasErrorRefs { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagRequired, "") - } else if hasErrorRef && hasErrorRefs { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExclusive, "") - return - } - - if onError.ErrorRef != "" && !ctx.ExistError(onError.ErrorRef) { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExists, "") - } - - for _, errorRef := range onError.ErrorRefs { - if !ctx.ExistError(errorRef) { - structLevel.ReportError(onError.ErrorRefs, "ErrorRefs", "ErrorRefs", val.TagExists, "") - } - } -} - -func transitionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - // Naive check if transitions exist - transition := structLevel.Current().Interface().(Transition) - if ctx.ExistState(transition.NextState) { - if transition.stateParent != nil { - parentBaseState := transition.stateParent - - if parentBaseState.Name == transition.NextState { - // TODO: Improve recursive check - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagRecursiveState, parentBaseState.Name) - } - - if parentBaseState.UsedForCompensation && !ctx.States[transition.NextState].BaseState.UsedForCompensation { - structLevel.ReportError(transition.NextState, 
"NextState", "NextState", val.TagTransitionUseForCompensation, "") - } - - if !parentBaseState.UsedForCompensation && ctx.States[transition.NextState].BaseState.UsedForCompensation { - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionMainWorkflow, "") - } - } - - } else { - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagExists, "") - } -} - -func validTransitionAndEnd(structLevel validator.StructLevel, field any, transition *Transition, end *End) { - hasTransition := transition != nil - isEnd := end != nil && (end.Terminate || end.Compensate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end - - if !hasTransition && !isEnd { - structLevel.ReportError(field, "Transition", "transition", val.TagRequired, "") - } else if hasTransition && isEnd { - structLevel.ReportError(field, "Transition", "transition", val.TagExclusive, "") - } -} - -func validationNotExclusiveParameters(values []bool) bool { - hasOne := false - hasTwo := false - - for i, val1 := range values { - if val1 { - hasOne = true - for j, val2 := range values { - if i != j && val2 { - hasTwo = true - break - } - } - break - } - } - - return hasOne && hasTwo -} diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go deleted file mode 100644 index 2a6b5a0..0000000 --- a/model/workflow_validator_test.go +++ /dev/null @@ -1,544 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func buildWorkflow() *Workflow { - return &Workflow{ - BaseWorkflow: BaseWorkflow{ - ID: "id", - Key: "key", - Name: "name", - SpecVersion: "0.8", - Version: "0.1", - ExpressionLang: JqExpressionLang, - }, - } -} - -func buildEndByState(state *State, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - state.BaseState.End = end - return end -} - -func buildEndByDefaultCondition(defaultCondition *DefaultCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - defaultCondition.End = end - return end -} - -func buildEndByDataCondition(dataCondition *DataCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - dataCondition.End = end - return end -} - -func buildEndByEventCondition(eventCondition *EventCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - eventCondition.End = end - return end -} - -func buildStart(workflow *Workflow, state *State) { - start := &Start{ - StateName: state.BaseState.Name, - } - workflow.BaseWorkflow.Start = start -} - -func buildTransitionByState(state, nextState *State, compensate bool) { - state.BaseState.Transition = &Transition{ - NextState: nextState.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDataCondition(dataCondition *DataCondition, state *State, compensate bool) { - dataCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByEventCondition(eventCondition *EventCondition, state *State, compensate bool) { - eventCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDefaultCondition(defaultCondition *DefaultCondition, state *State) { - defaultCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - } -} - -func buildTimeouts(workflow *Workflow) *Timeouts { - timeouts := Timeouts{} - workflow.BaseWorkflow.Timeouts = &timeouts - return workflow.BaseWorkflow.Timeouts -} - -func TestBaseWorkflowStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "id exclude key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "id" - model.Key = "" - return *model - }, - }, - { - Desp: "key exclude id", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "key" - return *model - }, - }, - { - Desp: "without id and key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "" - return *model - }, - Err: `workflow.id required when "workflow.key" is not defined -workflow.key required when "workflow.id" is not defined`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.ExpressionLang = JqExpressionLang + "invalid" - return *model - }, - Err: 
`workflow.expressionLang need by one of [jq jsonpath cel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestContinueAsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].BaseState.End.ContinueAs = &ContinueAs{ - WorkflowID: "sub workflow", - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "P1M", - }, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End.ContinueAs.WorkflowID = "" - return *model - }, - Err: `workflow.states[0].end.continueAs.workflowID is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOnErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - baseWorkflow.States[0].BaseState.OnErrors = []OnError{{ - ErrorRef: "error 1", - }, { - ErrorRefs: []string{"error 1", "error 2"}, - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[0].ErrorRef = "error 1" - model.States[0].OnErrors[0].ErrorRefs = []string{"error 2"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists and exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists errorRef", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef don't exist "invalid error name"`, - }, - { - Desp: "exists errorRefs", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRefs don't exist ["invalid error name"]`, - }, - { - Desp: "duplicate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[1].ErrorRefs = []string{"error 1", "error 1"} - return *model - }, - Err: `workflow.states[0].onErrors[1].errorRefs has duplicate value`, 
- }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStartStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildStart(baseWorkflow, operationState) - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "" - return *model - }, - Err: `workflow.start.stateName is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "start state not found" - return *model - }, - Err: `workflow.start.stateName don't exist "start state not found"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestTransitionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 5) - - operationState := buildOperationState(baseWorkflow, "start state") - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - operationState2.BaseState.CompensatedBy = "compensation next state 1" - action2 := buildActionByOperationState(operationState2, "action 1") - buildFunctionRef(baseWorkflow, action2, "function 2") - - buildTransitionByState(operationState, operationState2, false) - - operationState3 := buildOperationState(baseWorkflow, "compensation next state 1") - operationState3.BaseState.UsedForCompensation = true - action3 := buildActionByOperationState(operationState3, "action 1") - buildFunctionRef(baseWorkflow, action3, "function 3") - - operationState4 := buildOperationState(baseWorkflow, "compensation next state 2") - operationState4.BaseState.UsedForCompensation = true - action4 := buildActionByOperationState(operationState4, "action 1") - buildFunctionRef(baseWorkflow, action4, "function 4") - - buildTransitionByState(operationState3, operationState4, false) - - operationState5 := buildOperationState(baseWorkflow, "compensation next state 3") - buildEndByState(operationState5, true, false) - operationState5.BaseState.UsedForCompensation = true - action5 := buildActionByOperationState(operationState5, "action 5") - buildFunctionRef(baseWorkflow, action5, "function 5") - - buildTransitionByState(operationState4, operationState5, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "state recursive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].transition.nextState can't no be recursive "start state"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = "invalid next state" - return *model - }, - Err: `workflow.states[0].transition.nextState don't exist "invalid next state"`, - }, - { - Desp: "transitionusedforcompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - 
model.States[3].BaseState.UsedForCompensation = false - return *model - }, - Err: `Key: 'Workflow.States[2].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transitionusedforcompensation' tag -Key: 'Workflow.States[3].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - { - Desp: "transtionmainworkflow", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = model.States[3].BaseState.Name - return *model - }, - Err: `Key: 'Workflow.States[0].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataInputSchemaStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - sampleSchema := FromString("sample schema") - - testCases := []ValidationCase{ - { - Desp: "empty DataInputSchema", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{} - return *model - }, - Err: `workflow.dataInputSchema.schema is required`, - }, - { - Desp: "filled Schema, default failOnValidationErrors", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{ - Schema: &sampleSchema, - } - return *model - }, - }, - } - - //fmt.Printf("%+v", testCases[0].Model) - StructLevelValidationCtx(t, testCases) -} - -func TestSecretsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "workflow secrets.name repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Secrets = []string{"secret 1", "secret 1"} - return *model - }, - Err: `workflow.secrets has duplicate value`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Errors[0].Name = "" - return *model - }, - Err: `workflow.errors[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Errors = Errors{model.Errors[0], model.Errors[0]} - return *model - }, - Err: `workflow.errors has duplicate "name"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -type ValidationCase struct { - Desp string - Model func() Workflow - Err string -} - -func StructLevelValidationCtx(t *testing.T, testCases 
[]ValidationCase) { - for _, tc := range testCases { - t.Run(tc.Desp, func(t *testing.T) { - model := tc.Model() - err := val.GetValidator().StructCtx(NewValidatorContext(&model), model) - err = val.WorkflowError(err) - if tc.Err != "" { - if assert.Error(t, err) { - assert.Equal(t, tc.Err, err.Error()) - } - } else { - assert.NoError(t, err) - } - }) - } -} diff --git a/model/zz_generated.buildergen.go b/model/zz_generated.buildergen.go deleted file mode 100644 index 42564fe..0000000 --- a/model/zz_generated.buildergen.go +++ /dev/null @@ -1,3139 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Code generated by builder-gen. DO NOT EDIT. - -package model - -import ( - floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - intstr "k8s.io/apimachinery/pkg/util/intstr" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewActionBuilder() *ActionBuilder { - builder := &ActionBuilder{} - builder.model = Action{} - builder.model.ApplyDefault() - builder.actiondatafilter = NewActionDataFilterBuilder() - return builder -} - -type ActionBuilder struct { - model Action - functionref *FunctionRefBuilder - eventref *EventRefBuilder - subflowref *WorkflowRefBuilder - sleep *SleepBuilder - actiondatafilter *ActionDataFilterBuilder -} - -func (b *ActionBuilder) ID(input string) *ActionBuilder { - b.model.ID = input - return b -} - -func (b *ActionBuilder) Name(input string) *ActionBuilder { - b.model.Name = input - return b -} - -func (b *ActionBuilder) FunctionRef() *FunctionRefBuilder { - if b.functionref == nil { - b.functionref = NewFunctionRefBuilder() - } - return b.functionref -} - -func (b *ActionBuilder) EventRef() *EventRefBuilder { - if b.eventref == nil { - b.eventref = NewEventRefBuilder() - } - return b.eventref -} - -func (b *ActionBuilder) SubFlowRef() *WorkflowRefBuilder { - if b.subflowref == nil { - b.subflowref = NewWorkflowRefBuilder() - } - return b.subflowref -} - -func (b *ActionBuilder) Sleep() *SleepBuilder { - if b.sleep == nil { - b.sleep = NewSleepBuilder() - } - return b.sleep -} - -func (b *ActionBuilder) RetryRef(input string) *ActionBuilder { - b.model.RetryRef = input - return b -} - -func (b *ActionBuilder) NonRetryableErrors(input []string) *ActionBuilder { - b.model.NonRetryableErrors = input - return b -} - -func (b *ActionBuilder) RetryableErrors(input []string) *ActionBuilder { - b.model.RetryableErrors = input - return b -} - -func (b *ActionBuilder) ActionDataFilter() *ActionDataFilterBuilder { - return b.actiondatafilter -} - -func (b *ActionBuilder) Condition(input string) *ActionBuilder { - b.model.Condition = input - return b -} - -func (b *ActionBuilder) Build() Action { - if b.functionref != nil { - functionref := b.functionref.Build() - b.model.FunctionRef = &functionref - } - if b.eventref != nil { - eventref := 
b.eventref.Build() - b.model.EventRef = &eventref - } - if b.subflowref != nil { - subflowref := b.subflowref.Build() - b.model.SubFlowRef = &subflowref - } - if b.sleep != nil { - sleep := b.sleep.Build() - b.model.Sleep = &sleep - } - b.model.ActionDataFilter = b.actiondatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewActionDataFilterBuilder() *ActionDataFilterBuilder { - builder := &ActionDataFilterBuilder{} - builder.model = ActionDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type ActionDataFilterBuilder struct { - model ActionDataFilter -} - -func (b *ActionDataFilterBuilder) FromStateData(input string) *ActionDataFilterBuilder { - b.model.FromStateData = input - return b -} - -func (b *ActionDataFilterBuilder) UseResults(input bool) *ActionDataFilterBuilder { - b.model.UseResults = input - return b -} - -func (b *ActionDataFilterBuilder) Results(input string) *ActionDataFilterBuilder { - b.model.Results = input - return b -} - -func (b *ActionDataFilterBuilder) ToStateData(input string) *ActionDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *ActionDataFilterBuilder) Build() ActionDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthBuilder() *AuthBuilder { - builder := &AuthBuilder{} - builder.model = Auth{} - builder.properties = NewAuthPropertiesBuilder() - return builder -} - -type AuthBuilder struct { - model Auth - properties *AuthPropertiesBuilder -} - -func (b *AuthBuilder) Name(input string) *AuthBuilder { - b.model.Name = input - return b -} - -func (b *AuthBuilder) Scheme(input AuthType) *AuthBuilder { - b.model.Scheme = input - return b -} - -func (b *AuthBuilder) Properties() *AuthPropertiesBuilder { - return b.properties -} - -func (b *AuthBuilder) Build() Auth { - b.model.Properties = b.properties.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthPropertiesBuilder() *AuthPropertiesBuilder { - builder := &AuthPropertiesBuilder{} - builder.model = AuthProperties{} - return builder -} - -type AuthPropertiesBuilder struct { - model AuthProperties - basic *BasicAuthPropertiesBuilder - bearer *BearerAuthPropertiesBuilder - oauth2 *OAuth2AuthPropertiesBuilder -} - -func (b *AuthPropertiesBuilder) Basic() *BasicAuthPropertiesBuilder { - if b.basic == nil { - b.basic = NewBasicAuthPropertiesBuilder() - } - return b.basic -} - -func (b *AuthPropertiesBuilder) Bearer() *BearerAuthPropertiesBuilder { - if b.bearer == nil { - b.bearer = NewBearerAuthPropertiesBuilder() - } - return b.bearer -} - -func (b *AuthPropertiesBuilder) OAuth2() *OAuth2AuthPropertiesBuilder { - if b.oauth2 == nil { - b.oauth2 = NewOAuth2AuthPropertiesBuilder() - } - return b.oauth2 -} - -func (b *AuthPropertiesBuilder) Build() AuthProperties { - if b.basic != nil { - basic := b.basic.Build() - b.model.Basic = &basic - } - if b.bearer != nil { - bearer := b.bearer.Build() - b.model.Bearer = &bearer - } - if b.oauth2 != nil { - oauth2 := b.oauth2.Build() - b.model.OAuth2 = &oauth2 - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewAuthsBuilder() *AuthsBuilder { - builder := &AuthsBuilder{} - builder.model = Auths{} - return builder -} - -type AuthsBuilder struct { - model Auths -} - -func (b *AuthsBuilder) Build() Auths { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBaseStateBuilder() *BaseStateBuilder { - builder := &BaseStateBuilder{} - builder.model = BaseState{} - builder.onerrors = []*OnErrorBuilder{} - return builder -} - -type BaseStateBuilder struct { - model BaseState - onerrors []*OnErrorBuilder - transition *TransitionBuilder - statedatafilter *StateDataFilterBuilder - end *EndBuilder -} - -func (b *BaseStateBuilder) ID(input string) *BaseStateBuilder { - b.model.ID = input - return b -} - -func (b *BaseStateBuilder) Name(input string) *BaseStateBuilder { - b.model.Name = input - return b -} - -func (b *BaseStateBuilder) Type(input StateType) *BaseStateBuilder { - b.model.Type = input - return b -} - -func (b *BaseStateBuilder) AddOnErrors() *OnErrorBuilder { - builder := NewOnErrorBuilder() - b.onerrors = append(b.onerrors, builder) - return builder -} - -func (b *BaseStateBuilder) RemoveOnErrors(remove *OnErrorBuilder) { - for i, val := range b.onerrors { - if val == remove { - b.onerrors[i] = b.onerrors[len(b.onerrors)-1] - b.onerrors = b.onerrors[:len(b.onerrors)-1] - } - } -} -func (b *BaseStateBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *BaseStateBuilder) StateDataFilter() *StateDataFilterBuilder { - if b.statedatafilter == nil { - b.statedatafilter = NewStateDataFilterBuilder() - } - return b.statedatafilter -} - -func (b *BaseStateBuilder) CompensatedBy(input string) *BaseStateBuilder { - b.model.CompensatedBy = input - return b -} - -func (b *BaseStateBuilder) UsedForCompensation(input bool) *BaseStateBuilder { - b.model.UsedForCompensation = input - return b -} - -func (b *BaseStateBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *BaseStateBuilder) Build() BaseState { - b.model.OnErrors = []OnError{} - for _, v := range b.onerrors { - b.model.OnErrors = append(b.model.OnErrors, v.Build()) - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.statedatafilter != nil { - statedatafilter := b.statedatafilter.Build() - b.model.StateDataFilter = &statedatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBaseWorkflowBuilder() *BaseWorkflowBuilder { - builder := &BaseWorkflowBuilder{} - builder.model = BaseWorkflow{} - builder.model.ApplyDefault() - builder.errors = []*ErrorBuilder{} - builder.auth = []*AuthBuilder{} - return builder -} - -type BaseWorkflowBuilder struct { - model BaseWorkflow - start *StartBuilder - datainputschema *DataInputSchemaBuilder - constants *ConstantsBuilder - timeouts *TimeoutsBuilder - errors []*ErrorBuilder - auth []*AuthBuilder -} - -func (b *BaseWorkflowBuilder) ID(input string) *BaseWorkflowBuilder { - b.model.ID = input - return b -} - -func (b *BaseWorkflowBuilder) Key(input string) *BaseWorkflowBuilder { - b.model.Key = input - return b -} - -func (b *BaseWorkflowBuilder) Name(input string) *BaseWorkflowBuilder { - b.model.Name = input - return b -} - -func (b *BaseWorkflowBuilder) Description(input string) *BaseWorkflowBuilder { - b.model.Description = input - return b -} - -func (b *BaseWorkflowBuilder) Version(input string) *BaseWorkflowBuilder { - b.model.Version = input - return b -} - -func (b *BaseWorkflowBuilder) Start() *StartBuilder { - if b.start == nil { - b.start = NewStartBuilder() - } - return b.start -} - -func (b *BaseWorkflowBuilder) Annotations(input []string) *BaseWorkflowBuilder { - b.model.Annotations = input - return b -} - -func (b *BaseWorkflowBuilder) DataInputSchema() *DataInputSchemaBuilder { - if b.datainputschema == nil { - b.datainputschema = NewDataInputSchemaBuilder() - } - return b.datainputschema -} - -func (b *BaseWorkflowBuilder) SpecVersion(input string) *BaseWorkflowBuilder { - b.model.SpecVersion = input - return b -} - -func (b *BaseWorkflowBuilder) Secrets(input Secrets) *BaseWorkflowBuilder { - b.model.Secrets = input - return b -} - -func (b *BaseWorkflowBuilder) Constants() *ConstantsBuilder { - if b.constants == nil { - b.constants = NewConstantsBuilder() - } - return b.constants -} - -func (b *BaseWorkflowBuilder) ExpressionLang(input ExpressionLangType) *BaseWorkflowBuilder { - b.model.ExpressionLang = input - return b -} - -func (b *BaseWorkflowBuilder) Timeouts() *TimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BaseWorkflowBuilder) AddErrors() *ErrorBuilder { - builder := NewErrorBuilder() - b.errors = append(b.errors, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveErrors(remove *ErrorBuilder) { - for i, val := range b.errors { - if val == remove { - b.errors[i] = b.errors[len(b.errors)-1] - b.errors = b.errors[:len(b.errors)-1] - } - } -} -func (b *BaseWorkflowBuilder) KeepActive(input bool) *BaseWorkflowBuilder { - b.model.KeepActive = input - return b -} - -func (b *BaseWorkflowBuilder) Metadata(input Metadata) *BaseWorkflowBuilder { - b.model.Metadata = input - return b -} - -func (b *BaseWorkflowBuilder) AutoRetries(input bool) *BaseWorkflowBuilder { - b.model.AutoRetries = input - return b -} - -func (b *BaseWorkflowBuilder) AddAuth() *AuthBuilder { - builder := NewAuthBuilder() - b.auth = append(b.auth, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveAuth(remove *AuthBuilder) { - for i, val := range b.auth { - if val == remove { - b.auth[i] = b.auth[len(b.auth)-1] - b.auth = b.auth[:len(b.auth)-1] - } - } -} -func (b *BaseWorkflowBuilder) Build() BaseWorkflow { - if b.start != nil { - start := b.start.Build() - b.model.Start = &start - } - if b.datainputschema != nil { - datainputschema := b.datainputschema.Build() - b.model.DataInputSchema = &datainputschema - } - if b.constants 
!= nil { - constants := b.constants.Build() - b.model.Constants = &constants - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - b.model.Errors = []Error{} - for _, v := range b.errors { - b.model.Errors = append(b.model.Errors, v.Build()) - } - b.model.Auth = []Auth{} - for _, v := range b.auth { - b.model.Auth = append(b.model.Auth, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBasicAuthPropertiesBuilder() *BasicAuthPropertiesBuilder { - builder := &BasicAuthPropertiesBuilder{} - builder.model = BasicAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BasicAuthPropertiesBuilder struct { - model BasicAuthProperties - CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Secret(input string) *BasicAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Username(input string) *BasicAuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Password(input string) *BasicAuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Build() BasicAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBearerAuthPropertiesBuilder() *BearerAuthPropertiesBuilder { - builder := &BearerAuthPropertiesBuilder{} - builder.model = BearerAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BearerAuthPropertiesBuilder struct { - model BearerAuthProperties - CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Secret(input string) *BearerAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Token(input string) *BearerAuthPropertiesBuilder { - b.model.Token = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Build() BearerAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBranchBuilder() *BranchBuilder { - builder := &BranchBuilder{} - builder.model = Branch{} - builder.actions = []*ActionBuilder{} - return builder -} - -type BranchBuilder struct { - model Branch - actions []*ActionBuilder - timeouts *BranchTimeoutsBuilder -} - -func (b *BranchBuilder) Name(input string) *BranchBuilder { - b.model.Name = input - return b -} - -func (b *BranchBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *BranchBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *BranchBuilder) Timeouts() *BranchTimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewBranchTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BranchBuilder) Build() Branch { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBranchTimeoutsBuilder() *BranchTimeoutsBuilder { - builder := &BranchTimeoutsBuilder{} - builder.model = BranchTimeouts{} - return builder -} - -type BranchTimeoutsBuilder struct { - model BranchTimeouts -} - -func (b *BranchTimeoutsBuilder) ActionExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) BranchExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) Build() BranchTimeouts { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCallbackStateBuilder() *CallbackStateBuilder { - builder := &CallbackStateBuilder{} - builder.model = CallbackState{} - builder.action = NewActionBuilder() - return builder -} - -type CallbackStateBuilder struct { - model CallbackState - action *ActionBuilder - timeouts *CallbackStateTimeoutBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *CallbackStateBuilder) Action() *ActionBuilder { - return b.action -} - -func (b *CallbackStateBuilder) EventRef(input string) *CallbackStateBuilder { - b.model.EventRef = input - return b -} - -func (b *CallbackStateBuilder) Timeouts() *CallbackStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewCallbackStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *CallbackStateBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *CallbackStateBuilder) Build() CallbackState { - b.model.Action = b.action.Build() - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewCallbackStateTimeoutBuilder() *CallbackStateTimeoutBuilder { - builder := &CallbackStateTimeoutBuilder{} - builder.model = CallbackStateTimeout{} - return builder -} - -type CallbackStateTimeoutBuilder struct { - model CallbackStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *CallbackStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *CallbackStateTimeoutBuilder) ActionExecTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) EventTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) Build() CallbackStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCommonBuilder() *CommonBuilder { - builder := &CommonBuilder{} - builder.model = Common{} - return builder -} - -type CommonBuilder struct { - model Common -} - -func (b *CommonBuilder) Metadata(input Metadata) *CommonBuilder { - b.model.Metadata = input - return b -} - -func (b *CommonBuilder) Build() Common { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsBuilder() *ConstantsBuilder { - builder := &ConstantsBuilder{} - builder.model = Constants{} - return builder -} - -type ConstantsBuilder struct { - model Constants -} - -func (b *ConstantsBuilder) Data(input ConstantsData) *ConstantsBuilder { - b.model.Data = input - return b -} - -func (b *ConstantsBuilder) Build() Constants { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsDataBuilder() *ConstantsDataBuilder { - builder := &ConstantsDataBuilder{} - builder.model = ConstantsData{} - return builder -} - -type ConstantsDataBuilder struct { - model ConstantsData -} - -func (b *ConstantsDataBuilder) Build() ConstantsData { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewContinueAsBuilder() *ContinueAsBuilder { - builder := &ContinueAsBuilder{} - builder.model = ContinueAs{} - builder.data = NewObjectBuilder() - builder.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - return builder -} - -type ContinueAsBuilder struct { - model ContinueAs - data *ObjectBuilder - workflowexectimeout *WorkflowExecTimeoutBuilder -} - -func (b *ContinueAsBuilder) WorkflowID(input string) *ContinueAsBuilder { - b.model.WorkflowID = input - return b -} - -func (b *ContinueAsBuilder) Version(input string) *ContinueAsBuilder { - b.model.Version = input - return b -} - -func (b *ContinueAsBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ContinueAsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - return b.workflowexectimeout -} - -func (b *ContinueAsBuilder) Build() ContinueAs { - b.model.Data = b.data.Build() - b.model.WorkflowExecTimeout = b.workflowexectimeout.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func NewCorrelationBuilder() *CorrelationBuilder { - builder := &CorrelationBuilder{} - builder.model = Correlation{} - return builder -} - -type CorrelationBuilder struct { - model Correlation -} - -func (b *CorrelationBuilder) ContextAttributeName(input string) *CorrelationBuilder { - b.model.ContextAttributeName = input - return b -} - -func (b *CorrelationBuilder) ContextAttributeValue(input string) *CorrelationBuilder { - b.model.ContextAttributeValue = input - return b -} - -func (b *CorrelationBuilder) Build() Correlation { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCronBuilder() *CronBuilder { - builder := &CronBuilder{} - builder.model = Cron{} - return builder -} - -type CronBuilder struct { - model Cron -} - -func (b *CronBuilder) Expression(input string) *CronBuilder { - b.model.Expression = input - return b -} - -func (b *CronBuilder) ValidUntil(input string) *CronBuilder { - b.model.ValidUntil = input - return b -} - -func (b *CronBuilder) Build() Cron { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataConditionBuilder() *DataConditionBuilder { - builder := &DataConditionBuilder{} - builder.model = DataCondition{} - return builder -} - -type DataConditionBuilder struct { - model DataCondition - end *EndBuilder - transition *TransitionBuilder -} - -func (b *DataConditionBuilder) Name(input string) *DataConditionBuilder { - b.model.Name = input - return b -} - -func (b *DataConditionBuilder) Condition(input string) *DataConditionBuilder { - b.model.Condition = input - return b -} - -func (b *DataConditionBuilder) Metadata(input Metadata) *DataConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *DataConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DataConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DataConditionBuilder) Build() DataCondition { - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataInputSchemaBuilder() *DataInputSchemaBuilder { - builder := &DataInputSchemaBuilder{} - builder.model = DataInputSchema{} - builder.model.ApplyDefault() - return builder -} - -type DataInputSchemaBuilder struct { - model DataInputSchema - schema *ObjectBuilder -} - -func (b *DataInputSchemaBuilder) Schema() *ObjectBuilder { - if b.schema == nil { - b.schema = NewObjectBuilder() - } - return b.schema -} - -func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSchemaBuilder { - b.model.FailOnValidationErrors = input - return b -} - -func (b *DataInputSchemaBuilder) Build() DataInputSchema { - if b.schema != nil { - schema := b.schema.Build() - b.model.Schema = &schema - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewDefaultConditionBuilder() *DefaultConditionBuilder { - builder := &DefaultConditionBuilder{} - builder.model = DefaultCondition{} - return builder -} - -type DefaultConditionBuilder struct { - model DefaultCondition - transition *TransitionBuilder - end *EndBuilder -} - -func (b *DefaultConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DefaultConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DefaultConditionBuilder) Build() DefaultCondition { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDelayStateBuilder() *DelayStateBuilder { - builder := &DelayStateBuilder{} - builder.model = DelayState{} - return builder -} - -type DelayStateBuilder struct { - model DelayState -} - -func (b *DelayStateBuilder) TimeDelay(input string) *DelayStateBuilder { - b.model.TimeDelay = input - return b -} - -func (b *DelayStateBuilder) Build() DelayState { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEndBuilder() *EndBuilder { - builder := &EndBuilder{} - builder.model = End{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type EndBuilder struct { - model End - produceevents []*ProduceEventBuilder - continueas *ContinueAsBuilder -} - -func (b *EndBuilder) Terminate(input bool) *EndBuilder { - b.model.Terminate = input - return b -} - -func (b *EndBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *EndBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *EndBuilder) Compensate(input bool) *EndBuilder { - b.model.Compensate = input - return b -} - -func (b *EndBuilder) ContinueAs() *ContinueAsBuilder { - if b.continueas == nil { - b.continueas = NewContinueAsBuilder() - } - return b.continueas -} - -func (b *EndBuilder) Build() End { - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - if b.continueas != nil { - continueas := b.continueas.Build() - b.model.ContinueAs = &continueas - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewErrorBuilder() *ErrorBuilder { - builder := &ErrorBuilder{} - builder.model = Error{} - return builder -} - -type ErrorBuilder struct { - model Error -} - -func (b *ErrorBuilder) Name(input string) *ErrorBuilder { - b.model.Name = input - return b -} - -func (b *ErrorBuilder) Code(input string) *ErrorBuilder { - b.model.Code = input - return b -} - -func (b *ErrorBuilder) Description(input string) *ErrorBuilder { - b.model.Description = input - return b -} - -func (b *ErrorBuilder) Build() Error { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewErrorsBuilder() *ErrorsBuilder { - builder := &ErrorsBuilder{} - builder.model = Errors{} - return builder -} - -type ErrorsBuilder struct { - model Errors -} - -func (b *ErrorsBuilder) Build() Errors { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventBuilder() *EventBuilder { - builder := &EventBuilder{} - builder.model = Event{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - builder.correlation = []*CorrelationBuilder{} - return builder -} - -type EventBuilder struct { - model Event - CommonBuilder - correlation []*CorrelationBuilder -} - -func (b *EventBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *EventBuilder) Name(input string) *EventBuilder { - b.model.Name = input - return b -} - -func (b *EventBuilder) Source(input string) *EventBuilder { - b.model.Source = input - return b -} - -func (b *EventBuilder) Type(input string) *EventBuilder { - b.model.Type = input - return b -} - -func (b *EventBuilder) Kind(input EventKind) *EventBuilder { - b.model.Kind = input - return b -} - -func (b *EventBuilder) DataOnly(input bool) *EventBuilder { - b.model.DataOnly = input - return b -} - -func (b *EventBuilder) AddCorrelation() *CorrelationBuilder { - builder := NewCorrelationBuilder() - b.correlation = append(b.correlation, builder) - return builder -} - -func (b *EventBuilder) RemoveCorrelation(remove *CorrelationBuilder) { - for i, val := range b.correlation { - if val == remove { - b.correlation[i] = b.correlation[len(b.correlation)-1] - b.correlation = b.correlation[:len(b.correlation)-1] - } - } -} -func (b *EventBuilder) Build() Event { - b.model.Common = b.CommonBuilder.Build() - b.model.Correlation = []Correlation{} - for _, v := range b.correlation { - b.model.Correlation = append(b.model.Correlation, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventConditionBuilder() *EventConditionBuilder { - builder := &EventConditionBuilder{} - builder.model = EventCondition{} - return builder -} - -type EventConditionBuilder struct { - model EventCondition - eventdatafilter *EventDataFilterBuilder - end *EndBuilder - transition *TransitionBuilder -} - -func (b *EventConditionBuilder) Name(input string) *EventConditionBuilder { - b.model.Name = input - return b -} - -func (b *EventConditionBuilder) EventRef(input string) *EventConditionBuilder { - b.model.EventRef = input - return b -} - -func (b *EventConditionBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *EventConditionBuilder) Metadata(input Metadata) *EventConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *EventConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *EventConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *EventConditionBuilder) Build() EventCondition { - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventConditionsBuilder() *EventConditionsBuilder { - builder := &EventConditionsBuilder{} - builder.model = EventConditions{} - return builder -} - -type EventConditionsBuilder struct { - model EventConditions -} - -func (b *EventConditionsBuilder) Build() EventConditions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventDataFilterBuilder() *EventDataFilterBuilder { - builder := &EventDataFilterBuilder{} - builder.model = EventDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type EventDataFilterBuilder struct { - model EventDataFilter -} - -func (b *EventDataFilterBuilder) UseData(input bool) *EventDataFilterBuilder { - b.model.UseData = input - return b -} - -func (b *EventDataFilterBuilder) Data(input string) *EventDataFilterBuilder { - b.model.Data = input - return b -} - -func (b *EventDataFilterBuilder) ToStateData(input string) *EventDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *EventDataFilterBuilder) Build() EventDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventRefBuilder() *EventRefBuilder { - builder := &EventRefBuilder{} - builder.model = EventRef{} - builder.model.ApplyDefault() - return builder -} - -type EventRefBuilder struct { - model EventRef - data *ObjectBuilder -} - -func (b *EventRefBuilder) TriggerEventRef(input string) *EventRefBuilder { - b.model.TriggerEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventRef(input string) *EventRefBuilder { - b.model.ResultEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventTimeout(input string) *EventRefBuilder { - b.model.ResultEventTimeout = input - return b -} - -func (b *EventRefBuilder) Data() *ObjectBuilder { - if b.data == nil { - b.data = NewObjectBuilder() - } - return b.data -} - -func (b *EventRefBuilder) ContextAttributes(input map[string]Object) *EventRefBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *EventRefBuilder) Invoke(input InvokeKind) *EventRefBuilder { - b.model.Invoke = input - return b -} - -func (b *EventRefBuilder) Build() EventRef { - if b.data != nil { - data := b.data.Build() - b.model.Data = &data - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventStateBuilder() *EventStateBuilder { - builder := &EventStateBuilder{} - builder.model = EventState{} - builder.model.ApplyDefault() - builder.onevents = []*OnEventsBuilder{} - return builder -} - -type EventStateBuilder struct { - model EventState - onevents []*OnEventsBuilder - timeouts *EventStateTimeoutBuilder -} - -func (b *EventStateBuilder) Exclusive(input bool) *EventStateBuilder { - b.model.Exclusive = input - return b -} - -func (b *EventStateBuilder) AddOnEvents() *OnEventsBuilder { - builder := NewOnEventsBuilder() - b.onevents = append(b.onevents, builder) - return builder -} - -func (b *EventStateBuilder) RemoveOnEvents(remove *OnEventsBuilder) { - for i, val := range b.onevents { - if val == remove { - b.onevents[i] = b.onevents[len(b.onevents)-1] - b.onevents = b.onevents[:len(b.onevents)-1] - } - } -} -func (b *EventStateBuilder) Timeouts() *EventStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewEventStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *EventStateBuilder) Build() EventState { - b.model.OnEvents = []OnEvents{} - for _, v := range b.onevents { - b.model.OnEvents = append(b.model.OnEvents, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventStateTimeoutBuilder() *EventStateTimeoutBuilder { - builder := &EventStateTimeoutBuilder{} - builder.model = EventStateTimeout{} - return builder -} - -type EventStateTimeoutBuilder struct { - model EventStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *EventStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *EventStateTimeoutBuilder) ActionExecTimeout(input string) *EventStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) EventTimeout(input string) *EventStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) Build() EventStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventsBuilder() *EventsBuilder { - builder := &EventsBuilder{} - builder.model = Events{} - return builder -} - -type EventsBuilder struct { - model Events -} - -func (b *EventsBuilder) Build() Events { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewForEachStateBuilder() *ForEachStateBuilder { - builder := &ForEachStateBuilder{} - builder.model = ForEachState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type ForEachStateBuilder struct { - model ForEachState - actions []*ActionBuilder - timeouts *ForEachStateTimeoutBuilder -} - -func (b *ForEachStateBuilder) InputCollection(input string) *ForEachStateBuilder { - b.model.InputCollection = input - return b -} - -func (b *ForEachStateBuilder) OutputCollection(input string) *ForEachStateBuilder { - b.model.OutputCollection = input - return b -} - -func (b *ForEachStateBuilder) IterationParam(input string) *ForEachStateBuilder { - b.model.IterationParam = input - return b -} - -func (b *ForEachStateBuilder) BatchSize(input *intstr.IntOrString) *ForEachStateBuilder { - b.model.BatchSize = input - return b -} - -func (b *ForEachStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *ForEachStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *ForEachStateBuilder) Timeouts() *ForEachStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewForEachStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ForEachStateBuilder) Mode(input ForEachModeType) *ForEachStateBuilder { - b.model.Mode = input - return b -} - -func (b *ForEachStateBuilder) Build() ForEachState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewForEachStateTimeoutBuilder() *ForEachStateTimeoutBuilder { - builder := &ForEachStateTimeoutBuilder{} - builder.model = ForEachStateTimeout{} - return builder -} - -type ForEachStateTimeoutBuilder struct { - model ForEachStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ForEachStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ForEachStateTimeoutBuilder) ActionExecTimeout(input string) *ForEachStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *ForEachStateTimeoutBuilder) Build() ForEachStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionBuilder() *FunctionBuilder { - builder := &FunctionBuilder{} - builder.model = Function{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type FunctionBuilder struct { - model Function - CommonBuilder -} - -func (b *FunctionBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *FunctionBuilder) Name(input string) *FunctionBuilder { - b.model.Name = input - return b -} - -func (b *FunctionBuilder) Operation(input string) *FunctionBuilder { - b.model.Operation = input - return b -} - -func (b *FunctionBuilder) Type(input FunctionType) *FunctionBuilder { - b.model.Type = input - return b -} - -func (b *FunctionBuilder) AuthRef(input string) *FunctionBuilder { - b.model.AuthRef = input - return b -} - -func (b *FunctionBuilder) Build() Function { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionRefBuilder() *FunctionRefBuilder { - builder := &FunctionRefBuilder{} - builder.model = FunctionRef{} - builder.model.ApplyDefault() - return builder -} - -type FunctionRefBuilder struct { - model FunctionRef -} - -func (b *FunctionRefBuilder) RefName(input string) *FunctionRefBuilder { - b.model.RefName = input - return b -} - -func (b *FunctionRefBuilder) Arguments(input map[string]Object) *FunctionRefBuilder { - b.model.Arguments = input - return b -} - -func (b *FunctionRefBuilder) SelectionSet(input string) *FunctionRefBuilder { - b.model.SelectionSet = input - return b -} - -func (b *FunctionRefBuilder) Invoke(input InvokeKind) *FunctionRefBuilder { - b.model.Invoke = input - return b -} - -func (b *FunctionRefBuilder) Build() FunctionRef { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionsBuilder() *FunctionsBuilder { - builder := &FunctionsBuilder{} - builder.model = Functions{} - return builder -} - -type FunctionsBuilder struct { - model Functions -} - -func (b *FunctionsBuilder) Build() Functions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewInjectStateBuilder() *InjectStateBuilder { - builder := &InjectStateBuilder{} - builder.model = InjectState{} - return builder -} - -type InjectStateBuilder struct { - model InjectState - timeouts *InjectStateTimeoutBuilder -} - -func (b *InjectStateBuilder) Data(input map[string]Object) *InjectStateBuilder { - b.model.Data = input - return b -} - -func (b *InjectStateBuilder) Timeouts() *InjectStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewInjectStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *InjectStateBuilder) Build() InjectState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewInjectStateTimeoutBuilder() *InjectStateTimeoutBuilder { - builder := &InjectStateTimeoutBuilder{} - builder.model = InjectStateTimeout{} - return builder -} - -type InjectStateTimeoutBuilder struct { - model InjectStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *InjectStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *InjectStateTimeoutBuilder) Build() InjectStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewMetadataBuilder() *MetadataBuilder { - builder := &MetadataBuilder{} - builder.model = Metadata{} - return builder -} - -type MetadataBuilder struct { - model Metadata -} - -func (b *MetadataBuilder) Build() Metadata { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOAuth2AuthPropertiesBuilder() *OAuth2AuthPropertiesBuilder { - builder := &OAuth2AuthPropertiesBuilder{} - builder.model = OAuth2AuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type OAuth2AuthPropertiesBuilder struct { - model OAuth2AuthProperties - CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Secret(input string) *OAuth2AuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Authority(input string) *OAuth2AuthPropertiesBuilder { - b.model.Authority = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) GrantType(input GrantType) *OAuth2AuthPropertiesBuilder { - b.model.GrantType = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientID(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientID = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientSecret(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientSecret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Scopes(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Scopes = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Username(input string) *OAuth2AuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Password(input string) *OAuth2AuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Audiences(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Audiences = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) SubjectToken(input string) *OAuth2AuthPropertiesBuilder { - b.model.SubjectToken = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedSubject(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedSubject = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedIssuer(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedIssuer = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Build() OAuth2AuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewObjectBuilder() *ObjectBuilder { - builder := &ObjectBuilder{} - builder.model = Object{} - builder.slicevalue = []*ObjectBuilder{} - return builder -} - -type ObjectBuilder struct { - model Object - slicevalue []*ObjectBuilder -} - -func (b *ObjectBuilder) Type(input Type) *ObjectBuilder { - b.model.Type = input - return b -} - -func (b *ObjectBuilder) StringValue(input string) *ObjectBuilder { - b.model.StringValue = input - return b -} - -func (b *ObjectBuilder) IntValue(input int32) *ObjectBuilder { - b.model.IntValue = input - return b -} - -func (b *ObjectBuilder) FloatValue(input float64) *ObjectBuilder { - b.model.FloatValue = input - return b -} - -func (b *ObjectBuilder) MapValue(input map[string]Object) *ObjectBuilder { - b.model.MapValue = input - return b -} - -func (b *ObjectBuilder) AddSliceValue() *ObjectBuilder { - builder := NewObjectBuilder() - b.slicevalue = append(b.slicevalue, builder) - return builder -} - -func (b *ObjectBuilder) RemoveSliceValue(remove *ObjectBuilder) { - for i, val := range b.slicevalue { - if val == remove { - b.slicevalue[i] = b.slicevalue[len(b.slicevalue)-1] - b.slicevalue = b.slicevalue[:len(b.slicevalue)-1] - } - } -} -func (b *ObjectBuilder) BoolValue(input bool) *ObjectBuilder { - b.model.BoolValue = input - return b -} - -func (b *ObjectBuilder) Build() Object { - b.model.SliceValue = []Object{} - for _, v := range b.slicevalue { - b.model.SliceValue = append(b.model.SliceValue, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOnErrorBuilder() *OnErrorBuilder { - builder := &OnErrorBuilder{} - builder.model = OnError{} - return builder -} - -type OnErrorBuilder struct { - model OnError - transition *TransitionBuilder - end *EndBuilder -} - -func (b *OnErrorBuilder) ErrorRef(input string) *OnErrorBuilder { - b.model.ErrorRef = input - return b -} - -func (b *OnErrorBuilder) ErrorRefs(input []string) *OnErrorBuilder { - b.model.ErrorRefs = input - return b -} - -func (b *OnErrorBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *OnErrorBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *OnErrorBuilder) Build() OnError { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOnEventsBuilder() *OnEventsBuilder { - builder := &OnEventsBuilder{} - builder.model = OnEvents{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - builder.eventdatafilter = NewEventDataFilterBuilder() - return builder -} - -type OnEventsBuilder struct { - model OnEvents - actions []*ActionBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *OnEventsBuilder) EventRefs(input []string) *OnEventsBuilder { - b.model.EventRefs = input - return b -} - -func (b *OnEventsBuilder) ActionMode(input ActionMode) *OnEventsBuilder { - b.model.ActionMode = input - return b -} - -func (b *OnEventsBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OnEventsBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OnEventsBuilder) EventDataFilter() *EventDataFilterBuilder { - return b.eventdatafilter -} - -func (b *OnEventsBuilder) Build() OnEvents { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - b.model.EventDataFilter = b.eventdatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOperationStateBuilder() *OperationStateBuilder { - builder := &OperationStateBuilder{} - builder.model = OperationState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type OperationStateBuilder struct { - model OperationState - actions []*ActionBuilder - timeouts *OperationStateTimeoutBuilder -} - -func (b *OperationStateBuilder) ActionMode(input ActionMode) *OperationStateBuilder { - b.model.ActionMode = input - return b -} - -func (b *OperationStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OperationStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OperationStateBuilder) Timeouts() *OperationStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewOperationStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *OperationStateBuilder) Build() OperationState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOperationStateTimeoutBuilder() *OperationStateTimeoutBuilder { - builder := &OperationStateTimeoutBuilder{} - builder.model = OperationStateTimeout{} - return builder -} - -type OperationStateTimeoutBuilder struct { - model OperationStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *OperationStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *OperationStateTimeoutBuilder) ActionExecTimeout(input string) *OperationStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *OperationStateTimeoutBuilder) Build() OperationStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewParallelStateBuilder() *ParallelStateBuilder { - builder := &ParallelStateBuilder{} - builder.model = ParallelState{} - builder.model.ApplyDefault() - builder.branches = []*BranchBuilder{} - return builder -} - -type ParallelStateBuilder struct { - model ParallelState - branches []*BranchBuilder - timeouts *ParallelStateTimeoutBuilder -} - -func (b *ParallelStateBuilder) AddBranches() *BranchBuilder { - builder := NewBranchBuilder() - b.branches = append(b.branches, builder) - return builder -} - -func (b *ParallelStateBuilder) RemoveBranches(remove *BranchBuilder) { - for i, val := range b.branches { - if val == remove { - b.branches[i] = b.branches[len(b.branches)-1] - b.branches = b.branches[:len(b.branches)-1] - } - } -} -func (b *ParallelStateBuilder) CompletionType(input CompletionType) *ParallelStateBuilder { - b.model.CompletionType = input - return b -} - -func (b *ParallelStateBuilder) NumCompleted(input intstr.IntOrString) *ParallelStateBuilder { - b.model.NumCompleted = input - return b -} - -func (b *ParallelStateBuilder) Timeouts() *ParallelStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewParallelStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ParallelStateBuilder) Build() ParallelState { - b.model.Branches = []Branch{} - for _, v := range b.branches { - b.model.Branches = append(b.model.Branches, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewParallelStateTimeoutBuilder() *ParallelStateTimeoutBuilder { - builder := &ParallelStateTimeoutBuilder{} - builder.model = ParallelStateTimeout{} - return builder -} - -type ParallelStateTimeoutBuilder struct { - model ParallelStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ParallelStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ParallelStateTimeoutBuilder) BranchExecTimeout(input string) *ParallelStateTimeoutBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *ParallelStateTimeoutBuilder) Build() ParallelStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewProduceEventBuilder() *ProduceEventBuilder { - builder := &ProduceEventBuilder{} - builder.model = ProduceEvent{} - builder.data = NewObjectBuilder() - return builder -} - -type ProduceEventBuilder struct { - model ProduceEvent - data *ObjectBuilder -} - -func (b *ProduceEventBuilder) EventRef(input string) *ProduceEventBuilder { - b.model.EventRef = input - return b -} - -func (b *ProduceEventBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ProduceEventBuilder) ContextAttributes(input map[string]string) *ProduceEventBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *ProduceEventBuilder) Build() ProduceEvent { - b.model.Data = b.data.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetriesBuilder() *RetriesBuilder { - builder := &RetriesBuilder{} - builder.model = Retries{} - return builder -} - -type RetriesBuilder struct { - model Retries -} - -func (b *RetriesBuilder) Build() Retries { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetryBuilder() *RetryBuilder { - builder := &RetryBuilder{} - builder.model = Retry{} - builder.model.ApplyDefault() - return builder -} - -type RetryBuilder struct { - model Retry -} - -func (b *RetryBuilder) Name(input string) *RetryBuilder { - b.model.Name = input - return b -} - -func (b *RetryBuilder) Delay(input string) *RetryBuilder { - b.model.Delay = input - return b -} - -func (b *RetryBuilder) MaxDelay(input string) *RetryBuilder { - b.model.MaxDelay = input - return b -} - -func (b *RetryBuilder) Increment(input string) *RetryBuilder { - b.model.Increment = input - return b -} - -func (b *RetryBuilder) Multiplier(input *floatstr.Float32OrString) *RetryBuilder { - b.model.Multiplier = input - return b -} - -func (b *RetryBuilder) MaxAttempts(input intstr.IntOrString) *RetryBuilder { - b.model.MaxAttempts = input - return b -} - -func (b *RetryBuilder) Jitter(input floatstr.Float32OrString) *RetryBuilder { - b.model.Jitter = input - return b -} - -func (b *RetryBuilder) Build() Retry { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewScheduleBuilder() *ScheduleBuilder { - builder := &ScheduleBuilder{} - builder.model = Schedule{} - return builder -} - -type ScheduleBuilder struct { - model Schedule - cron *CronBuilder -} - -func (b *ScheduleBuilder) Interval(input string) *ScheduleBuilder { - b.model.Interval = input - return b -} - -func (b *ScheduleBuilder) Cron() *CronBuilder { - if b.cron == nil { - b.cron = NewCronBuilder() - } - return b.cron -} - -func (b *ScheduleBuilder) Timezone(input string) *ScheduleBuilder { - b.model.Timezone = input - return b -} - -func (b *ScheduleBuilder) Build() Schedule { - if b.cron != nil { - cron := b.cron.Build() - b.model.Cron = &cron - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSecretsBuilder() *SecretsBuilder { - builder := &SecretsBuilder{} - builder.model = Secrets{} - return builder -} - -type SecretsBuilder struct { - model Secrets -} - -func (b *SecretsBuilder) Build() Secrets { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepBuilder() *SleepBuilder { - builder := &SleepBuilder{} - builder.model = Sleep{} - return builder -} - -type SleepBuilder struct { - model Sleep -} - -func (b *SleepBuilder) Before(input string) *SleepBuilder { - b.model.Before = input - return b -} - -func (b *SleepBuilder) After(input string) *SleepBuilder { - b.model.After = input - return b -} - -func (b *SleepBuilder) Build() Sleep { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateBuilder() *SleepStateBuilder { - builder := &SleepStateBuilder{} - builder.model = SleepState{} - return builder -} - -type SleepStateBuilder struct { - model SleepState - timeouts *SleepStateTimeoutBuilder -} - -func (b *SleepStateBuilder) Duration(input string) *SleepStateBuilder { - b.model.Duration = input - return b -} - -func (b *SleepStateBuilder) Timeouts() *SleepStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSleepStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SleepStateBuilder) Build() SleepState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateTimeoutBuilder() *SleepStateTimeoutBuilder { - builder := &SleepStateTimeoutBuilder{} - builder.model = SleepStateTimeout{} - return builder -} - -type SleepStateTimeoutBuilder struct { - model SleepStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SleepStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SleepStateTimeoutBuilder) Build() SleepStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewStartBuilder() *StartBuilder { - builder := &StartBuilder{} - builder.model = Start{} - return builder -} - -type StartBuilder struct { - model Start - schedule *ScheduleBuilder -} - -func (b *StartBuilder) StateName(input string) *StartBuilder { - b.model.StateName = input - return b -} - -func (b *StartBuilder) Schedule() *ScheduleBuilder { - if b.schedule == nil { - b.schedule = NewScheduleBuilder() - } - return b.schedule -} - -func (b *StartBuilder) Build() Start { - if b.schedule != nil { - schedule := b.schedule.Build() - b.model.Schedule = &schedule - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateBuilder() *StateBuilder { - builder := &StateBuilder{} - builder.model = State{} - builder.BaseStateBuilder = *NewBaseStateBuilder() - return builder -} - -type StateBuilder struct { - model State - BaseStateBuilder - *DelayStateBuilder - *EventStateBuilder - *OperationStateBuilder - *ParallelStateBuilder - *SwitchStateBuilder - *ForEachStateBuilder - *InjectStateBuilder - *CallbackStateBuilder - *SleepStateBuilder -} - -func (b *StateBuilder) ID(input string) *StateBuilder { - b.BaseStateBuilder.ID(input) - return b -} - -func (b *StateBuilder) Name(input string) *StateBuilder { - b.BaseStateBuilder.Name(input) - return b -} - -func (b *StateBuilder) Type(input StateType) *StateBuilder { - b.BaseStateBuilder.Type(input) - return b -} - -func (b *StateBuilder) CompensatedBy(input string) *StateBuilder { - b.BaseStateBuilder.CompensatedBy(input) - return b -} - -func (b *StateBuilder) UsedForCompensation(input bool) *StateBuilder { - b.BaseStateBuilder.UsedForCompensation(input) - return b -} - -func (b *StateBuilder) DelayState() *DelayStateBuilder { - if b.DelayStateBuilder == nil { - b.DelayStateBuilder = NewDelayStateBuilder() - } - return b.DelayStateBuilder -} - -func (b *StateBuilder) TimeDelay(input string) *StateBuilder { - b.DelayStateBuilder.TimeDelay(input) - return b -} - -func (b *StateBuilder) EventState() *EventStateBuilder { - if b.EventStateBuilder == nil { - b.EventStateBuilder = NewEventStateBuilder() - } - return b.EventStateBuilder -} - -func (b *StateBuilder) Exclusive(input bool) *StateBuilder { - b.EventStateBuilder.Exclusive(input) - return b -} - -func (b *StateBuilder) OperationState() *OperationStateBuilder { - if b.OperationStateBuilder == nil { - b.OperationStateBuilder = NewOperationStateBuilder() - } - return b.OperationStateBuilder -} - -func (b *StateBuilder) ActionMode(input ActionMode) *StateBuilder { - b.OperationStateBuilder.ActionMode(input) - return b -} - -func (b *StateBuilder) ParallelState() *ParallelStateBuilder { - if b.ParallelStateBuilder == nil { - b.ParallelStateBuilder = NewParallelStateBuilder() - } - return b.ParallelStateBuilder -} - -func (b *StateBuilder) CompletionType(input CompletionType) *StateBuilder { - b.ParallelStateBuilder.CompletionType(input) - return b -} - -func (b *StateBuilder) SwitchState() *SwitchStateBuilder { - if b.SwitchStateBuilder == nil { - b.SwitchStateBuilder = NewSwitchStateBuilder() - } - return b.SwitchStateBuilder -} - -func (b *StateBuilder) ForEachState() *ForEachStateBuilder { - if b.ForEachStateBuilder == nil { - b.ForEachStateBuilder = NewForEachStateBuilder() - } - return b.ForEachStateBuilder -} - -func (b *StateBuilder) InputCollection(input string) *StateBuilder { - b.ForEachStateBuilder.InputCollection(input) - return b -} - -func (b *StateBuilder) OutputCollection(input string) 
*StateBuilder { - b.ForEachStateBuilder.OutputCollection(input) - return b -} - -func (b *StateBuilder) IterationParam(input string) *StateBuilder { - b.ForEachStateBuilder.IterationParam(input) - return b -} - -func (b *StateBuilder) Mode(input ForEachModeType) *StateBuilder { - b.ForEachStateBuilder.Mode(input) - return b -} - -func (b *StateBuilder) InjectState() *InjectStateBuilder { - if b.InjectStateBuilder == nil { - b.InjectStateBuilder = NewInjectStateBuilder() - } - return b.InjectStateBuilder -} - -func (b *StateBuilder) CallbackState() *CallbackStateBuilder { - if b.CallbackStateBuilder == nil { - b.CallbackStateBuilder = NewCallbackStateBuilder() - } - return b.CallbackStateBuilder -} - -func (b *StateBuilder) EventRef(input string) *StateBuilder { - b.CallbackStateBuilder.EventRef(input) - return b -} - -func (b *StateBuilder) SleepState() *SleepStateBuilder { - if b.SleepStateBuilder == nil { - b.SleepStateBuilder = NewSleepStateBuilder() - } - return b.SleepStateBuilder -} - -func (b *StateBuilder) Duration(input string) *StateBuilder { - b.SleepStateBuilder.Duration(input) - return b -} - -func (b *StateBuilder) Build() State { - b.model.BaseState = b.BaseStateBuilder.Build() - if b.DelayStateBuilder != nil { - delaystate := b.DelayStateBuilder.Build() - b.model.DelayState = &delaystate - } - if b.EventStateBuilder != nil { - eventstate := b.EventStateBuilder.Build() - b.model.EventState = &eventstate - } - if b.OperationStateBuilder != nil { - operationstate := b.OperationStateBuilder.Build() - b.model.OperationState = &operationstate - } - if b.ParallelStateBuilder != nil { - parallelstate := b.ParallelStateBuilder.Build() - b.model.ParallelState = &parallelstate - } - if b.SwitchStateBuilder != nil { - switchstate := b.SwitchStateBuilder.Build() - b.model.SwitchState = &switchstate - } - if b.ForEachStateBuilder != nil { - foreachstate := b.ForEachStateBuilder.Build() - b.model.ForEachState = &foreachstate - } - if b.InjectStateBuilder != nil { - injectstate := b.InjectStateBuilder.Build() - b.model.InjectState = &injectstate - } - if b.CallbackStateBuilder != nil { - callbackstate := b.CallbackStateBuilder.Build() - b.model.CallbackState = &callbackstate - } - if b.SleepStateBuilder != nil { - sleepstate := b.SleepStateBuilder.Build() - b.model.SleepState = &sleepstate - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateDataFilterBuilder() *StateDataFilterBuilder { - builder := &StateDataFilterBuilder{} - builder.model = StateDataFilter{} - return builder -} - -type StateDataFilterBuilder struct { - model StateDataFilter -} - -func (b *StateDataFilterBuilder) Input(input string) *StateDataFilterBuilder { - b.model.Input = input - return b -} - -func (b *StateDataFilterBuilder) Output(input string) *StateDataFilterBuilder { - b.model.Output = input - return b -} - -func (b *StateDataFilterBuilder) Build() StateDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewStateExecTimeoutBuilder() *StateExecTimeoutBuilder { - builder := &StateExecTimeoutBuilder{} - builder.model = StateExecTimeout{} - return builder -} - -type StateExecTimeoutBuilder struct { - model StateExecTimeout -} - -func (b *StateExecTimeoutBuilder) Single(input string) *StateExecTimeoutBuilder { - b.model.Single = input - return b -} - -func (b *StateExecTimeoutBuilder) Total(input string) *StateExecTimeoutBuilder { - b.model.Total = input - return b -} - -func (b *StateExecTimeoutBuilder) Build() StateExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStatesBuilder() *StatesBuilder { - builder := &StatesBuilder{} - builder.model = States{} - return builder -} - -type StatesBuilder struct { - model States -} - -func (b *StatesBuilder) Build() States { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSwitchStateBuilder() *SwitchStateBuilder { - builder := &SwitchStateBuilder{} - builder.model = SwitchState{} - builder.defaultcondition = NewDefaultConditionBuilder() - builder.eventconditions = []*EventConditionBuilder{} - builder.dataconditions = []*DataConditionBuilder{} - return builder -} - -type SwitchStateBuilder struct { - model SwitchState - defaultcondition *DefaultConditionBuilder - eventconditions []*EventConditionBuilder - dataconditions []*DataConditionBuilder - timeouts *SwitchStateTimeoutBuilder -} - -func (b *SwitchStateBuilder) DefaultCondition() *DefaultConditionBuilder { - return b.defaultcondition -} - -func (b *SwitchStateBuilder) AddEventConditions() *EventConditionBuilder { - builder := NewEventConditionBuilder() - b.eventconditions = append(b.eventconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveEventConditions(remove *EventConditionBuilder) { - for i, val := range b.eventconditions { - if val == remove { - b.eventconditions[i] = b.eventconditions[len(b.eventconditions)-1] - b.eventconditions = b.eventconditions[:len(b.eventconditions)-1] - } - } -} -func (b *SwitchStateBuilder) AddDataConditions() *DataConditionBuilder { - builder := NewDataConditionBuilder() - b.dataconditions = append(b.dataconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveDataConditions(remove *DataConditionBuilder) { - for i, val := range b.dataconditions { - if val == remove { - b.dataconditions[i] = b.dataconditions[len(b.dataconditions)-1] - b.dataconditions = b.dataconditions[:len(b.dataconditions)-1] - } - } -} -func (b *SwitchStateBuilder) Timeouts() *SwitchStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSwitchStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SwitchStateBuilder) Build() SwitchState { - b.model.DefaultCondition = b.defaultcondition.Build() - b.model.EventConditions = []EventCondition{} - for _, v := range b.eventconditions { - b.model.EventConditions = append(b.model.EventConditions, v.Build()) - } - b.model.DataConditions = []DataCondition{} - for _, v := range b.dataconditions { - b.model.DataConditions = append(b.model.DataConditions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewSwitchStateTimeoutBuilder() *SwitchStateTimeoutBuilder { - builder := &SwitchStateTimeoutBuilder{} - builder.model = SwitchStateTimeout{} - return builder -} - -type SwitchStateTimeoutBuilder struct { - model SwitchStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SwitchStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SwitchStateTimeoutBuilder) EventTimeout(input string) *SwitchStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *SwitchStateTimeoutBuilder) Build() SwitchStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewTimeoutsBuilder() *TimeoutsBuilder { - builder := &TimeoutsBuilder{} - builder.model = Timeouts{} - return builder -} - -type TimeoutsBuilder struct { - model Timeouts - workflowexectimeout *WorkflowExecTimeoutBuilder - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *TimeoutsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - if b.workflowexectimeout == nil { - b.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - } - return b.workflowexectimeout -} - -func (b *TimeoutsBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *TimeoutsBuilder) ActionExecTimeout(input string) *TimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) BranchExecTimeout(input string) *TimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) EventTimeout(input string) *TimeoutsBuilder { - b.model.EventTimeout = input - return b -} - -func (b *TimeoutsBuilder) Build() Timeouts { - if b.workflowexectimeout != nil { - workflowexectimeout := b.workflowexectimeout.Build() - b.model.WorkflowExecTimeout = &workflowexectimeout - } - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewTransitionBuilder() *TransitionBuilder { - builder := &TransitionBuilder{} - builder.model = Transition{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type TransitionBuilder struct { - model Transition - stateparent *StateBuilder - produceevents []*ProduceEventBuilder -} - -func (b *TransitionBuilder) stateParent() *StateBuilder { - if b.stateparent == nil { - b.stateparent = NewStateBuilder() - } - return b.stateparent -} - -func (b *TransitionBuilder) NextState(input string) *TransitionBuilder { - b.model.NextState = input - return b -} - -func (b *TransitionBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *TransitionBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *TransitionBuilder) Compensate(input bool) *TransitionBuilder { - b.model.Compensate = input - return b -} - -func (b *TransitionBuilder) Build() Transition { - if b.stateparent != nil { - stateparent := b.stateparent.Build() - b.model.stateParent = &stateparent - } - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowBuilder() *WorkflowBuilder { - builder := &WorkflowBuilder{} - builder.model = Workflow{} - builder.BaseWorkflowBuilder = *NewBaseWorkflowBuilder() - builder.states = []*StateBuilder{} - builder.events = []*EventBuilder{} - builder.functions = []*FunctionBuilder{} - builder.retries = []*RetryBuilder{} - return builder -} - -type WorkflowBuilder struct { - model Workflow - BaseWorkflowBuilder - states []*StateBuilder - events []*EventBuilder - functions []*FunctionBuilder - retries []*RetryBuilder -} - -func (b *WorkflowBuilder) ID(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.ID(input) - return b -} - -func (b *WorkflowBuilder) Key(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Key(input) - return b -} - -func (b *WorkflowBuilder) Name(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Name(input) - return b -} - -func (b *WorkflowBuilder) Description(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Description(input) - return b -} - -func (b *WorkflowBuilder) Version(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Version(input) - return b -} - -func (b *WorkflowBuilder) SpecVersion(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.SpecVersion(input) - return b -} - -func (b *WorkflowBuilder) ExpressionLang(input ExpressionLangType) *WorkflowBuilder { - b.BaseWorkflowBuilder.ExpressionLang(input) - return b -} - -func (b *WorkflowBuilder) KeepActive(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.KeepActive(input) - return b -} - -func (b *WorkflowBuilder) AutoRetries(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.AutoRetries(input) - return b -} - -func (b *WorkflowBuilder) AddStates() *StateBuilder { - builder := NewStateBuilder() - b.states = append(b.states, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveStates(remove *StateBuilder) { - for i, val := range b.states { - if val == remove { - b.states[i] = 
b.states[len(b.states)-1] - b.states = b.states[:len(b.states)-1] - } - } -} -func (b *WorkflowBuilder) AddEvents() *EventBuilder { - builder := NewEventBuilder() - b.events = append(b.events, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveEvents(remove *EventBuilder) { - for i, val := range b.events { - if val == remove { - b.events[i] = b.events[len(b.events)-1] - b.events = b.events[:len(b.events)-1] - } - } -} -func (b *WorkflowBuilder) AddFunctions() *FunctionBuilder { - builder := NewFunctionBuilder() - b.functions = append(b.functions, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveFunctions(remove *FunctionBuilder) { - for i, val := range b.functions { - if val == remove { - b.functions[i] = b.functions[len(b.functions)-1] - b.functions = b.functions[:len(b.functions)-1] - } - } -} -func (b *WorkflowBuilder) AddRetries() *RetryBuilder { - builder := NewRetryBuilder() - b.retries = append(b.retries, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveRetries(remove *RetryBuilder) { - for i, val := range b.retries { - if val == remove { - b.retries[i] = b.retries[len(b.retries)-1] - b.retries = b.retries[:len(b.retries)-1] - } - } -} -func (b *WorkflowBuilder) Build() Workflow { - b.model.BaseWorkflow = b.BaseWorkflowBuilder.Build() - b.model.States = []State{} - for _, v := range b.states { - b.model.States = append(b.model.States, v.Build()) - } - b.model.Events = []Event{} - for _, v := range b.events { - b.model.Events = append(b.model.Events, v.Build()) - } - b.model.Functions = []Function{} - for _, v := range b.functions { - b.model.Functions = append(b.model.Functions, v.Build()) - } - b.model.Retries = []Retry{} - for _, v := range b.retries { - b.model.Retries = append(b.model.Retries, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowExecTimeoutBuilder() *WorkflowExecTimeoutBuilder { - builder := &WorkflowExecTimeoutBuilder{} - builder.model = WorkflowExecTimeout{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowExecTimeoutBuilder struct { - model WorkflowExecTimeout -} - -func (b *WorkflowExecTimeoutBuilder) Duration(input string) *WorkflowExecTimeoutBuilder { - b.model.Duration = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Interrupt(input bool) *WorkflowExecTimeoutBuilder { - b.model.Interrupt = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) RunBefore(input string) *WorkflowExecTimeoutBuilder { - b.model.RunBefore = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Build() WorkflowExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewWorkflowRefBuilder() *WorkflowRefBuilder { - builder := &WorkflowRefBuilder{} - builder.model = WorkflowRef{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowRefBuilder struct { - model WorkflowRef -} - -func (b *WorkflowRefBuilder) WorkflowID(input string) *WorkflowRefBuilder { - b.model.WorkflowID = input - return b -} - -func (b *WorkflowRefBuilder) Version(input string) *WorkflowRefBuilder { - b.model.Version = input - return b -} - -func (b *WorkflowRefBuilder) Invoke(input InvokeKind) *WorkflowRefBuilder { - b.model.Invoke = input - return b -} - -func (b *WorkflowRefBuilder) OnParentComplete(input OnParentCompleteType) *WorkflowRefBuilder { - b.model.OnParentComplete = input - return b -} - -func (b *WorkflowRefBuilder) Build() WorkflowRef { - return b.model -} diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go deleted file mode 100644 index 0fb2566..0000000 --- a/model/zz_generated.deepcopy.go +++ /dev/null @@ -1,1837 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Code generated by deepcopy-gen. DO NOT EDIT. - -package model - -import ( - json "encoding/json" - - floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - intstr "k8s.io/apimachinery/pkg/util/intstr" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Action) DeepCopyInto(out *Action) { - *out = *in - if in.FunctionRef != nil { - in, out := &in.FunctionRef, &out.FunctionRef - *out = new(FunctionRef) - (*in).DeepCopyInto(*out) - } - if in.EventRef != nil { - in, out := &in.EventRef, &out.EventRef - *out = new(EventRef) - (*in).DeepCopyInto(*out) - } - if in.SubFlowRef != nil { - in, out := &in.SubFlowRef, &out.SubFlowRef - *out = new(WorkflowRef) - **out = **in - } - if in.Sleep != nil { - in, out := &in.Sleep, &out.Sleep - *out = new(Sleep) - **out = **in - } - if in.NonRetryableErrors != nil { - in, out := &in.NonRetryableErrors, &out.NonRetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.RetryableErrors != nil { - in, out := &in.RetryableErrors, &out.RetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - out.ActionDataFilter = in.ActionDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Action. -func (in *Action) DeepCopy() *Action { - if in == nil { - return nil - } - out := new(Action) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ActionDataFilter) DeepCopyInto(out *ActionDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ActionDataFilter. 
-func (in *ActionDataFilter) DeepCopy() *ActionDataFilter { - if in == nil { - return nil - } - out := new(ActionDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Auth) DeepCopyInto(out *Auth) { - *out = *in - in.Properties.DeepCopyInto(&out.Properties) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auth. -func (in *Auth) DeepCopy() *Auth { - if in == nil { - return nil - } - out := new(Auth) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AuthProperties) DeepCopyInto(out *AuthProperties) { - *out = *in - if in.Basic != nil { - in, out := &in.Basic, &out.Basic - *out = new(BasicAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.Bearer != nil { - in, out := &in.Bearer, &out.Bearer - *out = new(BearerAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.OAuth2 != nil { - in, out := &in.OAuth2, &out.OAuth2 - *out = new(OAuth2AuthProperties) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthProperties. -func (in *AuthProperties) DeepCopy() *AuthProperties { - if in == nil { - return nil - } - out := new(AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Auths) DeepCopyInto(out *Auths) { - { - in := &in - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. -func (in Auths) DeepCopy() Auths { - if in == nil { - return nil - } - out := new(Auths) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BaseState) DeepCopyInto(out *BaseState) { - *out = *in - if in.OnErrors != nil { - in, out := &in.OnErrors, &out.OnErrors - *out = make([]OnError, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.StateDataFilter != nil { - in, out := &in.StateDataFilter, &out.StateDataFilter - *out = new(StateDataFilter) - **out = **in - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = new(Metadata) - if **in != nil { - in, out := *in, *out - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseState. -func (in *BaseState) DeepCopy() *BaseState { - if in == nil { - return nil - } - out := new(BaseState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(Start) - (*in).DeepCopyInto(*out) - } - if in.Annotations != nil { - in, out := &in.Annotations, &out.Annotations - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DataInputSchema != nil { - in, out := &in.DataInputSchema, &out.DataInputSchema - *out = new(DataInputSchema) - (*in).DeepCopyInto(*out) - } - if in.Secrets != nil { - in, out := &in.Secrets, &out.Secrets - *out = make(Secrets, len(*in)) - copy(*out, *in) - } - if in.Constants != nil { - in, out := &in.Constants, &out.Constants - *out = new(Constants) - (*in).DeepCopyInto(*out) - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(Timeouts) - (*in).DeepCopyInto(*out) - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(Errors, len(*in)) - copy(*out, *in) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Auth != nil { - in, out := &in.Auth, &out.Auth - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseWorkflow. -func (in *BaseWorkflow) DeepCopy() *BaseWorkflow { - if in == nil { - return nil - } - out := new(BaseWorkflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BasicAuthProperties) DeepCopyInto(out *BasicAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAuthProperties. -func (in *BasicAuthProperties) DeepCopy() *BasicAuthProperties { - if in == nil { - return nil - } - out := new(BasicAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BearerAuthProperties) DeepCopyInto(out *BearerAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BearerAuthProperties. -func (in *BearerAuthProperties) DeepCopy() *BearerAuthProperties { - if in == nil { - return nil - } - out := new(BearerAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Branch) DeepCopyInto(out *Branch) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(BranchTimeouts) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Branch. -func (in *Branch) DeepCopy() *Branch { - if in == nil { - return nil - } - out := new(Branch) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BranchTimeouts) DeepCopyInto(out *BranchTimeouts) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BranchTimeouts. -func (in *BranchTimeouts) DeepCopy() *BranchTimeouts { - if in == nil { - return nil - } - out := new(BranchTimeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackState) DeepCopyInto(out *CallbackState) { - *out = *in - in.Action.DeepCopyInto(&out.Action) - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(CallbackStateTimeout) - (*in).DeepCopyInto(*out) - } - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackState. -func (in *CallbackState) DeepCopy() *CallbackState { - if in == nil { - return nil - } - out := new(CallbackState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackStateTimeout) DeepCopyInto(out *CallbackStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackStateTimeout. -func (in *CallbackStateTimeout) DeepCopy() *CallbackStateTimeout { - if in == nil { - return nil - } - out := new(CallbackStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Common) DeepCopyInto(out *Common) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Common. -func (in *Common) DeepCopy() *Common { - if in == nil { - return nil - } - out := new(Common) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Constants) DeepCopyInto(out *Constants) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Constants. -func (in *Constants) DeepCopy() *Constants { - if in == nil { - return nil - } - out := new(Constants) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in ConstantsData) DeepCopyInto(out *ConstantsData) { - { - in := &in - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConstantsData. -func (in ConstantsData) DeepCopy() ConstantsData { - if in == nil { - return nil - } - out := new(ConstantsData) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ContinueAs) DeepCopyInto(out *ContinueAs) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - out.WorkflowExecTimeout = in.WorkflowExecTimeout - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ContinueAs. -func (in *ContinueAs) DeepCopy() *ContinueAs { - if in == nil { - return nil - } - out := new(ContinueAs) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Correlation) DeepCopyInto(out *Correlation) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation. -func (in *Correlation) DeepCopy() *Correlation { - if in == nil { - return nil - } - out := new(Correlation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Cron) DeepCopyInto(out *Cron) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cron. -func (in *Cron) DeepCopy() *Cron { - if in == nil { - return nil - } - out := new(Cron) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataCondition) DeepCopyInto(out *DataCondition) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCondition. -func (in *DataCondition) DeepCopy() *DataCondition { - if in == nil { - return nil - } - out := new(DataCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { - *out = *in - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(Object) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSchema. 
-func (in *DataInputSchema) DeepCopy() *DataInputSchema { - if in == nil { - return nil - } - out := new(DataInputSchema) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DefaultCondition) DeepCopyInto(out *DefaultCondition) { - *out = *in - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultCondition. -func (in *DefaultCondition) DeepCopy() *DefaultCondition { - if in == nil { - return nil - } - out := new(DefaultCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DelayState) DeepCopyInto(out *DelayState) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DelayState. -func (in *DelayState) DeepCopy() *DelayState { - if in == nil { - return nil - } - out := new(DelayState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *End) DeepCopyInto(out *End) { - *out = *in - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ContinueAs != nil { - in, out := &in.ContinueAs, &out.ContinueAs - *out = new(ContinueAs) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new End. -func (in *End) DeepCopy() *End { - if in == nil { - return nil - } - out := new(End) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Error) DeepCopyInto(out *Error) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Error. -func (in *Error) DeepCopy() *Error { - if in == nil { - return nil - } - out := new(Error) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Errors) DeepCopyInto(out *Errors) { - { - in := &in - *out = make(Errors, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Errors. -func (in Errors) DeepCopy() Errors { - if in == nil { - return nil - } - out := new(Errors) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Event) DeepCopyInto(out *Event) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Correlation != nil { - in, out := &in.Correlation, &out.Correlation - *out = make([]Correlation, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Event. 
-func (in *Event) DeepCopy() *Event { - if in == nil { - return nil - } - out := new(Event) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventCondition) DeepCopyInto(out *EventCondition) { - *out = *in - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventCondition. -func (in *EventCondition) DeepCopy() *EventCondition { - if in == nil { - return nil - } - out := new(EventCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in EventConditions) DeepCopyInto(out *EventConditions) { - { - in := &in - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventConditions. -func (in EventConditions) DeepCopy() EventConditions { - if in == nil { - return nil - } - out := new(EventConditions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventDataFilter) DeepCopyInto(out *EventDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDataFilter. -func (in *EventDataFilter) DeepCopy() *EventDataFilter { - if in == nil { - return nil - } - out := new(EventDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventRef) DeepCopyInto(out *EventRef) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = new(Object) - (*in).DeepCopyInto(*out) - } - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventRef. -func (in *EventRef) DeepCopy() *EventRef { - if in == nil { - return nil - } - out := new(EventRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventState) DeepCopyInto(out *EventState) { - *out = *in - if in.OnEvents != nil { - in, out := &in.OnEvents, &out.OnEvents - *out = make([]OnEvents, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(EventStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventState. 
-func (in *EventState) DeepCopy() *EventState { - if in == nil { - return nil - } - out := new(EventState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventStateTimeout) DeepCopyInto(out *EventStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventStateTimeout. -func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { - if in == nil { - return nil - } - out := new(EventStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Events) DeepCopyInto(out *Events) { - { - in := &in - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Events. -func (in Events) DeepCopy() Events { - if in == nil { - return nil - } - out := new(Events) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachState) DeepCopyInto(out *ForEachState) { - *out = *in - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(intstr.IntOrString) - **out = **in - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ForEachStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachState. -func (in *ForEachState) DeepCopy() *ForEachState { - if in == nil { - return nil - } - out := new(ForEachState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachStateTimeout) DeepCopyInto(out *ForEachStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachStateTimeout. -func (in *ForEachStateTimeout) DeepCopy() *ForEachStateTimeout { - if in == nil { - return nil - } - out := new(ForEachStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Function) DeepCopyInto(out *Function) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Function. -func (in *Function) DeepCopy() *Function { - if in == nil { - return nil - } - out := new(Function) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *FunctionRef) DeepCopyInto(out *FunctionRef) { - *out = *in - if in.Arguments != nil { - in, out := &in.Arguments, &out.Arguments - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FunctionRef. -func (in *FunctionRef) DeepCopy() *FunctionRef { - if in == nil { - return nil - } - out := new(FunctionRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Functions) DeepCopyInto(out *Functions) { - { - in := &in - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Functions. -func (in Functions) DeepCopy() Functions { - if in == nil { - return nil - } - out := new(Functions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectState) DeepCopyInto(out *InjectState) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(InjectStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectState. -func (in *InjectState) DeepCopy() *InjectState { - if in == nil { - return nil - } - out := new(InjectState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectStateTimeout) DeepCopyInto(out *InjectStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectStateTimeout. -func (in *InjectStateTimeout) DeepCopy() *InjectStateTimeout { - if in == nil { - return nil - } - out := new(InjectStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Metadata) DeepCopyInto(out *Metadata) { - { - in := &in - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Metadata. -func (in Metadata) DeepCopy() Metadata { - if in == nil { - return nil - } - out := new(Metadata) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OAuth2AuthProperties) DeepCopyInto(out *OAuth2AuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Scopes != nil { - in, out := &in.Scopes, &out.Scopes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Audiences != nil { - in, out := &in.Audiences, &out.Audiences - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2AuthProperties. -func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { - if in == nil { - return nil - } - out := new(OAuth2AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Object) DeepCopyInto(out *Object) { - *out = *in - if in.MapValue != nil { - in, out := &in.MapValue, &out.MapValue - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.SliceValue != nil { - in, out := &in.SliceValue, &out.SliceValue - *out = make([]Object, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Object. -func (in *Object) DeepCopy() *Object { - if in == nil { - return nil - } - out := new(Object) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnError) DeepCopyInto(out *OnError) { - *out = *in - if in.ErrorRefs != nil { - in, out := &in.ErrorRefs, &out.ErrorRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnError. -func (in *OnError) DeepCopy() *OnError { - if in == nil { - return nil - } - out := new(OnError) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnEvents) DeepCopyInto(out *OnEvents) { - *out = *in - if in.EventRefs != nil { - in, out := &in.EventRefs, &out.EventRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.EventDataFilter = in.EventDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnEvents. -func (in *OnEvents) DeepCopy() *OnEvents { - if in == nil { - return nil - } - out := new(OnEvents) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OperationState) DeepCopyInto(out *OperationState) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(OperationStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationState. -func (in *OperationState) DeepCopy() *OperationState { - if in == nil { - return nil - } - out := new(OperationState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OperationStateTimeout) DeepCopyInto(out *OperationStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationStateTimeout. -func (in *OperationStateTimeout) DeepCopy() *OperationStateTimeout { - if in == nil { - return nil - } - out := new(OperationStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelState) DeepCopyInto(out *ParallelState) { - *out = *in - if in.Branches != nil { - in, out := &in.Branches, &out.Branches - *out = make([]Branch, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.NumCompleted = in.NumCompleted - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ParallelStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelState. -func (in *ParallelState) DeepCopy() *ParallelState { - if in == nil { - return nil - } - out := new(ParallelState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelStateTimeout) DeepCopyInto(out *ParallelStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelStateTimeout. -func (in *ParallelStateTimeout) DeepCopy() *ParallelStateTimeout { - if in == nil { - return nil - } - out := new(ParallelStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ProduceEvent) DeepCopyInto(out *ProduceEvent) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProduceEvent. -func (in *ProduceEvent) DeepCopy() *ProduceEvent { - if in == nil { - return nil - } - out := new(ProduceEvent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func (in Retries) DeepCopyInto(out *Retries) { - { - in := &in - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retries. -func (in Retries) DeepCopy() Retries { - if in == nil { - return nil - } - out := new(Retries) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Retry) DeepCopyInto(out *Retry) { - *out = *in - if in.Multiplier != nil { - in, out := &in.Multiplier, &out.Multiplier - *out = new(floatstr.Float32OrString) - **out = **in - } - out.MaxAttempts = in.MaxAttempts - out.Jitter = in.Jitter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retry. -func (in *Retry) DeepCopy() *Retry { - if in == nil { - return nil - } - out := new(Retry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Schedule) DeepCopyInto(out *Schedule) { - *out = *in - if in.Cron != nil { - in, out := &in.Cron, &out.Cron - *out = new(Cron) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schedule. -func (in *Schedule) DeepCopy() *Schedule { - if in == nil { - return nil - } - out := new(Schedule) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Secrets) DeepCopyInto(out *Secrets) { - { - in := &in - *out = make(Secrets, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Secrets. -func (in Secrets) DeepCopy() Secrets { - if in == nil { - return nil - } - out := new(Secrets) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Sleep) DeepCopyInto(out *Sleep) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Sleep. -func (in *Sleep) DeepCopy() *Sleep { - if in == nil { - return nil - } - out := new(Sleep) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepState) DeepCopyInto(out *SleepState) { - *out = *in - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SleepStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepState. -func (in *SleepState) DeepCopy() *SleepState { - if in == nil { - return nil - } - out := new(SleepState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepStateTimeout) DeepCopyInto(out *SleepStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepStateTimeout. 
-func (in *SleepStateTimeout) DeepCopy() *SleepStateTimeout { - if in == nil { - return nil - } - out := new(SleepStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Start) DeepCopyInto(out *Start) { - *out = *in - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(Schedule) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Start. -func (in *Start) DeepCopy() *Start { - if in == nil { - return nil - } - out := new(Start) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *State) DeepCopyInto(out *State) { - *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) - if in.DelayState != nil { - in, out := &in.DelayState, &out.DelayState - *out = new(DelayState) - **out = **in - } - if in.EventState != nil { - in, out := &in.EventState, &out.EventState - *out = new(EventState) - (*in).DeepCopyInto(*out) - } - if in.OperationState != nil { - in, out := &in.OperationState, &out.OperationState - *out = new(OperationState) - (*in).DeepCopyInto(*out) - } - if in.ParallelState != nil { - in, out := &in.ParallelState, &out.ParallelState - *out = new(ParallelState) - (*in).DeepCopyInto(*out) - } - if in.SwitchState != nil { - in, out := &in.SwitchState, &out.SwitchState - *out = new(SwitchState) - (*in).DeepCopyInto(*out) - } - if in.ForEachState != nil { - in, out := &in.ForEachState, &out.ForEachState - *out = new(ForEachState) - (*in).DeepCopyInto(*out) - } - if in.InjectState != nil { - in, out := &in.InjectState, &out.InjectState - *out = new(InjectState) - (*in).DeepCopyInto(*out) - } - if in.CallbackState != nil { - in, out := &in.CallbackState, &out.CallbackState - *out = new(CallbackState) - (*in).DeepCopyInto(*out) - } - if in.SleepState != nil { - in, out := &in.SleepState, &out.SleepState - *out = new(SleepState) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new State. -func (in *State) DeepCopy() *State { - if in == nil { - return nil - } - out := new(State) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateDataFilter) DeepCopyInto(out *StateDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateDataFilter. -func (in *StateDataFilter) DeepCopy() *StateDataFilter { - if in == nil { - return nil - } - out := new(StateDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateExecTimeout) DeepCopyInto(out *StateExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateExecTimeout. -func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { - if in == nil { - return nil - } - out := new(StateExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in States) DeepCopyInto(out *States) { - { - in := &in - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new States. -func (in States) DeepCopy() States { - if in == nil { - return nil - } - out := new(States) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchState) DeepCopyInto(out *SwitchState) { - *out = *in - in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) - if in.EventConditions != nil { - in, out := &in.EventConditions, &out.EventConditions - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.DataConditions != nil { - in, out := &in.DataConditions, &out.DataConditions - *out = make([]DataCondition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SwitchStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchState. -func (in *SwitchState) DeepCopy() *SwitchState { - if in == nil { - return nil - } - out := new(SwitchState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchStateTimeout) DeepCopyInto(out *SwitchStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchStateTimeout. -func (in *SwitchStateTimeout) DeepCopy() *SwitchStateTimeout { - if in == nil { - return nil - } - out := new(SwitchStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Timeouts) DeepCopyInto(out *Timeouts) { - *out = *in - if in.WorkflowExecTimeout != nil { - in, out := &in.WorkflowExecTimeout, &out.WorkflowExecTimeout - *out = new(WorkflowExecTimeout) - **out = **in - } - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Timeouts. -func (in *Timeouts) DeepCopy() *Timeouts { - if in == nil { - return nil - } - out := new(Timeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Transition) DeepCopyInto(out *Transition) { - *out = *in - if in.stateParent != nil { - in, out := &in.stateParent, &out.stateParent - *out = new(State) - (*in).DeepCopyInto(*out) - } - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Transition. 
-func (in *Transition) DeepCopy() *Transition { - if in == nil { - return nil - } - out := new(Transition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ValidatorContext) DeepCopyInto(out *ValidatorContext) { - *out = *in - if in.States != nil { - in, out := &in.States, &out.States - *out = make(map[string]State, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(map[string]Function, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(map[string]Event, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(map[string]Retry, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(map[string]Error, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidatorContext. -func (in *ValidatorContext) DeepCopy() *ValidatorContext { - if in == nil { - return nil - } - out := new(ValidatorContext) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Workflow) DeepCopyInto(out *Workflow) { - *out = *in - in.BaseWorkflow.DeepCopyInto(&out.BaseWorkflow) - if in.States != nil { - in, out := &in.States, &out.States - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Workflow. -func (in *Workflow) DeepCopy() *Workflow { - if in == nil { - return nil - } - out := new(Workflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowExecTimeout) DeepCopyInto(out *WorkflowExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowExecTimeout. -func (in *WorkflowExecTimeout) DeepCopy() *WorkflowExecTimeout { - if in == nil { - return nil - } - out := new(WorkflowExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowRef) DeepCopyInto(out *WorkflowRef) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowRef. 
-func (in *WorkflowRef) DeepCopy() *WorkflowRef { - if in == nil { - return nil - } - out := new(WorkflowRef) - in.DeepCopyInto(out) - return out -} diff --git a/parser/cmd/main.go b/parser/cmd/main.go new file mode 100644 index 0000000..e811696 --- /dev/null +++ b/parser/cmd/main.go @@ -0,0 +1,67 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/parser" + "os" + "path/filepath" +) + +func main() { + if len(os.Args) < 2 { + fmt.Println("Usage: go run main.go ") + os.Exit(1) + } + + baseDir := os.Args[1] + supportedExt := []string{".json", ".yaml", ".yml"} + errCount := 0 + + err := filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + for _, ext := range supportedExt { + if filepath.Ext(path) == ext { + fmt.Printf("Validating: %s\n", path) + _, err := parser.FromFile(path) + if err != nil { + fmt.Printf("Validation failed for %s: %v\n", path, err) + errCount++ + } else { + fmt.Printf("Validation succeeded for %s\n", path) + } + break + } + } + } + return nil + }) + + if err != nil { + fmt.Printf("Error walking the path %s: %v\n", baseDir, err) + os.Exit(1) + } + + if errCount > 0 { + fmt.Printf("Validation failed for %d file(s).\n", errCount) + os.Exit(1) + } + + fmt.Println("All workflows validated successfully.") +} diff --git a/parser/parser.go b/parser/parser.go index 7b7ad93..3707132 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -21,10 +21,9 @@ import ( "path/filepath" "strings" - "sigs.k8s.io/yaml" + "github.com/serverlessworkflow/sdk-go/v3/model" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "sigs.k8s.io/yaml" ) const ( @@ -51,9 +50,9 @@ func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { return nil, err } - ctx := model.NewValidatorContext(workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, val.WorkflowError(err) + err = model.GetValidator().Struct(workflow) + if err != nil { + return nil, err } return workflow, nil } diff --git a/parser/parser_test.go b/parser/parser_test.go index daf6608..9852d5f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -15,1078 +15,131 @@ package parser import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/test" - "github.com/serverlessworkflow/sdk-go/v2/util" ) -func TestBasicValidation(t *testing.T) { - rootPath := "./testdata/workflows" - files, err := os.ReadDir(rootPath) +func TestFromYAMLSource(t *testing.T) { + source := []byte(` +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + 
endpoint: http://example.com +`) + workflow, err := FromYAMLSource(source) assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) +} - util.SetIncludePaths(append(util.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) - - for _, file := range files { - if !file.IsDir() { - path := filepath.Join(rootPath, file.Name()) - workflow, err := FromFile(path) - - if assert.NoError(t, err, "Test File %s", path) { - assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) - assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) +func TestFromJSONSource(t *testing.T) { + source := []byte(`{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } } } - } -} - -func TestCustomValidators(t *testing.T) { - rootPath := "./testdata/workflows/witherrors" - files, err := os.ReadDir(rootPath) + ] +}`) + workflow, err := FromJSONSource(source) assert.NoError(t, err) - for _, file := range files { - if !file.IsDir() { - _, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.Error(t, err, "Test File %s", file.Name()) - } - } + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) } func TestFromFile(t *testing.T) { - files := []struct { - name string - f func(*testing.T, *model.Workflow) + tests := []struct { + name string + filePath string + expectError bool }{ { - "./testdata/workflows/greetings.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0].OperationState) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/actiondata-defaultvalue.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "greeting", w.ID) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.Equal(t, true, w.States[0].OperationState.Actions[0].ActionDataFilter.UseResults) - assert.Equal(t, "greeting", w.States[0].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/greetings.sw.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.IsType(t, "idx", w.States[0].ID) - assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.NotNil(t, w.States[0].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbaseddataandswitch.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.Equal(t, "Start", w.States[0].Name) - assert.Equal(t, "CheckVisaStatus", w.States[1].Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].SwitchState) - assert.Equal(t, "PT1H", w.States[1].SwitchState.Timeouts.EventTimeout) - assert.Nil(t, w.States[1].End) - assert.NotNil(t, 
w.States[2].End) - assert.True(t, w.States[2].End.Terminate) - }, - }, { - "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { - operationState := w.States[0].OperationState - assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreetingexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) - assert.Equal(t, false, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.p.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - }, - }, { - "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotEmpty(t, w.States[0].EventConditions) - assert.Equal(t, "CheckVisaStatus", w.States[0].Name) - assert.IsType(t, model.EventCondition{}, w.States[0].EventConditions[0]) - }, - }, { - "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, 
"CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 1) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - }, - }, { - "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 2) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", auth[1].Name) - assert.Equal(t, model.AuthTypeBasic, auth[1].Scheme) - basicProperties := auth[1].Properties.Basic - assert.Equal(t, "test_user", basicProperties.Username) - assert.Equal(t, "test_pwd", basicProperties.Password) - // metadata - assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) - assert.Equal(t, model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.Bearer.Metadata) - assert.Equal(t, &model.Metadata{"metadataState": model.FromString("state info")}, w.States[0].Metadata) - }, - }, { - "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/checkinbox.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.NotNil(t, w.States[0]) - 
assert.NotNil(t, w.States[0].OperationState) - operationState := w.States[0].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, w.States, 2) - }, - }, { - // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/workflows/provisionorders.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Provision Orders", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.Len(t, w.States[0].OnErrors, 3) - assert.Equal(t, "Missing order id", w.States[0].OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", w.States[0].OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", w.States[0].OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", w.States[0].OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", w.States[0].OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", w.States[0].OnErrors[2].Transition.NextState) - }, - }, { - "./testdata/workflows/checkinbox.cron-test.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].Transition.NextState) - assert.Nil(t, w.States[0].End) - assert.NotNil(t, w.States[1].End) - assert.True(t, w.States[1].End.Terminate) - }, - }, { - "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].Name) - }, - }, { - // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/workflows/patientonboarding.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Patient Onboarding Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].EventState) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, w.Retries) - assert.Len(t, w.Retries, 1) - assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) - assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) - }, - }, { - "./testdata/workflows/greetings-secret.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 1) - }, - }, { - "./testdata/workflows/greetings-secret-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 3) - }, - }, { - "./testdata/workflows/greetings-constants-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotEmpty(t, w.Constants) - assert.NotEmpty(t, w.Constants.Data["Translations"]) - }, - }, { - "./testdata/workflows/roomreadings.timeouts.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - 
"./testdata/workflows/roomreadings.timeouts.file.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/purchaseorderworkflow.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Purchase Order Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "P30D", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Notify Customer", w.Name) - switchState := w.States[1].SwitchState - - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - - endDataCondition := switchState.DataConditions[0] - assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) - assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) - assert.Equal(t, model.FromString("${ del(.customerCount) }"), endDataCondition.End.ContinueAs.Data) - assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) - assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) - assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) - }, - }, { - name: "./testdata/workflows/greetings-v08-spec.sw.yaml", - f: func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "custom.greeting", w.ID) - assert.Equal(t, "1.0", w.Version) - assert.Equal(t, "0.8", w.SpecVersion) - - // Workflow "name" no longer a required property - assert.Empty(t, w.Name) - - // Functions: - assert.NotEmpty(t, w.Functions[0]) - assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) - assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) - assert.Equal(t, "/path/to/my/script/greeting.ts#CustomGreeting", w.Functions[0].Operation) - - assert.NotEmpty(t, w.Functions[1]) - assert.Equal(t, "sendTextFunction", w.Functions[1].Name) - assert.Equal(t, model.FunctionTypeGraphQL, w.Functions[1].Type) - assert.Equal(t, "http://myapis.org/inboxapi.json#sendText", w.Functions[1].Operation) - - assert.NotEmpty(t, w.Functions[2]) - assert.Equal(t, "greetingFunction", w.Functions[2].Name) - assert.Equal(t, model.FunctionTypeREST, w.Functions[2].Type) - assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) - - // Delay state - assert.NotEmpty(t, w.States[0].DelayState.TimeDelay) - assert.Equal(t, "GreetDelay", w.States[0].Name) - assert.Equal(t, model.StateTypeDelay, w.States[0].Type) - assert.Equal(t, "StoreCarAuctionBid", w.States[0].Transition.NextState) - - // Event state - assert.NotEmpty(t, w.States[1].EventState.OnEvents) - assert.Equal(t, "StoreCarAuctionBid", w.States[1].Name) - assert.Equal(t, model.StateTypeEvent, w.States[1].Type) - assert.Equal(t, true, w.States[1].EventState.Exclusive) - assert.NotEmpty(t, true, w.States[1].EventState.OnEvents[0]) - assert.Equal(t, []string{"CarBidEvent"}, w.States[1].EventState.OnEvents[0].EventRefs) - assert.Equal(t, true, w.States[1].EventState.OnEvents[0].EventDataFilter.UseData) - assert.Equal(t, "test", w.States[1].EventState.OnEvents[0].EventDataFilter.Data) - assert.Equal(t, "testing", 
w.States[1].EventState.OnEvents[0].EventDataFilter.ToStateData) - assert.Equal(t, model.ActionModeParallel, w.States[1].EventState.OnEvents[0].ActionMode) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.RefName) - assert.Equal(t, "funcref1", w.States[1].EventState.OnEvents[0].Actions[0].Name) - assert.Equal(t, map[string]model.Object{"bid": model.FromString("${ .bid }")}, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.Arguments) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[1].EventRef) - assert.Equal(t, "eventRefName", w.States[1].EventState.OnEvents[0].Actions[1].Name) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ResultEventRef) - - data := model.FromString("${ .patientInfo }") - assert.Equal(t, &data, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.Data) - assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ContextAttributes) - - assert.Equal(t, "PT1S", w.States[1].EventState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[1].EventState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, "PT1H", w.States[1].EventState.Timeouts.EventTimeout) - assert.Equal(t, "PT3S", w.States[1].EventState.Timeouts.ActionExecTimeout) - - // Parallel state - assert.NotEmpty(t, w.States[2].ParallelState.Branches) - assert.Equal(t, "ShortDelayBranch", w.States[2].ParallelState.Branches[0].Name) - assert.Equal(t, "shortdelayworkflowid", w.States[2].ParallelState.Branches[0].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "PT5H", w.States[2].ParallelState.Branches[0].Timeouts.ActionExecTimeout) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Branches[0].Timeouts.BranchExecTimeout) - assert.Equal(t, "LongDelayBranch", w.States[2].ParallelState.Branches[1].Name) - assert.Equal(t, "longdelayworkflowid", w.States[2].ParallelState.Branches[1].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "ParallelExec", w.States[2].Name) - assert.Equal(t, model.StateTypeParallel, w.States[2].Type) - assert.Equal(t, model.CompletionTypeAtLeast, w.States[2].ParallelState.CompletionType) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Timeouts.BranchExecTimeout) - assert.Equal(t, "PT1S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, intstr.IntOrString{IntVal: 13}, w.States[2].ParallelState.NumCompleted) - - // Switch state - assert.NotEmpty(t, w.States[3].SwitchState.EventConditions) - assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].Name) - assert.Equal(t, model.StateTypeSwitch, w.States[3].Type) - assert.Equal(t, "visaApprovedEvent", w.States[3].EventConditions[0].Name) - assert.Equal(t, "visaApprovedEventRef", w.States[3].EventConditions[0].EventRef) - assert.Equal(t, "HandleApprovedVisa", w.States[3].EventConditions[0].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "mastercard": model.FromString("disallowed"), - "visa": model.FromString("allowed"), - }, - w.States[3].EventConditions[0].Metadata, - ) - assert.Equal(t, "visaRejectedEvent", w.States[3].EventConditions[1].EventRef) - assert.Equal(t, "HandleRejectedVisa", w.States[3].EventConditions[1].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "test": 
model.FromString("tested"), - }, - w.States[3].EventConditions[1].Metadata, - ) - assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) - assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, "HandleNoVisaDecision", w.States[3].SwitchState.DefaultCondition.Transition.NextState) - - // DataBasedSwitchState - dataBased := w.States[4].SwitchState - assert.NotEmpty(t, dataBased.DataConditions) - assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].Name) - dataCondition := dataBased.DataConditions[0] - assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) - assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) - assert.Equal(t, "RejectApplication", w.States[4].DefaultCondition.Transition.NextState) - assert.Equal(t, "PT1S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Single) - - // operation state - assert.NotEmpty(t, w.States[5].OperationState.Actions) - assert.Equal(t, "GreetSequential", w.States[5].Name) - assert.Equal(t, model.StateTypeOperation, w.States[5].Type) - assert.Equal(t, model.ActionModeSequential, w.States[5].OperationState.ActionMode) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[5].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].FunctionRef.RefName) - - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) - assert.Equal(t, "PT1H", w.States[5].OperationState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT1S", w.States[5].OperationState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[5].OperationState.Timeouts.StateExecTimeout.Single) - - // forEach state - assert.NotEmpty(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "SendTextForHighPriority", w.States[6].Name) - assert.Equal(t, model.ForEachModeTypeSequential, w.States[6].ForEachState.Mode) - assert.Equal(t, model.StateTypeForEach, w.States[6].Type) - assert.Equal(t, "${ .messages }", w.States[6].ForEachState.InputCollection) - assert.Equal(t, "${ .outputMessages }", w.States[6].ForEachState.OutputCollection) - assert.Equal(t, "${ .this }", w.States[6].ForEachState.IterationParam) - - batchSize := intstr.FromInt(45) - assert.Equal(t, &batchSize, w.States[6].ForEachState.BatchSize) - - assert.NotNil(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "test", w.States[6].ForEachState.Actions[0].Name) - assert.NotNil(t, w.States[6].ForEachState.Actions[0].FunctionRef) - assert.Equal(t, "sendTextFunction", w.States[6].ForEachState.Actions[0].FunctionRef.RefName) - assert.Equal(t, map[string]model.Object{"message": model.FromString("${ .singlemessage }")}, w.States[6].ForEachState.Actions[0].FunctionRef.Arguments) - - // assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT12H", 
w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) - - assert.Equal(t, "PT11H", w.States[6].ForEachState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT11S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Single) - - // Inject state - assert.Equal(t, "HelloInject", w.States[7].Name) - assert.Equal(t, model.StateTypeInject, w.States[7].Type) - assert.Equal(t, model.FromString("Hello World, last state!"), w.States[7].InjectState.Data["result"]) - assert.Equal(t, model.FromBool(false), w.States[7].InjectState.Data["boolValue"]) - assert.Equal(t, "PT11M", w.States[7].InjectState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[7].InjectState.Timeouts.StateExecTimeout.Single) - - // callback state - assert.NotEmpty(t, w.States[8].CallbackState.Action) - assert.Equal(t, "CheckCreditCallback", w.States[8].Name) - assert.Equal(t, model.StateTypeCallback, w.States[8].Type) - assert.Equal(t, "callCreditCheckMicroservice", w.States[8].CallbackState.Action.FunctionRef.RefName) - assert.Equal(t, - map[string]model.Object{ - "argsObj": model.FromMap(map[string]interface{}{"age": 10, "name": "hi"}), - "customer": model.FromString("${ .customer }"), - "time": model.FromInt(48), - }, - w.States[8].CallbackState.Action.FunctionRef.Arguments, - ) - assert.Equal(t, "PT10S", w.States[8].CallbackState.Action.Sleep.Before) - assert.Equal(t, "PT20S", w.States[8].CallbackState.Action.Sleep.After) - assert.Equal(t, "PT150M", w.States[8].CallbackState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT34S", w.States[8].CallbackState.Timeouts.EventTimeout) - assert.Equal(t, "PT115M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Single) - - assert.Equal(t, true, w.States[8].CallbackState.EventDataFilter.UseData) - assert.Equal(t, "test data", w.States[8].CallbackState.EventDataFilter.Data) - assert.Equal(t, "${ .customer }", w.States[8].CallbackState.EventDataFilter.ToStateData) - - // sleepState - assert.NotEmpty(t, w.States[9].SleepState.Duration) - assert.Equal(t, "WaitForCompletionSleep", w.States[9].Name) - assert.Equal(t, model.StateTypeSleep, w.States[9].Type) - assert.Equal(t, "PT5S", w.States[9].SleepState.Duration) - assert.NotNil(t, w.States[9].SleepState.Timeouts) - assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, true, w.States[9].End.Terminate) - - // switch state with DefaultCondition as string - assert.NotEmpty(t, w.States[10].SwitchState) - assert.Equal(t, "HelloStateWithDefaultConditionString", w.States[10].Name) - assert.Equal(t, "${ true }", w.States[10].SwitchState.DataConditions[0].Condition) - assert.Equal(t, "HandleApprovedVisa", w.States[10].SwitchState.DataConditions[0].Transition.NextState) - assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) - assert.Equal(t, true, w.States[10].End.Terminate) - }, - }, { - "./testdata/workflows/dataInputSchemaObject.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.DataInputSchema) - expected := model.Object{} - err := json.Unmarshal([]byte("{\"title\": \"Hello World Schema\", \"properties\": {\"person\": "+ - "{\"type\": \"object\",\"properties\": {\"name\": {\"type\": \"string\"}},\"required\": "+ - "[\"name\"]}}, \"required\": 
[\"person\"]}"), - &expected) - fmt.Printf("err: %s\n", err) - fmt.Printf("schema: %+v\n", expected) - assert.Equal(t, &expected, w.DataInputSchema.Schema) - assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) - }, + name: "Valid YAML File", + filePath: "testdata/valid_workflow.yaml", + expectError: false, + }, + { + name: "Invalid YAML File", + filePath: "testdata/invalid_workflow.yaml", + expectError: true, + }, + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Non-existent File", + filePath: "testdata/nonexistent_workflow.yaml", + expectError: true, }, } - for _, file := range files { - t.Run( - file.name, func(t *testing.T) { - workflow, err := FromFile(file.name) - if assert.NoError(t, err, "Test File %s", file.name) { - assert.NotNil(t, workflow, "Test File %s", file.name) - file.f(t, workflow) - } - }, - ) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + workflow, err := FromFile(tt.filePath) + if tt.expectError { + assert.Error(t, err) + assert.Nil(t, workflow) + } else { + assert.NoError(t, err) + assert.NotNil(t, workflow) + assert.Equal(t, "example-workflow", workflow.Document.Name) + } + }) } } -func TestUnmarshalWorkflowBasicTests(t *testing.T) { - t.Run("BasicWorkflowYamlNoAuthDefs", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: TestUnmarshalWorkflowBasicTests -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: inject - data: - result: Hello World! - end: true -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow) - - b, err := json.Marshal(workflow) - assert.Nil(t, err) - assert.True(t, !strings.Contains(string(b), "auth")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) - - t.Run("BasicWorkflowBasicAuthJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ], - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "transition": "Next Hello State" - }, - { - "name": "Next Hello State", - "type": "inject", - "data": { - "result": "Next Hello World!" 
+func TestCheckFilePath(t *testing.T) { + tests := []struct { + name string + filePath string + expectError bool + }{ + { + name: "Valid YAML File Path", + filePath: "testdata/valid_workflow.yaml", + expectError: false, }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowBasicAuthStringJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": "testdata/workflows/urifiles/auth.json", - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowInteger", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.7", - "auth": 123, - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" 
- }, - "end": true - } - ] -} -`)) - - assert.NotNil(t, err) - assert.Equal(t, "auth must be string or array", err.Error()) - assert.Nil(t, workflow) - }) -} - -func TestUnmarshalWorkflowSwitchState(t *testing.T) { - t.Run("WorkflowStatesTest", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowStatesTest -description: Inject Hello World -start: GreetDelay -metadata: - metadata1: metadata1 - metadata2: metadata2 -auth: -- name: testAuth - scheme: bearer - properties: - token: test_token - metadata: - auth1: auth1 - auth2: auth2 -events: -- name: StoreBidFunction - type: store -- name: CarBidEvent - type: store -- name: visaRejectedEvent - type: store -- name: visaApprovedEventRef - type: store -functions: -- name: callCreditCheckMicroservice - operation: http://myapis.org/creditcheck.json#checkCredit -- name: StoreBidFunction - operation: http://myapis.org/storebid.json#storeBid -- name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: -- name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid -- name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: bidFunctionRef - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .thatBid }" - time: 32 - name: bidEventRef - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec -- name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid - timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased -- name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: visaApprovedEventRef - transition: - nextState: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: - nextState: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT10H - stateExecTimeout: - total: PT10S - single: PT20S - defaultCondition: - transition: - nextState: HelloStateWithDefaultConditionString -- name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority -- name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages }" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject -- name: HelloInject - type: inject - data: - result: Hello World, another state! 
- timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: WaitForCompletionSleep -- name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true -- name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - time: 48 - argsObj: { - "name" : "hi", - "age": { - "initial": 10, - "final": 32 - } - } - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT199M - eventTimeout: PT348S - stateExecTimeout: - total: PT115M - single: PT22M - transition: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 50 - name: eventRefName - timeouts: - actionExecTimeout: PT777S - stateExecTimeout: - total: PT33M - single: PT123M - transition: HandleApprovedVisaSubFlow -- name: HandleApprovedVisaSubFlow - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -`)) - assert.NoError(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.NoError(t, err) - - // workflow and auth metadata - assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) - assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) - - // Callback state - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"},\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer }\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) - - // Operation State - assert.True(t, strings.Contains(string(b), `{"name":"HandleApprovedVisa","type":"operation","transition":{"nextState":"HandleApprovedVisaSubFlow"},"actionMode":"sequential","actions":[{"name":"eventRefName","eventRef":{"triggerEventRef":"StoreBidFunction","resultEventRef":"StoreBidFunction","data":"${ .patientInfo }","contextAttributes":{"customer":"${ .customer }","time":50},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"timeouts":{"stateExecTimeout":{"single":"PT123M","total":"PT33M"},"actionExecTimeout":"PT777S"}}`)) - - // Delay State - assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"StoreCarAuctionBid\"},\"timeDelay\":\"PT5S\"}")) - - // Event State - assert.True(t, strings.Contains(string(b), 
"{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"transition\":{\"nextState\":\"ParallelExec\"},\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) - - // Parallel State - assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) - - // Switch State - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) - - // Switch State with string DefaultCondition - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"dataConditions\":[{\"condition\":\"${ true }\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) - - // Foreach State - assert.True(t, strings.Contains(string(b), `{"name":"SendTextForHighPriority","type":"foreach","transition":{"nextState":"HelloInject"},"inputCollection":"${ .messages }","outputCollection":"${ .outputMessages }","iterationParam":"${ .this }","batchSize":45,"actions":[{"name":"test","functionRef":{"refName":"sendTextFunction","arguments":{"message":"${ .singlemessage }"},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"mode":"sequential","timeouts":{"stateExecTimeout":{"single":"PT22S","total":"PT11S"},"actionExecTimeout":"PT11H"}}`)) - - // Inject State - assert.True(t, strings.Contains(string(b), 
"{\"name\":\"HelloInject\",\"type\":\"inject\",\"transition\":{\"nextState\":\"WaitForCompletionSleep\"},\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) - - // Sleep State - assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - // Make sure that the Action FunctionRef is unmarshalled correctly - assert.Equal(t, model.FromString("${ .singlemessage }"), workflow.States[5].ForEachState.Actions[0].FunctionRef.Arguments["message"]) - assert.Equal(t, "sendTextFunction", workflow.States[5].ForEachState.Actions[0].FunctionRef.RefName) - assert.NoError(t, err) - - }) - - t.Run("WorkflowSwitchStateDataConditions with wrong field name", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowSwitchStateDataConditions with wrong field name -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: switch - dataCondition: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: - transition: - nextState: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleRejectedVisaWorkflowID - end: - terminate: true -- name: HandleNoVisaDecision - type: operation - actions: - - subFlowRef: - workflowId: handleNoVisaDecisionWorkflowId - end: - terminate: true -`)) - if assert.Error(t, err) { - assert.Equal(t, `workflow.states[0].switchState.dataConditions is required`, err.Error()) - } - assert.Nil(t, workflow) - }) - - t.Run("Test complex workflow with compensate transitions", func(t *testing.T) { - workflow, err := FromFile("./testdata/workflows/compensate.sw.json") - - assert.Nil(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.Nil(t, err) + { + name: "Unsupported File Extension", + filePath: "testdata/unsupported_workflow.txt", + expectError: true, + }, + { + name: "Directory Path", + filePath: "testdata", + expectError: true, + }, + } - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := checkFilePath(tt.filePath) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } } diff --git a/parser/testdata/applicationrequestfunctions.json b/parser/testdata/applicationrequestfunctions.json deleted file mode 100644 index bafc861..0000000 --- a/parser/testdata/applicationrequestfunctions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/application.json#emailRejection" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/applicationrequestretries.json b/parser/testdata/applicationrequestretries.json deleted file mode 100644 index 40f83b5..0000000 --- a/parser/testdata/applicationrequestretries.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } 
- ] -} \ No newline at end of file diff --git a/parser/testdata/constantsDogs.json b/parser/testdata/constantsDogs.json deleted file mode 100644 index cd3f101..0000000 --- a/parser/testdata/constantsDogs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Translations": { - "Dog": { - "Serbian": "pas", - "Spanish": "perro", - "French": "chien" - } - } -} \ No newline at end of file diff --git a/parser/testdata/datainputschema.json b/parser/testdata/datainputschema.json deleted file mode 100644 index bace233..0000000 --- a/parser/testdata/datainputschema.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - } -} \ No newline at end of file diff --git a/parser/testdata/errors.json b/parser/testdata/errors.json deleted file mode 100644 index 099e14d..0000000 --- a/parser/testdata/errors.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventbasedgreetingevents.json b/parser/testdata/eventbasedgreetingevents.json deleted file mode 100644 index b63f2bf..0000000 --- a/parser/testdata/eventbasedgreetingevents.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/functiondefs.json b/parser/testdata/functiondefs.json deleted file mode 100644 index fc7dd94..0000000 --- a/parser/testdata/functiondefs.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "functions": [ - { - "name": "checkFundsAvailability", - "operation": "file://myapis/billingapis.json#checkFunds" - }, - { - "name": "sendSuccessEmail", - "operation": "file://myapis/emailapis.json#paymentSuccess" - }, - { - "name": "sendInsufficientFundsEmail", - "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventdefs.yml b/parser/testdata/invalid_workflow.yaml similarity index 59% rename from parser/testdata/eventdefs.yml rename to parser/testdata/invalid_workflow.yaml index dd2c3b7..32e25a9 100644 --- a/parser/testdata/eventdefs.yml +++ b/parser/testdata/invalid_workflow.yaml @@ -1,22 +1,25 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-events: -- name: PaymentReceivedEvent - type: payment.receive - source: paymentEventSource - correlation: - - contextAttributeName: accountId -- name: ConfirmationCompletedEvent - type: payment.confirmation - kind: produced \ No newline at end of file + +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + # Missing "endpoint" field, making it invalid \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json deleted file mode 100644 index e5316d9..0000000 --- a/parser/testdata/secrets.json +++ /dev/null @@ -1,6 +0,0 @@ - -[ - "SECRET1", - "SECRET2", - "SECRET3" -] \ No newline at end of file diff --git a/parser/testdata/timeouts.json b/parser/testdata/timeouts.json deleted file mode 100644 index c3586bd..0000000 --- a/parser/testdata/timeouts.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } -} \ No newline at end of file diff --git a/parser/testdata/valid_workflow.json b/parser/testdata/valid_workflow.json new file mode 100644 index 0000000..204e917 --- /dev/null +++ b/parser/testdata/valid_workflow.json @@ -0,0 +1,19 @@ +{ + "document": { + "dsl": "1.0.0", + "namespace": "examples", + "name": "example-workflow", + "version": "1.0.0" + }, + "do": [ + { + "task1": { + "call": "http", + "with": { + "method": "GET", + "endpoint": "http://example.com" + } + } + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/urifiles/auth.yaml b/parser/testdata/valid_workflow.yaml similarity index 62% rename from parser/testdata/workflows/urifiles/auth.yaml rename to parser/testdata/valid_workflow.yaml index 14ba4e2..19df6c4 100644 --- a/parser/testdata/workflows/urifiles/auth.yaml +++ b/parser/testdata/valid_workflow.yaml @@ -1,10 +1,10 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-- name: testAuth - properties: - token: test_token - scheme: bearer -- name: testAuth2 - properties: - password: test_pwd - username: test_user - scheme: basic +document: + dsl: 1.0.0 + namespace: examples + name: example-workflow + version: 1.0.0 +do: + - task1: + call: http + with: + method: GET + endpoint: http://example.com \ No newline at end of file diff --git a/parser/testdata/workflows/VetAppointmentWorkflow.json b/parser/testdata/workflows/VetAppointmentWorkflow.json deleted file mode 100644 index f6c0d43..0000000 --- a/parser/testdata/workflows/VetAppointmentWorkflow.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "id": "VetAppointmentWorkflow", - "name": "Vet Appointment Workflow", - "description": "Vet service call via events", - "version": "1.0", - "specVersion": "0.8", - "start": "MakeVetAppointmentState", - "events": [ - { - "name": "MakeVetAppointment", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "produced" - }, - { - "name": "VetAppointmentInfo", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "consumed" - } - ], - "states": [ - { - "name": "MakeVetAppointmentState", - "type": "operation", - "actions": [ - { - "name": "MakeAppointmentAction", - "eventRef": { - "triggerEventRef": "MakeVetAppointment", - "data": "${ .patientInfo }", - "resultEventRef": "VetAppointmentInfo" - }, - "actionDataFilter": { - "results": "${ .appointmentInfo }" - } - } - ], - "timeouts": { - "actionExecTimeout": "PT15M" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/actiondata-defaultvalue.yaml b/parser/testdata/workflows/actiondata-defaultvalue.yaml deleted file mode 100644 index 6b1628d..0000000 --- a/parser/testdata/workflows/actiondata-defaultvalue.yaml +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - id: greetingId - name: Greet - type: operation - actions: - - name: greeting - functionRef: - refName: greetingFunction - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue103.json b/parser/testdata/workflows/applicationrequest-issue103.json deleted file mode 100644 index 48b71fc..0000000 --- a/parser/testdata/workflows/applicationrequest-issue103.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": "./testdata/workflows/urifiles/auth.yaml", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml deleted file mode 100644 index 395ac8b..0000000 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: applicantrequest -version: '1.0' -name: Applicant Request Decision Workflow -description: Determine if applicant request is valid -start: CheckApplication -specVersion: "0.8" -functions: - - name: sendRejectionEmailFunction - operation: http://myapis.org/applicationapi.json#emailRejection -states: - - name: CheckApplication - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: StartApplication - - condition: "${ .applicants | .age < 18 }" - transition: RejectApplication - defaultCondition: - transition: RejectApplication - - name: StartApplication - type: operation - actions: - - subFlowRef: - workflowId: startApplicationWorkflowId - end: true - - name: RejectApplication - type: operation - actionMode: sequential - actions: - - functionRef: - refName: sendRejectionEmailFunction - arguments: - applicant: "${ .applicant }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json deleted file mode 100644 index 99b373c..0000000 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": "file://testdata/workflows/urifiles/auth.json", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json deleted file mode 100644 index 674532a..0000000 --- a/parser/testdata/workflows/applicationrequest.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "specVersion": "0.8", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": 
"${ .applicants | .age >= 18 }", - "transition": "StartApplication" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json deleted file mode 100644 index 0bdfe5f..0000000 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "metadata":{ - "metadata1": "metadata1", - "metadata2": "metadata2" - }, - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token", - "metadata":{ - "auth1": "auth1", - "auth2": "auth2" - } - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "metadata": { - "metadataState": "state info" - }, - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json deleted file mode 100644 index 309cf8f..0000000 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "start": { - "stateName": "CheckApplication" - }, - "functions": "file://testdata/applicationrequestfunctions.json", - "retries": "file://testdata/applicationrequestretries.json", - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - 
"nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json deleted file mode 100644 index c7c341d..0000000 --- a/parser/testdata/workflows/applicationrequest.url.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestfunctions.json", - "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/refs/heads/4.0.x/api/src/test/resources/features/applicantrequestretries.json", - "start": { - "stateName": "CheckApplication" - }, - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkInbox.json b/parser/testdata/workflows/checkInbox.json deleted file mode 100644 index 0256a8e..0000000 --- a/parser/testdata/workflows/checkInbox.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "checkInbox", - "name": "Check Inbox Workflow", - "version": "1.0", - "specVersion": "0.8", - "description": "Periodically Check Inbox", - "start": { - "stateName": "CheckInbox", - "schedule": { - "cron": "0 0/15 * * * ?" 
- } - }, - "functions": [ - { - "name": "checkInboxFunction", - "operation": "http://myapis.org/inboxapi.json#checkNewMessages" - }, - { - "name": "sendTextFunction", - "operation": "http://myapis.org/inboxapi.json#sendText" - } - ], - "states": [ - { - "name": "CheckInbox", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": "checkInboxFunction" - } - ], - "transition": "SendTextForHighPriority" - }, - { - "name": "SendTextForHighPriority", - "type": "foreach", - "inputCollection": "${ .messages }", - "iterationParam": "singlemessage", - "actions": [ - { - "functionRef": { - "refName": "sendTextFunction", - "arguments": { - "message": "${ .singlemessage }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkcarvitals.json b/parser/testdata/workflows/checkcarvitals.json deleted file mode 100644 index a0f14ef..0000000 --- a/parser/testdata/workflows/checkcarvitals.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "id": "checkcarvitals", - "name": "Check Car Vitals Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "WhenCarIsOn", - "states": [ - { - "name": "WhenCarIsOn", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "CarTurnedOnEvent" - ] - } - ], - "transition": "DoCarVitalChecks" - }, - { - "name": "DoCarVitalChecks", - "type": "operation", - "actions": [ - { - "subFlowRef": "vitalscheck", - "sleep": { - "after": "PT1S" - } - } - ], - "transition": "CheckContinueVitalChecks" - }, - { - "name": "CheckContinueVitalChecks", - "type": "switch", - "eventConditions": [ - { - "name": "Car Turned Off Condition", - "eventRef": "CarTurnedOffEvent", - "end": true - } - ], - "defaultCondition": { - "transition": "DoCarVitalChecks" - } - } - ], - "events": [ - { - "name": "CarTurnedOnEvent", - "type": "car.events", - "source": "my/car" - }, - { - "name": "CarTurnedOffEvent", - "type": "car.events", - "source": "my/car" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml deleted file mode 100644 index 0729e80..0000000 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: 0 0/15 * * * ? 
-functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: checkInboxFunction - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml deleted file mode 100644 index e42d9a2..0000000 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: - expression: 0 0/15 * * * ? -functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: - refName: checkInboxFunction - transition: - nextState: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/compensate.sw.json b/parser/testdata/workflows/compensate.sw.json deleted file mode 100644 index 9f6ab1f..0000000 --- a/parser/testdata/workflows/compensate.sw.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "id": "compensation", - "version": "1.0", - "name": "Workflow Error example", - "description": "An example of how compensation works", - "specVersion": "0.8", - "start": "printStatus", - "functions": [ - { - "name": "PrintOutput", - "type": "custom", - "operation": "sysout" - } - ], - "states": [ - { - "name": "printStatus", - "type": "inject", - "data": { - "compensated": false - }, - "compensatedBy": "compensating", - "transition": "branch" - }, - { - "name": "branch", - "type": "switch", - "dataConditions": [ - { - "condition": ".shouldCompensate==true", - "transition": { - "nextState": "finish_compensate", - "compensate": true - } - }, - { - "condition": ".shouldCompensate==false", - "transition": { - "nextState": "finish_not_compensate", - "compensate": false - } - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "compensating", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensated": true - }, - "transition": "compensating_more" - }, - { - "name": 
"compensating_more", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensating_more": "Real Betis Balompie" - }, - "end": true - }, - { - "name": "finish_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - }, - { - "name": "finish_not_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_not_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/compensation.sw.json b/parser/testdata/workflows/compensation.sw.json deleted file mode 100644 index 567a501..0000000 --- a/parser/testdata/workflows/compensation.sw.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "id": "compensation", - "version": "1.0", - "name": "Workflow Error example", - "description": "An example of how compensation works", - "start": "printStatus", - "states": [ - { - "name": "printStatus", - "type": "inject", - "data": { - "compensated": false - }, - "compensatedBy" : "compensating", - "transition": "branch" - }, - { - "name": "branch", - "type": "switch", - "dataConditions": [ - { - "condition": ".shouldCompensate==true", - "transition": { - "nextState" : "finish_compensate", - "compensate" : true - } - }, - { - "condition": ".shouldCompensate==false", - "transition": { - "nextState" : "finish_not_compensate", - "compensate" : false - } - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "compensating", - "usedForCompensation" : true, - "type": "inject", - "data": { - "compensated": true - }, - "transition" : "compensating_more" - }, - { - "name": "compensating_more", - "usedForCompensation" : true, - "type": "inject", - "data": { - "compensating_more": "Real Betis Balompie" - } - }, - { - "name": "finish_compensate", - "type": "operation", - "actions": [], - "end": { - "compensate": true - } - }, - { - "name": "finish_not_compensate", - "type": "operation", - "actions": [], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/conditionbasedstate.yaml b/parser/testdata/workflows/conditionbasedstate.yaml deleted file mode 100644 index f42b56d..0000000 --- a/parser/testdata/workflows/conditionbasedstate.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - condition: "${ .applicants | .age < 18 }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/continue-as-example.yaml b/parser/testdata/workflows/continue-as-example.yaml deleted file mode 100644 index b5957f5..0000000 --- a/parser/testdata/workflows/continue-as-example.yaml +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: notifycustomerworkflow -name: Notify Customer -version: '1.0' -specVersion: '0.8' -start: WaitForCustomerEvent -states: - - name: WaitForCustomerEvent - type: event - onEvents: - - eventRefs: - - CustomerEvent - eventDataFilter: - data: "${ .customerId }" - toStateData: "${ .eventCustomerId }" - actions: - - functionRef: - refName: NotifyCustomerFunction - arguments: - customerId: "${ .eventCustomerId }" - stateDataFilter: - output: "${ .count = .count + 1 }" - transition: CheckEventQuota - - name: CheckEventQuota - type: switch - dataConditions: - - condition: "${ try(.customerCount) != null and .customerCount > .quota.maxConsumedEvents}" - end: - continueAs: - workflowId: notifycustomerworkflow - version: '1.0' - data: "${ del(.customerCount) }" - workflowExecTimeout: - duration: "PT1H" - runBefore: "GenerateReport" - interrupt: true - defaultCondition: - transition: WaitForCustomerEvent -events: - - name: CustomerEvent - type: org.events.customerEvent - source: customerSource -functions: - - name: NotifyCustomerFunction - operation: http://myapis.org/customerapis.json#notifyCustomer \ No newline at end of file diff --git a/parser/testdata/workflows/customerbankingtransactions.json b/parser/testdata/workflows/customerbankingtransactions.json deleted file mode 100644 index 98fbd34..0000000 --- a/parser/testdata/workflows/customerbankingtransactions.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "id": "customerbankingtransactions", - "name": "Customer Banking Transactions Workflow", - "version": "1.0", - "specVersion": "0.8", - "autoRetries": true, - "constants": { - "largetxamount": 5000 - }, - "states": [ - { - "name": "ProcessTransactions", - "type": "foreach", - "inputCollection": "${ .customer.transactions }", - "iterationParam": "${ .tx }", - "actions": [ - { - "name": "Process Larger Transaction", - "functionRef": "Banking Service - Larger Tx", - "condition": "${ .tx >= $CONST.largetxamount }" - }, - { - "name": "Process Smaller Transaction", - "functionRef": "Banking Service - Smaller Tx", - "condition": "${ .tx < 
$CONST.largetxamount }" - } - ], - "end": true - } - ], - "functions": [ - { - "name": "Banking Service - Larger Tx", - "type": "asyncapi", - "operation": "banking.yaml#largerTransation" - }, - { - "name": "Banking Service - Smaller Tx", - "type": "asyncapi", - "operation": "banking.yaml#smallerTransation" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/customercreditcheck.json b/parser/testdata/workflows/customercreditcheck.json deleted file mode 100644 index 8a3914f..0000000 --- a/parser/testdata/workflows/customercreditcheck.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "customercreditcheck", - "version": "1.0", - "specVersion": "0.8", - "name": "Customer Credit Check Workflow", - "description": "Perform Customer Credit Check", - "start": "CheckCredit", - "functions": [ - { - "name": "creditCheckFunction", - "operation": "http://myapis.org/creditcheckapi.json#doCreditCheck" - }, - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/creditcheckapi.json#rejectionEmail" - }, - { - "name": "callCreditCheckMicroservice", - "operation": "http://myapis.org/creditcheckapi.json#creditCheckMicroservice" - } - ], - "events": [ - { - "name": "CreditCheckCompletedEvent", - "type": "creditCheckCompleteType", - "source": "creditCheckSource", - "correlation": [ - { - "contextAttributeName": "customerId" - } - ] - } - ], - "states": [ - { - "name": "CheckCredit", - "type": "callback", - "action": { - "functionRef": { - "refName": "callCreditCheckMicroservice", - "arguments": { - "customer": "${ .customer }" - } - } - }, - "eventRef": "CreditCheckCompletedEvent", - "timeouts": { - "stateExecTimeout": "PT15M" - }, - "transition": "EvaluateDecision" - }, - { - "name": "EvaluateDecision", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .creditCheck | .decision == \"Approved\" }", - "transition": "StartApplication" - }, - { - "condition": "${ .creditCheck | .decision == \"Denied\" }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaNotExists.yaml b/parser/testdata/workflows/dataInputSchemaNotExists.yaml deleted file mode 100644 index 7aa3712..0000000 --- a/parser/testdata/workflows/dataInputSchemaNotExists.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2024 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: orderprocessing -version: '1.0' -specVersion: '0.8' -start: ChooseOnLanguage -dataInputSchema: - schema: doesnexist.json - failOnValidationErrors: true -functions: - - name: greetFunction - type: custom - operation: sysout -states: - - name: ChooseOnLanguage - type: switch - dataConditions: - - condition: "${ .language == \"English\" }" - transition: GreetInEnglish - - condition: "${ .language == \"Spanish\" }" - transition: GreetInSpanish - defaultCondition: GreetInEnglish - - name: GreetInEnglish - type: inject - data: - greeting: "Hello from JSON Workflow, " - transition: GreetPerson - - name: GreetInSpanish - type: inject - data: - greeting: "Saludos desde JSON Workflow, " - transition: GreetPerson - - name: GreetPerson - type: operation - actions: - - name: greetAction - functionRef: - refName: greetFunction - arguments: - message: ".greeting+.name" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaObject.json b/parser/testdata/workflows/dataInputSchemaObject.json deleted file mode 100644 index 7b50c0d..0000000 --- a/parser/testdata/workflows/dataInputSchemaObject.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "id": "greeting", - "version": "1.0.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "dataInputSchema": { - "failOnValidationErrors": false, - "schema": { - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - }, - "required": [ - "person" - ] - } - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ {greeting: .greeting} }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaValidation.yaml b/parser/testdata/workflows/dataInputSchemaValidation.yaml deleted file mode 100644 index 4bc1e11..0000000 --- a/parser/testdata/workflows/dataInputSchemaValidation.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2023 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: Valid DataInputSchema -version: '1.0' -specVersion: '0.8' -start: Start -dataInputSchema: - failOnValidationErrors: false - schema: "file://testdata/datainputschema.json" -states: -- name: Start - type: inject - data: - done: true - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json deleted file mode 100644 index bdf80d6..0000000 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "id": "eventbaseddataandswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions with Event Database Condition", - "specVersion": "0.8", - "start": { - "stateName": "Start" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "Start", - "type": "switch", - "dataConditions": [ - { - "condition": "${ true }", - "transition": "CheckVisaStatus" - } - ], - "defaultCondition": { - "transition": { - "nextState": "CheckVisaStatus" - } - } - }, - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json deleted file mode 100644 index 3510d11..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "start": "Greet", - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .greet }", - "toStateData": "${ .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet.name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload.greeting }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json 
b/parser/testdata/workflows/eventbasedgreeting.sw.p.json deleted file mode 100644 index 589ad36..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": "file://testdata/eventbasedgreetingevents.json", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json deleted file mode 100644 index 80e81b0..0000000 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "id": "eventbasedgreetingexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - }, - { - "name": "greetingFunction2", - "operation": "file://myapis/greetingapis.json#greeting2" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - }, - { - "eventRefs": [ - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "{{ $.data.greet2 }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction2", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json deleted file mode 100644 index 946aa39..0000000 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "eventbasedgreetingnonexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - 
"name": "Greet", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent", - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json deleted file mode 100644 index 3d0075f..0000000 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "id": "eventbasedswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "specVersion": "0.8", - "start": { - "stateName": "CheckVisaStatus" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedswitchstate.json b/parser/testdata/workflows/eventbasedswitchstate.json deleted file mode 100644 index c1b48b0..0000000 --- a/parser/testdata/workflows/eventbasedswitchstate.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "id": "eventbasedswitchstate", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "start": "CheckVisaStatus", - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": "HandleApprovedVisa" - }, - { - "eventRef": "visaRejectedEvent", - "transition": "HandleRejectedVisa" - } - ], - "eventTimeout": "PT1H", - "defaultCondition": { - "transition": "HandleNoVisaDecision" - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleApprovedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleRejectedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": 
"HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleNoVisaDecisionWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/fillglassofwater.json b/parser/testdata/workflows/fillglassofwater.json deleted file mode 100644 index b45d84e..0000000 --- a/parser/testdata/workflows/fillglassofwater.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "id": "fillglassofwater", - "name": "Fill glass of water workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "Check if full", - "functions": [ - { - "name": "Increment Current Count Function", - "type": "expression", - "operation": ".counts.current += 1 | .counts.current" - } - ], - "states": [ - { - "name": "Check if full", - "type": "switch", - "dataConditions": [ - { - "name": "Need to fill more", - "condition": "${ .counts.current < .counts.max }", - "transition": "Add Water" - }, - { - "name": "Glass full", - "condition": ".counts.current >= .counts.max", - "end": true - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "Add Water", - "type": "operation", - "actions": [ - { - "functionRef": "Increment Current Count Function", - "actionDataFilter": { - "toStateData": ".counts.current" - } - } - ], - "transition": "Check if full" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/finalizeCollegeApplication.json b/parser/testdata/workflows/finalizeCollegeApplication.json deleted file mode 100644 index 9c93616..0000000 --- a/parser/testdata/workflows/finalizeCollegeApplication.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "id": "finalizeCollegeApplication", - "name": "Finalize College Application", - "version": "1.0", - "specVersion": "0.8", - "start": "FinalizeApplication", - "events": [ - { - "name": "ApplicationSubmitted", - "type": "org.application.submitted", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "SATScoresReceived", - "type": "org.application.satscores", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "RecommendationLetterReceived", - "type": "org.application.recommendationLetter", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - } - ], - "functions": [ - { - "name": "finalizeApplicationFunction", - "operation": "http://myapis.org/collegeapplicationapi.json#finalize" - } - ], - "states": [ - { - "name": "FinalizeApplication", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "ApplicationSubmitted", - "SATScoresReceived", - "RecommendationLetterReceived" - ], - "actions": [ - { - "functionRef": { - "refName": "finalizeApplicationFunction", - "arguments": { - "student": "${ .applicantId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml deleted file mode 100644 index 00f04f3..0000000 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -constants: "file://testdata/constantsDogs.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml deleted file mode 100644 index 27d00e1..0000000 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: "file://testdata/secrets.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml deleted file mode 100644 index 2f64a98..0000000 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: - - NAME -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .NAME }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml deleted file mode 100644 index 015a711..0000000 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: custom.greeting -version: '1.0' -specVersion: "0.8" -description: Greet Someone -# name: Greeting example #can be empty -# start: Greet #can be empty -functions: - - name: greetingCustomFunction - operation: /path/to/my/script/greeting.ts#CustomGreeting - # Support custom function type definition - type: custom - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText - type: graphql - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting - - name: StoreBidFunction - operation: http://myapis.org/inboxapi.json#storeBidFunction - - name: callCreditCheckMicroservice - operation: http://myapis.org/inboxapi.json#callCreditCheckMicroservice -events: - - name: StoreBidFunction - type: StoreBidFunction - source: StoreBidFunction - - name: CarBidEvent - type: typeCarBidEvent - source: sourceCarBidEvent - - name: visaApprovedEventRef - type: typeVisaApprovedEventRef - source: sourceVisaApprovedEventRef - - name: visaRejectedEvent - type: typeVisaRejectedEvent - source: sourceVisaRejectedEvent -states: - - name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid - - name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: funcref1 - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 48 - name: eventRefName - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec - - name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid 
- timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased - - name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: visaApprovedEventRef - transition: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - defaultCondition: - transition: HandleNoVisaDecision - - name: CheckApplicationSwitchDataBased - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: - nextState: StartApplication - defaultCondition: - transition: RejectApplication - timeouts: - stateExecTimeout: - total: PT1S - single: PT2S - - name: GreetSequential - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" - timeouts: - actionExecTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - stateDataFilter: - dataOutputPath: "${ .greeting }" - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages }" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject - - name: HelloInject - type: inject - data: - result: Hello World, last state! 
- boolValue: false - timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: CheckCreditCallback - - name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - argsObj: { - "name" : "hi", - "age": 10 - } - time: 48 - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT150M - eventTimeout: PT34S - stateExecTimeout: - total: PT115M - single: PT22M - transition: WaitForCompletionSleep - - name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true - - name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: RejectApplication - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: HandleNoVisaDecision - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: StartApplication - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleApprovedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleRejectedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json deleted file mode 100644 index 8adeeb6..0000000 --- a/parser/testdata/workflows/greetings.sw.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ .greeting }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml deleted file mode 100644 index 8f5447b..0000000 --- a/parser/testdata/workflows/greetings.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - id: idx - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json deleted file mode 100644 index 9a434d4..0000000 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "name": "Greeting Workflow", - "description": "Greet Someone", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "SleepHere", - "type": "sleep", - "timeouts": { - "stateExecTimeout": "PT10S" - }, - "duration": "PT40S", - "transition": "Greet" - }, - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "parameters": { - "name": "${ .person | .name }" - } - }, - "actionDataFilter": { - "toStateData": "${ .greeting }" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/handleCarAuctionBid.json b/parser/testdata/workflows/handleCarAuctionBid.json deleted file mode 100644 index 6df46b0..0000000 --- a/parser/testdata/workflows/handleCarAuctionBid.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "handleCarAuctionBid", - "version": "1.0", - "specVersion": "0.8", - "name": "Car Auction Bidding Workflow", - "description": "Store a single bid whole the car auction is active", - "start": { - "stateName": "StoreCarAuctionBid", - "schedule": "R/PT2H" - }, - "functions": [ - { - "name": "StoreBidFunction", - "operation": "http://myapis.org/carauctionapi.json#storeBid" - } - ], - "events": [ - { - "name": "CarBidEvent", - "type": "carBidMadeType", - "source": "carBidEventSource" - } - ], - "states": [ - { - "name": "StoreCarAuctionBid", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "CarBidEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "StoreBidFunction", - "arguments": { - "bid": "${ .bid }" - } - } - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/helloworld.json b/parser/testdata/workflows/helloworld.json deleted file mode 100644 index 707b6ef..0000000 --- a/parser/testdata/workflows/helloworld.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "id": "helloworld", - "version": "1.0", - "specVersion": "0.8", - "name": "Hello World Workflow", - "description": "Inject Hello World", - "start": "Hello State", - "states": [ - { - "name": "Hello State", - "type": 
"inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/jobmonitoring.json b/parser/testdata/workflows/jobmonitoring.json deleted file mode 100644 index a11282b..0000000 --- a/parser/testdata/workflows/jobmonitoring.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "id": "jobmonitoring", - "version": "1.0", - "specVersion": "0.8", - "name": "Job Monitoring", - "description": "Monitor finished execution of a submitted job", - "start": "SubmitJob", - "functions": [ - { - "name": "submitJob", - "operation": "http://myapis.org/monitorapi.json#doSubmit" - }, - { - "name": "checkJobStatus", - "operation": "http://myapis.org/monitorapi.json#checkStatus" - }, - { - "name": "reportJobSuceeded", - "operation": "http://myapis.org/monitorapi.json#reportSucceeded" - }, - { - "name": "reportJobFailed", - "operation": "http://myapis.org/monitorapi.json#reportFailure" - } - ], - "states": [ - { - "name": "SubmitJob", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "submitJob", - "arguments": { - "name": "${ .job.name }" - } - }, - "actionDataFilter": { - "results": "${ .jobuid }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobuid }" - }, - "transition": "WaitForCompletion" - }, - { - "name": "WaitForCompletion", - "type": "sleep", - "duration": "PT5S", - "transition": "GetJobStatus" - }, - { - "name": "GetJobStatus", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "checkJobStatus", - "arguments": { - "name": "${ .jobuid }" - } - }, - "actionDataFilter": { - "results": "${ .jobstatus }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobstatus }" - }, - "transition": "DetermineCompletion" - }, - { - "name": "DetermineCompletion", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .jobStatus == \"SUCCEEDED\" }", - "transition": "JobSucceeded" - }, - { - "condition": "${ .jobStatus == \"FAILED\" }", - "transition": "JobFailed" - } - ], - "defaultCondition": { - "transition": "WaitForCompletion" - } - }, - { - "name": "JobSucceeded", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobSuceeded", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - }, - { - "name": "JobFailed", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobFailed", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/onboardcustomer.json b/parser/testdata/workflows/onboardcustomer.json deleted file mode 100644 index 85cb0d6..0000000 --- a/parser/testdata/workflows/onboardcustomer.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "id": "onboardcustomer", - "version": "1.0", - "specVersion": "0.8", - "name": "Onboard Customer", - "description": "Onboard a Customer", - "start": "Onboard", - "states": [ - { - "name": "Onboard", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "invoke": "async", - "onParentComplete": "continue", - "workflowId": "customeronboardingworkflow", - "version": "1.0" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/parallelexec.json b/parser/testdata/workflows/parallelexec.json deleted file mode 100644 index 7e33893..0000000 --- a/parser/testdata/workflows/parallelexec.json +++ /dev/null @@ -1,34 +0,0 
@@ -{ - "id": "parallelexec", - "version": "1.0", - "specVersion": "0.8", - "name": "Parallel Execution Workflow", - "description": "Executes two branches in parallel", - "start": "ParallelExec", - "states": [ - { - "name": "ParallelExec", - "type": "parallel", - "completionType": "allOf", - "branches": [ - { - "name": "ShortDelayBranch", - "actions": [ - { - "subFlowRef": "shortdelayworkflowid" - } - ] - }, - { - "name": "LongDelayBranch", - "actions": [ - { - "subFlowRef": "longdelayworkflowid" - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientVitalsWorkflow.json b/parser/testdata/workflows/patientVitalsWorkflow.json deleted file mode 100644 index a4fd8b5..0000000 --- a/parser/testdata/workflows/patientVitalsWorkflow.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "id": "patientVitalsWorkflow", - "name": "Monitor Patient Vitals", - "version": "1.0", - "specVersion": "0.8", - "start": "MonitorVitals", - "events": [ - { - "name": "HighBodyTemperature", - "type": "org.monitor.highBodyTemp", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighBloodPressure", - "type": "org.monitor.highBloodPressure", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighRespirationRate", - "type": "org.monitor.highRespirationRate", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - } - ], - "functions": [ - { - "name": "callPulmonologist", - "operation": "http://myapis.org/patientapis.json#callPulmonologist" - }, - { - "name": "sendTylenolOrder", - "operation": "http://myapis.org/patientapis.json#tylenolOrder" - }, - { - "name": "callNurse", - "operation": "http://myapis.org/patientapis.json#callNurse" - } - ], - "states": [ - { - "name": "MonitorVitals", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "HighBodyTemperature" - ], - "actions": [ - { - "functionRef": { - "refName": "sendTylenolOrder", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighBloodPressure" - ], - "actions": [ - { - "functionRef": { - "refName": "callNurse", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighRespirationRate" - ], - "actions": [ - { - "functionRef": { - "refName": "callPulmonologist", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml deleted file mode 100644 index 6ceb1a1..0000000 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: patientonboarding -name: Patient Onboarding Workflow -version: "1.0" -specVersion: "0.8" -start: Onboard -states: - - name: Onboard - type: event - onEvents: - - eventRefs: - - NewPatientEvent - actions: - - functionRef: StorePatient - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: AssignDoctor - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: ScheduleAppt - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - onErrors: - - errorRef: ServiceNotAvailable - end: true - end: true -events: - - name: NewPatientEvent - type: new.patients.event - source: newpatient/+ -functions: - - name: StorePatient - operation: api/services.json#storePatient - - name: StoreNewPatientInfo - operation: api/services.json#addPatient - - name: AssignDoctor - operation: api/services.json#assignDoctor - - name: ScheduleAppt - operation: api/services.json#scheduleAppointment -errors: - - name: ServiceNotAvailable - code: "503" -retries: - - name: ServicesNotAvailableRetryStrategy - delay: PT3S - maxAttempts: 10 - jitter: 0.0 - multiplier: 1.1 \ No newline at end of file diff --git a/parser/testdata/workflows/paymentconfirmation.json b/parser/testdata/workflows/paymentconfirmation.json deleted file mode 100644 index 815a73c..0000000 --- a/parser/testdata/workflows/paymentconfirmation.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "paymentconfirmation", - "version": "1.0", - "specVersion": "0.8", - "name": "Payment Confirmation Workflow", - "description": "Performs Payment Confirmation", - "functions": "file://functiondefs.json", - "events": "file://eventdefs.yml", - "states": [ - { - "name": "PaymentReceived", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "PaymentReceivedEvent" - ], - "actions": [ - { - "name": "checkfunds", - "functionRef": { - "refName": "checkFundsAvailability", - "arguments": { - "account": "${ .accountId }", - "paymentamount": "${ .payment.amount }" - } - } - } - ] - } - ], - "transition": "ConfirmBasedOnFunds" - }, - { - "name": "ConfirmBasedOnFunds", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .funds | .available == \"true\" }", - "transition": "SendPaymentSuccess" - }, - { - "condition": "${ .funds | .available == \"false\" }", - "transition": "SendInsufficientResults" - } - ], - "defaultCondition": { - "transition": "SendPaymentSuccess" - } - }, - { - "name": "SendPaymentSuccess", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendSuccessEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - }, - { - "name": "SendInsufficientResults", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendInsufficientFundsEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json deleted file mode 100644 index 7496b32..0000000 --- a/parser/testdata/workflows/provisionorders.sw.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "id": "provisionorders", - "version": "1.0", - "specVersion": "0.8", - "name": "Provision Orders", - "description": "Provision Orders and 
handle errors thrown", - "start": "ProvisionOrder", - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ], - "states": [ - { - "name": "ProvisionOrder", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .order }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .exceptions }" - }, - "transition": "ApplyOrder", - "onErrors": [ - { - "errorRef": "Missing order id", - "transition": "MissingId" - }, - { - "errorRef": "Missing order item", - "transition": "MissingItem" - }, - { - "errorRef": "Missing order quantity", - "transition": "MissingQuantity" - } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingIdExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingItemExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingQuantityExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ - { - "subFlowRef": "applyOrderWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json deleted file mode 100644 index 2596b04..0000000 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "id": "order", - "name": "Purchase Order Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "StartNewOrder", - "timeouts": { - "workflowExecTimeout": { - "duration": "P30D", - "runBefore": "CancelOrder" - } - }, - "states": [ - { - "name": "StartNewOrder", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderCreatedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogNewOrderCreated" - } - } - ] - } - ], - "transition": { - "nextState": "WaitForOrderConfirmation" - } - }, - { - "name": "WaitForOrderConfirmation", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderConfirmedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderConfirmed" - } - } - ] - } - ], - "transition": { - "nextState": "WaitOrderShipped" - } - }, - { - "name": "WaitOrderShipped", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "ShipmentSentEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderShipped" - } - } - ] - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderFinishedEvent" - } - ] - } - }, - { - "name": "CancelOrder", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "CancelOrder" - } - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderCancelledEvent" - } - ] - } - } - ], - "events": [ - { - "name": "OrderCreatedEvent", - "type": "my.company.orders", - "source": "/orders/new", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderConfirmedEvent", - "type": "my.company.orders", - "source": "/orders/confirmed", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "ShipmentSentEvent", - 
"type": "my.company.orders", - "source": "/orders/shipped", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderFinishedEvent", - "type": "my.company.orders", - "kind": "produced" - }, - { - "name": "OrderCancelledEvent", - "type": "my.company.orders", - "kind": "produced" - } - ], - "functions": [ - { - "name": "LogNewOrderCreated", - "operation": "http.myorg.io/ordersservices.json#logcreated" - }, - { - "name": "LogOrderConfirmed", - "operation": "http.myorg.io/ordersservices.json#logconfirmed" - }, - { - "name": "LogOrderShipped", - "operation": "http.myorg.io/ordersservices.json#logshipped" - }, - { - "name": "CancelOrder", - "operation": "http.myorg.io/ordersservices.json#calcelorder" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json deleted file mode 100644 index 9040643..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": "file://testdata/timeouts.json", - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json deleted file mode 100644 index 90c7c62..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.sw.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } - }, - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "TemperatureEvent", - "HumidityEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": 
"/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcloudeventonprovision.json b/parser/testdata/workflows/sendcloudeventonprovision.json deleted file mode 100644 index 7e5bc37..0000000 --- a/parser/testdata/workflows/sendcloudeventonprovision.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "sendcloudeventonprovision", - "version": "1.0", - "specVersion": "0.8", - "name": "Send CloudEvent on provision completion", - "start": "ProvisionOrdersState", - "events": [ - { - "name": "provisioningCompleteEvent", - "type": "provisionCompleteType", - "kind": "produced" - } - ], - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioning.json#doProvision" - } - ], - "states": [ - { - "name": "ProvisionOrdersState", - "type": "foreach", - "inputCollection": "${ .orders }", - "iterationParam": "singleorder", - "outputCollection": "${ .provisionedOrders }", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .singleorder }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "provisioningCompleteEvent", - "data": "${ .provisionedOrders }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcustomeremail.json b/parser/testdata/workflows/sendcustomeremail.json deleted file mode 100644 index 7e8d010..0000000 --- a/parser/testdata/workflows/sendcustomeremail.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id": "sendcustomeremail", - "version": "1.0", - "specVersion": "0.8", - "name": "Send customer email workflow", - "description": "Send email to a customer", - "start": "Send Email", - "functions": [ - { - "name": "emailFunction", - "operation": "file://myapis/emailapis.json#sendEmail" - } - ], - "states": [ - { - "name": "Send Email", - "type": "operation", - "actions": [ - { - "functionRef": { - "invoke": "async", - "refName": "emailFunction", - "arguments": { - "customer": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/solvemathproblems.json b/parser/testdata/workflows/solvemathproblems.json deleted file mode 100644 index a3083d0..0000000 --- a/parser/testdata/workflows/solvemathproblems.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "id": "solvemathproblems", - "version": "1.0", - "specVersion": "0.8", - "name": "Solve Math Problems Workflow", - "description": "Solve math problems", - "start": "Solve", - "functions": [ - { - "name": "solveMathExpressionFunction", - "operation": "http://myapis.org/mapthapis.json#solveExpression" - } - ], - "states": [ - { - "name": "Solve", - "type": "foreach", - "inputCollection": "${ .expressions }", - "iterationParam": "singleexpression", - "outputCollection": "${ .results }", - "actions": [ - { - "functionRef": { - "refName": "solveMathExpressionFunction", - "arguments": { - "expression": "${ .singleexpression }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .results }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git 
a/parser/testdata/workflows/urifiles/auth.json b/parser/testdata/workflows/urifiles/auth.json deleted file mode 100644 index ff211df..0000000 --- a/parser/testdata/workflows/urifiles/auth.json +++ /dev/null @@ -1,17 +0,0 @@ -[ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } -] \ No newline at end of file diff --git a/parser/testdata/workflows/vitalscheck.json b/parser/testdata/workflows/vitalscheck.json deleted file mode 100644 index 3a89b78..0000000 --- a/parser/testdata/workflows/vitalscheck.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "vitalscheck", - "name": "Car Vitals Check", - "version": "1.0", - "specVersion": "0.8", - "start": "CheckVitals", - "states": [ - { - "name": "CheckVitals", - "type": "operation", - "actions": [ - { - "functionRef": "Check Tire Pressure" - }, - { - "functionRef": "Check Oil Pressure" - }, - { - "functionRef": "Check Coolant Level" - }, - { - "functionRef": "Check Battery" - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "DisplayChecksOnDashboard", - "data": "${ .evaluations }" - } - ] - } - } - ], - "functions": [ - { - "name": "Check Tire Pressure", - "operation": "mycarservices.json#checktirepressure" - }, - { - "name": "Check Oil Pressure", - "operation": "mycarservices.json#checkoilpressure" - }, - { - "name": "Check Coolant Level", - "operation": "mycarservices.json#checkcoolantlevel" - }, - { - "name": "Check Battery", - "operation": "mycarservices.json#checkbattery" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json deleted file mode 100644 index c0b72c8..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "P1S", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json 
b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json deleted file mode 100644 index e72712d..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json deleted file mode 100644 index d01c35e..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json 
b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json deleted file mode 100644 index 101b9bf..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/test/path.go b/test/path.go deleted file mode 100644 index 69c7113..0000000 --- a/test/path.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package test - -import ( - "os" - "path/filepath" - "runtime" - "strings" - - "github.com/pkg/errors" -) - -// CurrentProjectPath get the project root path -func CurrentProjectPath() string { - path := currentFilePath() - - ppath, err := filepath.Abs(filepath.Join(filepath.Dir(path), "../")) - if err != nil { - panic(errors.Wrapf(err, "Get current project path with %s failed", path)) - } - - f, err := os.Stat(ppath) - if err != nil { - panic(errors.Wrapf(err, "Stat project path %v failed", ppath)) - } - - if f.Mode()&os.ModeSymlink != 0 { - fpath, err := os.Readlink(ppath) - if err != nil { - panic(errors.Wrapf(err, "Readlink from path %v failed", fpath)) - } - ppath = fpath - } - - return ppath -} - -func currentFilePath() string { - _, file, _, _ := runtime.Caller(1) - if strings.HasSuffix(file, "/") { - return file - } - println("Returning an empty string for currentFilePath since it's not a caller path: " + file) - return "" -} diff --git a/test/path_test.go b/test/path_test.go deleted file mode 100644 index 4ccb672..0000000 --- a/test/path_test.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package test - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestCurrentProjectPath(t *testing.T) { - t.Run("normal test", func(t *testing.T) { - path := CurrentProjectPath() - - // NOTE: the '/code' path is used with code pipeline. - // When code running in the pipeline, the codebase will copy to /home/code directory. - assert.Regexp(t, "(/sdk-go$)|(/code$)", path) - }) -} diff --git a/test/utils.go b/test/utils.go new file mode 100644 index 0000000..d478edc --- /dev/null +++ b/test/utils.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "sigs.k8s.io/yaml" +) + +func AssertYAMLEq(t *testing.T, expected, actual string) { + var expectedMap, actualMap map[string]interface{} + + // Unmarshal the expected YAML + err := yaml.Unmarshal([]byte(expected), &expectedMap) + assert.NoError(t, err, "failed to unmarshal expected YAML") + + // Unmarshal the actual YAML + err = yaml.Unmarshal([]byte(actual), &actualMap) + assert.NoError(t, err, "failed to unmarshal actual YAML") + + // Assert equality of the two maps + assert.Equal(t, expectedMap, actualMap, "YAML structures do not match") +} diff --git a/tools.mod b/tools.mod index 69ff48c..203ee14 100644 --- a/tools.mod +++ b/tools.mod @@ -1,6 +1,6 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v3 -go 1.19 +go 1.22 require ( github.com/google/addlicense v0.0.0-20210428195630-6d92264d7170 // indirect diff --git a/util/floatstr/floatstr.go b/util/floatstr/floatstr.go deleted file mode 100644 index 7481271..0000000 --- a/util/floatstr/floatstr.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "strconv" - "strings" - - "github.com/go-playground/validator/v10" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -// Float32OrString is a type that can hold a float32 or a string. -// implementation borrowed from apimachinary intstr package: https://github.com/kubernetes/apimachinery/blob/master/pkg/util/intstr/intstr.go -type Float32OrString struct { - Type Type `json:"type,omitempty"` - FloatVal float32 `json:"floatVal,omitempty"` - StrVal string `json:"strVal,omitempty"` -} - -// Type represents the stored type of Float32OrString. -type Type int64 - -const ( - // Float ... - Float Type = iota // The Float32OrString holds a float. - // String ... - String // The Float32OrString holds a string. -) - -// FromFloat creates an Float32OrString object with a float32 value. It is -// your responsibility not to call this method with a value greater -// than float32. -func FromFloat(val float32) Float32OrString { - return Float32OrString{Type: Float, FloatVal: val} -} - -// FromString creates a Float32OrString object with a string value. -func FromString(val string) Float32OrString { - return Float32OrString{Type: String, StrVal: val} -} - -// Parse the given string and try to convert it to a float32 before -// setting it as a string value. -func Parse(val string) Float32OrString { - f, err := strconv.ParseFloat(val, 32) - if err != nil { - return FromString(val) - } - return FromFloat(float32(f)) -} - -// UnmarshalJSON implements the json.Unmarshaller interface. 
-func (floatstr *Float32OrString) UnmarshalJSON(value []byte) error { - if value[0] == '"' { - floatstr.Type = String - return json.Unmarshal(value, &floatstr.StrVal) - } - floatstr.Type = Float - return json.Unmarshal(value, &floatstr.FloatVal) -} - -// MarshalJSON implements the json.Marshaller interface. -func (floatstr *Float32OrString) MarshalJSON() ([]byte, error) { - switch floatstr.Type { - case Float: - return json.Marshal(floatstr.FloatVal) - case String: - return json.Marshal(floatstr.StrVal) - default: - return []byte{}, fmt.Errorf("impossible Float32OrString.Type") - } -} - -// String returns the string value, or the float value. -func (floatstr *Float32OrString) String() string { - if floatstr == nil { - return "" - } - if floatstr.Type == String { - return floatstr.StrVal - } - return strconv.FormatFloat(float64(floatstr.FloatValue()), 'E', -1, 32) -} - -// FloatValue returns the FloatVal if type float32, or if -// it is a String, will attempt a conversion to float32, -// returning 0 if a parsing error occurs. -func (floatstr *Float32OrString) FloatValue() float32 { - if floatstr.Type == String { - f, _ := strconv.ParseFloat(floatstr.StrVal, 32) - return float32(f) - } - return floatstr.FloatVal -} - -func init() { - val.GetValidator().RegisterCustomTypeFunc(func(fl reflect.Value) interface{} { - if fl.Kind() != reflect.Struct { - return errors.New("invalid type: expected Float32OrString") - } - - // Get the Float32OrString value - _, ok := fl.Interface().(Float32OrString) - if !ok { - return fmt.Errorf("invalid type: expected Float32OrString") - } - - return nil - }, Float32OrString{}) -} - -func ValidateFloat32OrString(sl validator.StructLevel) { - // Get the current struct being validated. - current := sl.Current() - - for i := 0; i < current.NumField(); i++ { - field := current.Type().Field(i) - value := current.Field(i) - - // Check if the field is a pointer and handle nil pointers. - if value.Kind() == reflect.Ptr { - if value.IsNil() { - continue // Skip nil pointers. - } - value = value.Elem() // Dereference the pointer. - } - - // Check if the field is of type Float32OrString. - if value.Type() == reflect.TypeOf(Float32OrString{}) { - // Extract validation tags from the field. - tags := field.Tag.Get("validate") - - // Split tags and look for min/max. - tagList := strings.Split(tags, ",") - for _, tag := range tagList { - if strings.HasPrefix(tag, "min=") { - minVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "min="), 32) - if err != nil { - sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") - continue - } - - if value.FieldByName("FloatVal").Float() < minVal { - sl.ReportError(value.Interface(), field.Name, field.Name, "min", "") - } - } - - if strings.HasPrefix(tag, "max=") { - maxVal, err := strconv.ParseFloat(strings.TrimPrefix(tag, "max="), 32) - if err != nil { - sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") - continue - } - - if value.FieldByName("FloatVal").Float() > maxVal { - sl.ReportError(value.Interface(), field.Name, field.Name, "max", "") - } - } - } - } - } -} diff --git a/util/floatstr/floatstr_test.go b/util/floatstr/floatstr_test.go deleted file mode 100644 index ee25fbe..0000000 --- a/util/floatstr/floatstr_test.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "reflect" - "testing" - - "k8s.io/apimachinery/pkg/util/yaml" -) - -func TestFromFloat(t *testing.T) { - i := FromFloat(93.93) - if i.Type != Float || i.FloatVal != 93.93 { - t.Errorf("Expected FloatVal=93.93, got %+v", i) - } -} - -func TestFromString(t *testing.T) { - i := FromString("76.76") - if i.Type != String || i.StrVal != "76.76" { - t.Errorf("Expected StrVal=\"76.76\", got %+v", i) - } -} - -type FloatOrStringHolder struct { - FOrS Float32OrString `json:"val"` -} - -func TestIntOrStringUnmarshalJSON(t *testing.T) { - cases := []struct { - input string - result Float32OrString - }{ - {"{\"val\": 123.123}", FromFloat(123.123)}, - {"{\"val\": \"123.123\"}", FromString("123.123")}, - } - - for _, c := range cases { - var result FloatOrStringHolder - if err := json.Unmarshal([]byte(c.input), &result); err != nil { - t.Errorf("Failed to unmarshal input '%v': %v", c.input, err) - } - if result.FOrS != c.result { - t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result) - } - } -} - -func TestIntOrStringMarshalJSON(t *testing.T) { - cases := []struct { - input Float32OrString - result string - }{ - {FromFloat(123.123), "{\"val\":123.123}"}, - {FromString("123.123"), "{\"val\":\"123.123\"}"}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - result, err := json.Marshal(&input) - if err != nil { - t.Errorf("Failed to marshal input '%v': %v", input, err) - } - if string(result) != c.result { - t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result)) - } - } -} - -func TestIntOrStringMarshalJSONUnmarshalYAML(t *testing.T) { - cases := []struct { - input Float32OrString - }{ - {FromFloat(123.123)}, - {FromString("123.123")}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - jsonMarshalled, err := json.Marshal(&input) - if err != nil { - t.Errorf("1: Failed to marshal input: '%v': %v", input, err) - } - - var result FloatOrStringHolder - err = yaml.Unmarshal(jsonMarshalled, &result) - if err != nil { - t.Errorf("2: Failed to unmarshal '%+v': %v", string(jsonMarshalled), err) - } - - if !reflect.DeepEqual(input, result) { - t.Errorf("3: Failed to marshal input '%+v': got %+v", input, result) - } - } -} diff --git a/util/unmarshal.go b/util/unmarshal.go deleted file mode 100644 index d00e9d2..0000000 --- a/util/unmarshal.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2020 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package util - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "net/http" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "sync/atomic" - "time" - - "sigs.k8s.io/yaml" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -// Kind ... -// +k8s:deepcopy-gen=false -type Kind interface { - KindValues() []string - String() string -} - -// TODO: Remove global variable -var HttpClient = http.Client{Timeout: time.Duration(1) * time.Second} - -// UnmarshalError ... -// +k8s:deepcopy-gen=false -type UnmarshalError struct { - err error - parameterName string - primitiveType reflect.Kind - objectType reflect.Kind -} - -func (e *UnmarshalError) Error() string { - if e.err == nil { - panic("unmarshalError fail") - } - - var syntaxErr *json.SyntaxError - var unmarshalTypeErr *json.UnmarshalTypeError - if errors.As(e.err, &syntaxErr) { - return fmt.Sprintf("%s has a syntax error %q", e.parameterName, syntaxErr.Error()) - - } else if errors.As(e.err, &unmarshalTypeErr) { - return e.unmarshalMessageError(unmarshalTypeErr) - } - - return e.err.Error() -} - -func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) string { - if err.Struct == "" && err.Field == "" { - primitiveTypeName := e.primitiveType.String() - - // in some cases the e.primitiveType might be invalid, one of the reasons is because it is nil - // default to string in that case - if e.primitiveType == reflect.Invalid { - primitiveTypeName = "string" - } - - var objectTypeName string - if e.objectType != reflect.Invalid { - switch e.objectType { - case reflect.Struct: - objectTypeName = "object" - case reflect.Map: - objectTypeName = "object" - case reflect.Slice: - objectTypeName = "array" - default: - objectTypeName = e.objectType.String() - } - } - return fmt.Sprintf("%s must be %s or %s", e.parameterName, primitiveTypeName, objectTypeName) - - } else if err.Struct != "" && err.Field != "" { - var primitiveTypeName string - value := reflect.New(err.Type) - if valKinds, ok := value.Elem().Interface().(val.Kind); ok { - values := valKinds.KindValues() - if len(values) <= 2 { - primitiveTypeName = strings.Join(values, " or ") - } else { - primitiveTypeName = fmt.Sprintf("%s, %s", strings.Join(values[:len(values)-2], ", "), strings.Join(values[len(values)-2:], " or ")) - } - } else { - primitiveTypeName = err.Type.Name() - } - - return fmt.Sprintf("%s.%s must be %s", e.parameterName, err.Field, primitiveTypeName) - } - - return err.Error() -} - -func LoadExternalResource(url string) (b []byte, err error) { - index := strings.Index(url, "://") - if index == -1 { - b, err = getBytesFromFile(url) - } else { - scheme := url[:index] - switch scheme { - case "http", "https": - b, err = getBytesFromHttp(url) - case "file": - b, err = getBytesFromFile(url[index+3:]) - default: - return nil, fmt.Errorf("unsupported scheme: %q", scheme) - } - } - if err != nil { - return - } - - // TODO: optimize this - // NOTE: In specification, we can declare independent definitions with another file format, so - // we must convert independently yaml source to json format data before unmarshal. 
- if !json.Valid(b) { - b, err = yaml.YAMLToJSON(b) - if err != nil { - return nil, err - } - return b, nil - } - - return b, nil -} - -func getBytesFromFile(path string) ([]byte, error) { - if WebAssembly() { - return nil, fmt.Errorf("unsupported open file") - } - - // if path is relative, search in include paths - if !filepath.IsAbs(path) { - paths := IncludePaths() - pathFound := false - for i := 0; i < len(paths) && !pathFound; i++ { - sn := filepath.Join(paths[i], path) - _, err := os.Stat(sn) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - return nil, err - } - } else { - path = sn - pathFound = true - } - } - if !pathFound { - return nil, fmt.Errorf("file not found: %q", path) - } - } - - return os.ReadFile(filepath.Clean(path)) -} - -func getBytesFromHttp(url string) ([]byte, error) { - req, err := http.NewRequest(http.MethodGet, url, nil) - if err != nil { - return nil, err - } - - resp, err := HttpClient.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - buf := new(bytes.Buffer) - if _, err = buf.ReadFrom(resp.Body); err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -// +k8s:deepcopy-gen=false -func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { - var valString string - err := UnmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) - if err != nil || valString == "" { - return err - } - - // Assumes that the value inside `data` is a path to a known location. - // Returns the content of the file or a not nil error reference. - data, err = LoadExternalResource(valString) - if err != nil { - return err - } - - data = bytes.TrimSpace(data) - if data[0] != '{' && data[0] != '[' { - return errors.New("invalid external resource definition") - } - - if data[0] == '[' && parameterName != "auth" && parameterName != "secrets" { - return errors.New("invalid external resource definition") - } - - data = bytes.TrimSpace(data) - if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" && parameterName != "schema" { - extractData := map[string]json.RawMessage{} - err = json.Unmarshal(data, &extractData) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: reflect.TypeOf(*valObject).Kind(), - } - } - - var ok bool - if data, ok = extractData[parameterName]; !ok { - return fmt.Errorf("external resource parameter not found: %q", parameterName) - } - } - - return UnmarshalObject(parameterName, data, valObject) -} - -func UnmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("%s no bytes to unmarshal", parameterName) - } - - isObject := data[0] == '{' || data[0] == '[' - var err error - if isObject { - err = UnmarshalObject(parameterName, data, valStruct) - } else { - err = unmarshalPrimitive(parameterName, data, valPrimitive) - } - - var unmarshalError *UnmarshalError - if errors.As(err, &unmarshalError) { - unmarshalError.objectType = reflect.TypeOf(*valStruct).Kind() - unmarshalError.primitiveType = reflect.TypeOf(*valPrimitive).Kind() - } - - return err -} - -func unmarshalPrimitive[T string | bool](parameterName string, data []byte, value *T) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: 
reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -func UnmarshalObject[U any](parameterName string, data []byte, value *U) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - objectType: reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -var defaultIncludePaths atomic.Value - -func init() { - // No execute set include path to suport webassembly - if WebAssembly() { - return - } - - wd, err := os.Getwd() - if err != nil { - panic(err) - } - SetIncludePaths([]string{wd}) -} - -// IncludePaths will return the search path for non-absolute import file -func IncludePaths() []string { - return defaultIncludePaths.Load().([]string) -} - -// SetIncludePaths will update the search path for non-absolute import file -func SetIncludePaths(paths []string) { - for _, path := range paths { - if !filepath.IsAbs(path) { - panic(fmt.Errorf("%s must be an absolute file path", path)) - } - } - - defaultIncludePaths.Store(paths) -} - -func WebAssembly() bool { - return runtime.GOOS == "js" && runtime.GOARCH == "wasm" -} diff --git a/util/unmarshal_benchmark_test.go b/util/unmarshal_benchmark_test.go deleted file mode 100644 index 1a81b41..0000000 --- a/util/unmarshal_benchmark_test.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "fmt" - "testing" -) - -func Benchmark_IncludePaths_Parallel(b *testing.B) { - b.RunParallel(func(p *testing.PB) { - i := 0 - for p.Next() { - IncludePaths() - SetIncludePaths([]string{fmt.Sprintf("%v", i)}) - i++ - } - }) -} diff --git a/util/unmarshal_test.go b/util/unmarshal_test.go deleted file mode 100644 index f7051fb..0000000 --- a/util/unmarshal_test.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package util - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/serverlessworkflow/sdk-go/v2/test" -) - -func TestIncludePaths(t *testing.T) { - assert.NotNil(t, IncludePaths()) - assert.True(t, len(IncludePaths()) > 0) - - // update include paths - initialPaths := IncludePaths() - paths := []string{"/root", "/path"} - SetIncludePaths(paths) - assert.Equal(t, IncludePaths(), paths) - - assert.PanicsWithError(t, "1 must be an absolute file path", assert.PanicTestFunc(func() { - SetIncludePaths([]string{"1"}) - })) - - SetIncludePaths(initialPaths) -} - -func Test_loadExternalResource(t *testing.T) { - SetIncludePaths(append(IncludePaths(), filepath.Join(test.CurrentProjectPath()))) - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte("{}")) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - data, err := LoadExternalResource(server.URL + "/test.json") - assert.NoError(t, err) - assert.Equal(t, "{}", string(data)) - - data, err = LoadExternalResource("parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("file://../parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("./parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - _, err = LoadExternalResource("ftp://test.yml") - assert.ErrorContains(t, err, "unsupported scheme: \"ftp\"") -} - -func Test_unmarshalObjectOrFile(t *testing.T) { - t.Run("httptest", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - type listStructString []structString - - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"listStructString":[{"fieldValue": "value"}]}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - structValue := &structString{} - data := []byte(`"fieldValue": "value"`) - err := UnmarshalObjectOrFile("structString", data, structValue) - assert.Error(t, err) - assert.Equal(t, &structString{}, structValue) - - listStructValue := &listStructString{} - data = []byte(`[{"fieldValue": "value"}]`) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, 
*listStructValue) - - listStructValue = &listStructString{} - data = []byte(fmt.Sprintf(`"%s/test.json"`, server.URL)) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) - }) -} - -func Test_primitiveOrMapType(t *testing.T) { - type dataMap map[string]json.RawMessage - - t.Run("unmarshal", func(t *testing.T) { - var valBool bool - valMap := &dataMap{} - data := []byte(`"value":true`) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`{value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`true`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.True(t, valBool) - - valString := "" - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.Equal(t, `true`, valString) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) - assert.False(t, valBool) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value": "true"}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) - assert.False(t, valBool) - }) - - t.Run("test personalized syntaxError error message", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - - var valString string - valStruct := &structString{} - data := []byte(`{"fieldValue": "value"`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"unexpected end of JSON input\"", err.Error()) - - data = []byte(`{\n "fieldValue": value\n}`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"invalid character '\\\\\\\\' looking for beginning of object key string\"", err.Error()) - // assert.Equal(t, `structBool value '{"fieldValue": value}' is not supported, it has a syntax error "invalid character 'v' looking for beginning of value"`, err.Error()) - }) - - t.Run("test personalized unmarshalTypeError error message", func(t *testing.T) { - type structBool struct { - FieldValue bool `json:"fieldValue"` - } - - var valBool bool - valStruct := &structBool{} - data := []byte(`{ - "fieldValue": "true" -}`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool.fieldValue must be bool", err.Error()) - - valBool = false - valStruct = &structBool{} - data = 
[]byte(`"true"`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool must be bool or object", err.Error()) - }) - - t.Run("check json with spaces", func(t *testing.T) { - var valBool bool - valStruct := &dataMap{} - data := []byte(` {"value": "true"} `) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valBool = false - valStruct = &dataMap{} - data = []byte(` true `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valString := "" - valStruct = &dataMap{} - data = []byte(` "true" `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - }) - - t.Run("check tabs", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\t') + `"true"` + string('\t')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\t') + `true` + string('\t')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("check breakline", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\n') + `"true"` + string('\n')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\n') + `true` + string('\n')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("test recursivity and default value", func(t *testing.T) { - valStruct := &structBool{} - data := []byte(`{"fieldValue": false}`) - err := json.Unmarshal(data, valStruct) - assert.NoError(t, err) - assert.False(t, valStruct.FieldValue) - }) -} - -type structBool struct { - FieldValue bool `json:"fieldValue"` -} - -type structBoolUnmarshal structBool - -func (s *structBool) UnmarshalJSON(data []byte) error { - s.FieldValue = true - return UnmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) -} diff --git a/validator/tags.go b/validator/tags.go deleted file mode 100644 index e568aba..0000000 --- a/validator/tags.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -const ( - // TagISO8601Duration is the validate tag for iso8601 time duration format - TagISO8601Duration = "iso8601duration" -) diff --git a/validator/validator.go b/validator/validator.go deleted file mode 100644 index f241f84..0000000 --- a/validator/validator.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "context" - "errors" - "strconv" - - "github.com/relvacode/iso8601" - "github.com/sosodev/duration" - "k8s.io/apimachinery/pkg/util/intstr" - - validator "github.com/go-playground/validator/v10" -) - -// TODO: expose a better validation message. See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation - -type Kind interface { - KindValues() []string - String() string -} - -var validate *validator.Validate - -func init() { - validate = validator.New() - - err := validate.RegisterValidationCtx("iso8601duration", validateISO8601TimeDurationFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidationCtx("iso8601datetime", validateISO8601DatetimeFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidation("oneofkind", oneOfKind) - if err != nil { - panic(err) - } -} - -// GetValidator gets the default validator.Validate reference -func GetValidator() *validator.Validate { - return validate -} - -// ValidateISO8601TimeDuration validate the string is iso8601 duration format -func ValidateISO8601TimeDuration(s string) error { - if s == "" { - return errors.New("could not parse duration string") - } - _, err := duration.Parse(s) - if err != nil { - return errors.New("could not parse duration string") - } - return err -} - -func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601TimeDuration(fl.Field().String()) - return err == nil -} - -// ValidateISO8601Datetime validate the string is iso8601 Datetime format -func ValidateISO8601Datetime(s string) error { - _, err := iso8601.ParseString(s) - return err -} - -func validateISO8601DatetimeFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601Datetime(fl.Field().String()) - return err == nil -} - -func oneOfKind(fl validator.FieldLevel) bool { - if val, ok := fl.Field().Interface().(Kind); ok { - for _, value := range val.KindValues() { - if value == val.String() { - return true - } - } - } - - return false -} - -func ValidateGt0IntStr(value *intstr.IntOrString) bool { - switch value.Type { - case intstr.Int: - if value.IntVal <= 0 { - return false - } - case intstr.String: - v, err := strconv.Atoi(value.StrVal) - if err != nil { - return false - } - - if v <= 0 { - return false - } - } - - return true -} diff --git a/validator/validator_test.go b/validator/validator_test.go deleted file mode 100644 index daab56a..0000000 --- a/validator/validator_test.go +++ /dev/null @@ -1,228 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestValidateISO8601TimeDuration(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "normal_all_designator", - s: "P3Y6M4DT12H30M5S", - err: ``, - }, - { - desp: "normal_second_designator", - s: "PT5S", - err: ``, - }, - { - desp: "fractional_second_designator", - s: "PT0.5S", - err: ``, - }, - { - desp: "empty value", - s: "", - err: `could not parse duration string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601TimeDuration(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestValidateISO8601Timestamp(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "workflow_spec_example", - s: "2021-11-05T08:15:30-05:00", - err: ``, - }, - { - desp: "datetime", - s: "2023-09-08T20:15:46+00:00", - err: ``, - }, - { - desp: "date", - s: "2023-09-08", - err: ``, - }, - { - desp: "time", - s: "13:15:33.074-07:00", - err: "iso8601: Unexpected character `:`", - }, - { - desp: "empty value", - s: "", - err: `iso8601: Cannot parse "": month 0 is not in range 1-12`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601Datetime(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -type testKind string - -func (k testKind) KindValues() []string { - return []string{"test1", "test2"} -} - -func (k testKind) String() string { - return string(k) -} - -type testKindInvalid string - -func (k testKindInvalid) AllValuesInvalid() []string { - return []string{"test1", "test2"} -} - -func (k testKindInvalid) String() string { - return string(k) -} - -func Test_oneOfKind(t *testing.T) { - validate := GetValidator() - - t.Run("kind without kindInvalid", func(t *testing.T) { - spec := struct { - f interface{} - t string - }{ - f: testKindInvalid("test1"), t: "oneofkind", - } - - errs := validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) - - t.Run("kind", func(t *testing.T) { - spec := struct { - f testKind - t string - }{ - f: testKind("test1"), t: "oneofkind", - } - errs := validate.Var(spec.f, spec.t) - assert.NoError(t, errs) - - spec = struct { - f testKind - t string - }{ - f: testKind("test3"), t: "oneofkind", - } - errs = validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) -} - -func TestValidateIntStr(t *testing.T) { - - testCase := []struct { - Desp string - Test *intstr.IntOrString - Return bool - }{ - { - Desp: "success int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - }, - Return: true, - }, - { - Desp: "success string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", - }, - Return: true, - }, - { - Desp: "fail int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - }, - Return: false, - }, - { - Desp: "fail string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", - }, - Return: false, - }, - { - Desp: "fail invalid string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "aa", - }, - Return: false, - }, - } - - for _, c := range testCase { - 
t.Run(c.Desp, func(t *testing.T) { - valid := ValidateGt0IntStr(c.Test) - assert.Equal(t, c.Return, valid) - }) - } -} diff --git a/validator/workflow.go b/validator/workflow.go deleted file mode 100644 index d5be7b5..0000000 --- a/validator/workflow.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "errors" - "fmt" - "reflect" - "strings" - - validator "github.com/go-playground/validator/v10" -) - -const ( - TagExists string = "exists" - TagRequired string = "required" - TagExclusive string = "exclusive" - - TagRecursiveState string = "recursivestate" - - // States referenced by compensatedBy (as well as any other states that they transition to) must obey following rules: - TagTransitionMainWorkflow string = "transtionmainworkflow" // They should not have any incoming transitions (should not be part of the main workflow control-flow logic) - TagCompensatedbyEventState string = "compensatedbyeventstate" // They cannot be an event state - TagRecursiveCompensation string = "recursivecompensation" // They cannot themselves set their compensatedBy property to true (compensation is not recursive) - TagCompensatedby string = "compensatedby" // They must define the usedForCompensation property and set it to true - TagTransitionUseForCompensation string = "transitionusedforcompensation" // They can transition only to states which also have their usedForCompensation property and set to true -) - -type WorkflowErrors []error - -func (e WorkflowErrors) Error() string { - errors := []string{} - for _, err := range []error(e) { - errors = append(errors, err.Error()) - } - return strings.Join(errors, "\n") -} - -func WorkflowError(err error) error { - if err == nil { - return nil - } - - var invalidErr *validator.InvalidValidationError - if errors.As(err, &invalidErr) { - return err - } - - var validationErrors validator.ValidationErrors - if !errors.As(err, &validationErrors) { - return err - } - - removeNamespace := []string{ - "BaseWorkflow", - "BaseState", - "OperationState", - } - - workflowErrors := []error{} - for _, err := range validationErrors { - // normalize namespace - namespaceList := strings.Split(err.Namespace(), ".") - normalizedNamespaceList := []string{} - for i := range namespaceList { - part := namespaceList[i] - if !contains(removeNamespace, part) { - part := strings.ToLower(part[:1]) + part[1:] - normalizedNamespaceList = append(normalizedNamespaceList, part) - } - } - namespace := strings.Join(normalizedNamespaceList, ".") - - switch err.Tag() { - case "unique": - if err.Param() == "" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate value", namespace)) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate %q", namespace, strings.ToLower(err.Param()))) - } - case "min": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must have the minimum %s", namespace, err.Param())) - case 
"required_without": - if namespace == "workflow.iD" { - workflowErrors = append(workflowErrors, errors.New("workflow.id required when \"workflow.key\" is not defined")) - } else if namespace == "workflow.key" { - workflowErrors = append(workflowErrors, errors.New("workflow.key required when \"workflow.id\" is not defined")) - } else if err.StructField() == "FunctionRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s required when \"eventRef\" or \"subFlowRef\" is not defined", namespace)) - } else { - workflowErrors = append(workflowErrors, err) - } - case "oneofkind": - value := reflect.New(err.Type()).Elem().Interface().(Kind) - workflowErrors = append(workflowErrors, fmt.Errorf("%s need by one of %s", namespace, value.KindValues())) - case "gt0": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must be greater than 0", namespace)) - case TagExists: - workflowErrors = append(workflowErrors, fmt.Errorf("%s don't exist %q", namespace, err.Value())) - case TagRequired: - workflowErrors = append(workflowErrors, fmt.Errorf("%s is required", namespace)) - case TagExclusive: - if err.StructField() == "ErrorRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s or %s are exclusive", namespace, replaceLastNamespace(namespace, "errorRefs"))) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s exclusive", namespace)) - } - case TagCompensatedby: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is not defined as usedForCompensation", namespace, err.Value())) - case TagCompensatedbyEventState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation and cannot be an event state", namespace, err.Value())) - case TagRecursiveCompensation: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation (cannot themselves set their compensatedBy)", namespace, err.Value())) - case TagRecursiveState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s can't no be recursive %q", namespace, strings.ToLower(err.Param()))) - case TagISO8601Duration: - workflowErrors = append(workflowErrors, fmt.Errorf("%s invalid iso8601 duration %q", namespace, err.Value())) - default: - workflowErrors = append(workflowErrors, err) - } - } - - return WorkflowErrors(workflowErrors) -} - -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} - -func replaceLastNamespace(namespace, replace string) string { - index := strings.LastIndex(namespace, ".") - if index == -1 { - return namespace - } - - return fmt.Sprintf("%s.%s", namespace[:index], replace) -} From 15e2f105eabb332acab5e509cf9e172adc258743 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 25 Mar 2025 10:07:01 -0300 Subject: [PATCH 100/110] Fix #222 - Basic raw implementation for DSL 1.0.0 (#224) * Fix #222 - (WIP): Basic raw implementation for DSL 1.0.0 Signed-off-by: Ricardo Zanini * Evaluate expressions, statusphase, schema validation, export, as, from Signed-off-by: Ricardo Zanini * Add raise task Signed-off-by: Ricardo Zanini * Task Do implementation and refactoring Signed-off-by: Ricardo Zanini * Upgrade Upload Artifact Signed-off-by: Ricardo Zanini * Add missing license headers Signed-off-by: Ricardo Zanini * Fix lint Signed-off-by: Ricardo Zanini * Add partial 'For' implementation Signed-off-by: Ricardo Zanini * Add implementation docs Signed-off-by: Ricardo Zanini * Solve lint issues Signed-off-by: Ricardo 
Zanini * Readd releases table to README Signed-off-by: Ricardo Zanini --------- Signed-off-by: Ricardo Zanini --- .github/workflows/Go-SDK-PR-Check.yaml | 4 +- README.md | 227 ++++++---- builder/builder_test.go | 6 +- expr/expr.go | 112 +++++ go.mod | 3 + go.sum | 10 + impl/context.go | 151 +++++++ impl/json_schema.go | 70 +++ impl/runner.go | 124 ++++++ impl/status_phase.go | 52 +++ impl/task_runner.go | 252 +++++++++++ impl/task_runner_do.go | 178 ++++++++ impl/task_runner_raise_test.go | 165 +++++++ impl/task_runner_test.go | 330 ++++++++++++++ impl/task_set_test.go | 416 ++++++++++++++++++ impl/testdata/chained_set_tasks.yaml | 29 ++ impl/testdata/concatenating_strings.yaml | 31 ++ impl/testdata/conditional_logic.yaml | 26 ++ .../conditional_logic_input_from.yaml | 25 ++ impl/testdata/for_colors.yaml | 28 ++ impl/testdata/raise_conditional.yaml | 32 ++ impl/testdata/raise_error_with_input.yaml | 27 ++ impl/testdata/raise_inline.yaml | 27 ++ impl/testdata/raise_reusable.yaml | 30 ++ impl/testdata/raise_undefined_reference.yaml | 23 + impl/testdata/sequential_set_colors.yaml | 31 ++ .../sequential_set_colors_output_as.yaml | 31 ++ impl/testdata/set_tasks_invalid_then.yaml | 27 ++ impl/testdata/set_tasks_with_termination.yaml | 27 ++ impl/testdata/set_tasks_with_then.yaml | 30 ++ impl/testdata/task_export_schema.yaml | 32 ++ impl/testdata/task_input_schema.yaml | 32 ++ impl/testdata/task_output_schema.yaml | 32 ++ ...task_output_schema_with_dynamic_value.yaml | 32 ++ impl/testdata/workflow_input_schema.yaml | 32 ++ impl/utils.go | 81 ++++ model/endpoint.go | 9 + model/endpoint_test.go | 10 +- model/errors.go | 324 ++++++++++++++ model/errors_test.go | 139 ++++++ model/extension_test.go | 2 +- model/objects.go | 79 ++++ model/runtime_expression.go | 23 +- model/task.go | 138 +++--- model/task_call.go | 20 + model/task_do.go | 4 + model/task_event.go | 8 + model/task_for.go | 4 + model/task_for_test.go | 3 +- model/task_fork.go | 4 + model/task_raise.go | 20 +- model/task_raise_test.go | 8 +- model/task_run.go | 4 + model/task_set.go | 4 + model/task_switch.go | 4 + model/task_test.go | 69 ++- model/task_try.go | 4 + model/task_wait.go | 4 + model/validator.go | 3 +- model/workflow.go | 5 + model/workflow_test.go | 18 +- parser/cmd/main.go | 3 +- 62 files changed, 3448 insertions(+), 230 deletions(-) create mode 100644 expr/expr.go create mode 100644 impl/context.go create mode 100644 impl/json_schema.go create mode 100644 impl/runner.go create mode 100644 impl/status_phase.go create mode 100644 impl/task_runner.go create mode 100644 impl/task_runner_do.go create mode 100644 impl/task_runner_raise_test.go create mode 100644 impl/task_runner_test.go create mode 100644 impl/task_set_test.go create mode 100644 impl/testdata/chained_set_tasks.yaml create mode 100644 impl/testdata/concatenating_strings.yaml create mode 100644 impl/testdata/conditional_logic.yaml create mode 100644 impl/testdata/conditional_logic_input_from.yaml create mode 100644 impl/testdata/for_colors.yaml create mode 100644 impl/testdata/raise_conditional.yaml create mode 100644 impl/testdata/raise_error_with_input.yaml create mode 100644 impl/testdata/raise_inline.yaml create mode 100644 impl/testdata/raise_reusable.yaml create mode 100644 impl/testdata/raise_undefined_reference.yaml create mode 100644 impl/testdata/sequential_set_colors.yaml create mode 100644 impl/testdata/sequential_set_colors_output_as.yaml create mode 100644 impl/testdata/set_tasks_invalid_then.yaml create mode 100644 
impl/testdata/set_tasks_with_termination.yaml create mode 100644 impl/testdata/set_tasks_with_then.yaml create mode 100644 impl/testdata/task_export_schema.yaml create mode 100644 impl/testdata/task_input_schema.yaml create mode 100644 impl/testdata/task_output_schema.yaml create mode 100644 impl/testdata/task_output_schema_with_dynamic_value.yaml create mode 100644 impl/testdata/workflow_input_schema.yaml create mode 100644 impl/utils.go create mode 100644 model/errors.go create mode 100644 model/errors_test.go diff --git a/.github/workflows/Go-SDK-PR-Check.yaml b/.github/workflows/Go-SDK-PR-Check.yaml index 8d4da2f..9e9416c 100644 --- a/.github/workflows/Go-SDK-PR-Check.yaml +++ b/.github/workflows/Go-SDK-PR-Check.yaml @@ -93,7 +93,7 @@ jobs: run: go test ./... -coverprofile=test_coverage.out -covermode=atomic - name: Upload Coverage Report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Test Coverage Report path: test_coverage.out @@ -120,7 +120,7 @@ jobs: - name: Upload JUnit Report if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Integration Test JUnit Report path: ./integration-test-junit.xml diff --git a/README.md b/README.md index 786333e..9daabf0 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Go SDK for Serverless Workflow -The Go SDK for Serverless Workflow provides the [specification types](https://github.com/serverlessworkflow/specification/blob/v1.0.0-alpha5/schema/workflow.yaml) defined by the Serverless Workflow DSL in Go, making it easy to parse, validate, and interact with workflows. +The Go SDK for Serverless Workflow provides strongly-typed structures for the [Serverless Workflow specification](https://github.com/serverlessworkflow/specification/blob/v1.0.0/schema/workflow.yaml). It simplifies parsing, validating, and interacting with workflows in Go. Starting from version `v3.1.0`, the SDK also includes a partial reference implementation, allowing users to execute workflows directly within their Go applications. 
--- @@ -10,8 +10,11 @@ The Go SDK for Serverless Workflow provides the [specification types](https://gi - [Releases](#releases) - [Getting Started](#getting-started) - [Installation](#installation) + - [Basic Usage](#basic-usage) - [Parsing Workflow Files](#parsing-workflow-files) - [Programmatic Workflow Creation](#programmatic-workflow-creation) +- [Reference Implementation](#reference-implementation) + - [Example: Running a Workflow](#example-running-a-workflow) - [Slack Community](#slack-community) - [Contributing](#contributing) - [Code Style](#code-style) @@ -22,160 +25,190 @@ The Go SDK for Serverless Workflow provides the [specification types](https://gi ## Status -The current status of features implemented in the SDK is listed below: +This table indicates the current state of implementation of various SDK features: -| Feature | Status | -|-------------------------------------------- | ------------------ | -| Parse workflow JSON and YAML definitions | :heavy_check_mark: | -| Programmatically build workflow definitions | :heavy_check_mark: | -| Validate workflow definitions (Schema) | :heavy_check_mark: | -| Validate workflow definitions (Integrity) | :no_entry_sign: | -| Generate workflow diagram (SVG) | :no_entry_sign: | +| Feature | Status | +|-------------------------------------------- |---------------------| +| Parse workflow JSON and YAML definitions | :heavy_check_mark: | +| Programmatically build workflow definitions | :heavy_check_mark: | +| Validate workflow definitions (Schema) | :heavy_check_mark: | +| Specification Implementation | :heavy_check_mark:* | +| Validate workflow definitions (Integrity) | :no_entry_sign: | +| Generate workflow diagram (SVG) | :no_entry_sign: | + +> **Note**: *Implementation is partial; contributions are encouraged. 
--- ## Releases -| Latest Releases | Conformance to Spec Version | -|:--------------------------------------------------------------------------:|:------------------------------------------------------------------------:| -| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | -| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | -| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.4.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | -| [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0-alpha5) | +| Latest Releases | Conformance to Spec Version | +|:--------------------------------------------------------------------------:|:---------------------------------------------------------------------------------:| +| [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | +| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | +| [v2.4.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0) | --- -## Getting Started - -### Installation - -To use the SDK in your Go project, run the following command: - -```shell -$ go get github.com/serverlessworkflow/sdk-go/v3 -``` - -This will update your `go.mod` file to include the Serverless Workflow SDK as a dependency. - -Import the SDK in your Go file: - -```go -import "github.com/serverlessworkflow/sdk-go/v3/model" -``` - -You can now use the SDK types and functions, for example: +## Reference Implementation -```go -package main +The SDK provides a partial reference runner to execute your workflows: -import ( - "github.com/serverlessworkflow/sdk-go/v3/builder" - "github.com/serverlessworkflow/sdk-go/v3/model" -) +### Example: Running a Workflow -func main() { - workflowBuilder := New(). - SetDocument("1.0.0", "examples", "example-workflow", "1.0.0"). - AddTask("task1", &model.CallHTTP{ - TaskBase: model.TaskBase{ - If: &model.RuntimeExpression{Value: "${condition}"}, - }, - Call: "http", - With: model.HTTPArguments{ - Method: "GET", - Endpoint: model.NewEndpoint("http://example.com"), - }, - }) - workflow, _ := builder.Object(workflowBuilder) - // use your models -} +Below is a simple YAML workflow that sets a message and then prints it: +```yaml +document: + dsl: "1.0.0" + namespace: "examples" + name: "simple-workflow" + version: "1.0.0" +do: + - set: + message: "Hello from the Serverless Workflow SDK in Go!" ``` -### Parsing Workflow Files +You can execute this workflow using the following Go program: -The Serverless Workflow Specification supports YAML and JSON files. 
Use the following example to parse a workflow file into a Go data structure: +Example of executing a workflow defined in YAML: ```go package main import ( - "github.com/serverlessworkflow/sdk-go/v3/model" + "fmt" + "os" + "path/filepath" + + "github.com/serverlessworkflow/sdk-go/v3/impl" "github.com/serverlessworkflow/sdk-go/v3/parser" ) -func ParseWorkflow(filePath string) (*model.Workflow, error) { - workflow, err := parser.FromFile(filePath) +func RunWorkflow(workflowFilePath string, input map[string]interface{}) (interface{}, error) { + data, err := os.ReadFile(filepath.Clean(workflowFilePath)) + if err != nil { + return nil, err + } + workflow, err := parser.FromYAMLSource(data) if err != nil { return nil, err } - return workflow, nil -} -``` -This `Workflow` structure can then be used programmatically in your application. + runner := impl.NewDefaultRunner(workflow) + output, err := runner.Run(input) + if err != nil { + return nil, err + } + return output, nil +} -### Programmatic Workflow Creation +func main() { + output, err := RunWorkflow("./myworkflow.yaml", map[string]interface{}{"shouldCall": true}) + if err != nil { + panic(err) + } + fmt.Printf("Workflow completed with output: %v\n", output) +} +``` -Support for building workflows programmatically is planned for future releases. Stay tuned for updates in upcoming versions. +### Implementation Roadmap + +The table below lists the current state of this implementation. This table is a roadmap for the project based on the [DSL Reference doc](https://github.com/serverlessworkflow/specification/blob/v1.0.0/dsl-reference.md). + +| Feature | State | +| ----------- | --------------- | +| Workflow Document | βœ… | +| Workflow Use | 🟑 | +| Workflow Schedule | ❌ | +| Task Call | ❌ | +| Task Do | βœ… | +| Task Emit | ❌ | +| Task For | ❌ | +| Task Fork | ❌ | +| Task Listen | ❌ | +| Task Raise | βœ… | +| Task Run | ❌ | +| Task Set | βœ… | +| Task Switch | ❌ | +| Task Try | ❌ | +| Task Wait | ❌ | +| Lifecycle Events | 🟑 | +| External Resource | ❌ | +| Authentication | ❌ | +| Catalog | ❌ | +| Extension | ❌ | +| Error | βœ… | +| Event Consumption Strategies | ❌ | +| Retry | ❌ | +| Input | βœ… | +| Output | βœ… | +| Export | βœ… | +| Timeout | ❌ | +| Duration | ❌ | +| Endpoint | βœ… | +| HTTP Response | ❌ | +| HTTP Request | ❌ | +| URI Template | βœ… | +| Container Lifetime | ❌ | +| Process Result | ❌ | +| AsyncAPI Server | ❌ | +| AsyncAPI Outbound Message | ❌ | +| AsyncAPI Subscription | ❌ | +| Workflow Definition Reference | ❌ | +| Subscription Iterator | ❌ | + +We love contributions! Our aim is to have a complete implementation to serve as a reference or to become a project on its own to favor the CNCF Ecosystem. + +If you are willing to help, please [file a sub-task](https://github.com/serverlessworkflow/sdk-go/issues/221) in this EPIC describing what you are planning to work on first. --- ## Slack Community -Join the conversation and connect with other contributors on the [CNCF Slack](https://communityinviter.com/apps/cloud-native/cncf). Find us in the `#serverless-workflow-sdk` channel and say hello! πŸ™‹ +Join our community on the CNCF Slack to collaborate, ask questions, and contribute: + +[CNCF Slack Invite](https://communityinviter.com/apps/cloud-native/cncf) + +Find us in the `#serverless-workflow-sdk` channel. --- ## Contributing -We welcome contributions to improve this SDK. Please refer to the sections below for guidance on maintaining project standards. +Your contributions are very welcome! 
### Code Style -- Use `goimports` for import organization. -- Lint your code with: +- Format imports with `goimports`. +- Run static analysis using: -```bash +```shell make lint ``` -To automatically fix lint issues, use: +Automatically fix lint issues: -```bash +```shell make lint params=--fix ``` -Example lint error: - -```bash -$ make lint -make addheaders -make fmt -./hack/go-lint.sh -util/floatstr/floatstr_test.go:19: File is not `goimports`-ed (goimports) - "k8s.io/apimachinery/pkg/util/yaml" -make: *** [lint] Error 1 -``` - ### EditorConfig -For IntelliJ users, an example `.editorconfig` file is available [here](contrib/intellij.editorconfig). See the [Jetbrains documentation](https://www.jetbrains.com/help/idea/editorconfig.html) for usage details. +A sample `.editorconfig` for IntelliJ or GoLand users can be found [here](contrib/intellij.editorconfig). ### Known Issues -#### MacOS Issue: - -On MacOS, you might encounter the following error: +- **MacOS Issue**: If you encounter `goimports: can't extract issues from gofmt diff output`, resolve it with: -``` -goimports: can't extract issues from gofmt diff output +```shell +brew install diffutils ``` -To resolve this, install `diffutils`: +--- -```bash -brew install diffutils -``` +Contributions are greatly appreciated! Check [this EPIC](https://github.com/serverlessworkflow/sdk-go/issues/221) and contribute to completing more features. +Happy coding! diff --git a/builder/builder_test.go b/builder/builder_test.go index cbec324..6bf459c 100644 --- a/builder/builder_test.go +++ b/builder/builder_test.go @@ -18,7 +18,7 @@ import ( "errors" "testing" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/serverlessworkflow/sdk-go/v3/test" @@ -137,7 +137,7 @@ func TestBuilder_Validate(t *testing.T) { Version: "1.0.0", }, Do: &model.TaskList{ - { + &model.TaskItem{ Key: "task1", Task: &model.CallHTTP{ Call: "http", @@ -155,7 +155,7 @@ func TestBuilder_Validate(t *testing.T) { // Test validation failure workflow.Do = &model.TaskList{ - { + &model.TaskItem{ Key: "task2", Task: &model.CallHTTP{ Call: "http", diff --git a/expr/expr.go b/expr/expr.go new file mode 100644 index 0000000..cd5a755 --- /dev/null +++ b/expr/expr.go @@ -0,0 +1,112 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package expr + +import ( + "errors" + "fmt" + "strings" + + "github.com/itchyny/gojq" +) + +// IsStrictExpr returns true if the string is enclosed in `${ }` +func IsStrictExpr(expression string) bool { + return strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") +} + +// Sanitize processes the expression to ensure it's ready for evaluation +// It removes `${}` if present and replaces single quotes with double quotes +func Sanitize(expression string) string { + // Remove `${}` enclosure if present + if IsStrictExpr(expression) { + expression = strings.TrimSpace(expression[2 : len(expression)-1]) + } + + // Replace single quotes with double quotes + expression = strings.ReplaceAll(expression, "'", "\"") + + return expression +} + +// IsValid tries to parse and check if the given value is a valid expression +func IsValid(expression string) bool { + expression = Sanitize(expression) + _, err := gojq.Parse(expression) + return err == nil +} + +// TraverseAndEvaluate recursively processes and evaluates all expressions in a JSON-like structure +func TraverseAndEvaluate(node interface{}, input interface{}) (interface{}, error) { + switch v := node.(type) { + case map[string]interface{}: + // Traverse map + for key, value := range v { + evaluatedValue, err := TraverseAndEvaluate(value, input) + if err != nil { + return nil, err + } + v[key] = evaluatedValue + } + return v, nil + + case []interface{}: + // Traverse array + for i, value := range v { + evaluatedValue, err := TraverseAndEvaluate(value, input) + if err != nil { + return nil, err + } + v[i] = evaluatedValue + } + return v, nil + + case string: + // Check if the string is a runtime expression (e.g., ${ .some.path }) + if IsStrictExpr(v) { + return evaluateJQExpression(Sanitize(v), input) + } + return v, nil + + default: + // Return other types as-is + return v, nil + } +} + +// TODO: add support to variables see https://github.com/itchyny/gojq/blob/main/option_variables_test.go + +// evaluateJQExpression evaluates a jq expression against a given JSON input +func evaluateJQExpression(expression string, input interface{}) (interface{}, error) { + // Parse the sanitized jq expression + query, err := gojq.Parse(expression) + if err != nil { + return nil, fmt.Errorf("failed to parse jq expression: %s, error: %w", expression, err) + } + + // Compile and evaluate the expression + iter := query.Run(input) + result, ok := iter.Next() + if !ok { + return nil, errors.New("no result from jq evaluation") + } + + // Check if an error occurred during evaluation + if err, isErr := result.(error); isErr { + return nil, fmt.Errorf("jq evaluation error: %w", err) + } + + return result, nil +} diff --git a/go.mod b/go.mod index fc847fa..15c63e3 100644 --- a/go.mod +++ b/go.mod @@ -21,6 +21,9 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect golang.org/x/crypto v0.32.0 // indirect golang.org/x/net v0.34.0 // indirect golang.org/x/sys v0.29.0 // indirect diff --git a/go.sum b/go.sum index 257234a..3a19f04 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,4 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew 
v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= @@ -19,8 +20,11 @@ github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/my github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= @@ -30,6 +34,12 @@ github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JT github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= diff --git a/impl/context.go b/impl/context.go new file mode 100644 index 0000000..ae9375e --- /dev/null +++ b/impl/context.go @@ -0,0 +1,151 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "context" + "errors" + "sync" +) + +type ctxKey string + +const runnerCtxKey ctxKey = "wfRunnerContext" + +// WorkflowContext holds the necessary data for the workflow execution within the instance. +type WorkflowContext struct { + mu sync.Mutex + input interface{} // input can hold any type + output interface{} // output can hold any type + context map[string]interface{} + StatusPhase []StatusPhaseLog + TasksStatusPhase map[string][]StatusPhaseLog // Holds `$context` as the key +} + +type TaskContext interface { + SetTaskStatus(task string, status StatusPhase) +} + +func (ctx *WorkflowContext) SetStatus(status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.StatusPhase == nil { + ctx.StatusPhase = []StatusPhaseLog{} + } + ctx.StatusPhase = append(ctx.StatusPhase, NewStatusPhaseLog(status)) +} + +func (ctx *WorkflowContext) SetTaskStatus(task string, status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.TasksStatusPhase == nil { + ctx.TasksStatusPhase = map[string][]StatusPhaseLog{} + } + ctx.TasksStatusPhase[task] = append(ctx.TasksStatusPhase[task], NewStatusPhaseLog(status)) +} + +// SetInstanceCtx safely sets the `$context` value +func (ctx *WorkflowContext) SetInstanceCtx(value interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + ctx.context = make(map[string]interface{}) + } + ctx.context["$context"] = value +} + +// GetInstanceCtx safely retrieves the `$context` value +func (ctx *WorkflowContext) GetInstanceCtx() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + return nil + } + return ctx.context["$context"] +} + +// SetInput safely sets the input +func (ctx *WorkflowContext) SetInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.input = input +} + +// GetInput safely retrieves the input +func (ctx *WorkflowContext) GetInput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.input +} + +// SetOutput safely sets the output +func (ctx *WorkflowContext) SetOutput(output interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.output = output +} + +// GetOutput safely retrieves the output +func (ctx *WorkflowContext) GetOutput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.output +} + +// GetInputAsMap safely retrieves the input as a map[string]interface{}. +// If input is not a map, it creates a map with an empty string key and the input as the value. +func (ctx *WorkflowContext) GetInputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if inputMap, ok := ctx.input.(map[string]interface{}); ok { + return inputMap + } + + // If input is not a map, create a map with an empty key and set input as the value + return map[string]interface{}{ + "": ctx.input, + } +} + +// GetOutputAsMap safely retrieves the output as a map[string]interface{}. +// If output is not a map, it creates a map with an empty string key and the output as the value. 
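+// For example, after SetOutput(42) it returns map[string]interface{}{"": 42},
+// while a map output is returned as-is.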
+func (ctx *WorkflowContext) GetOutputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if outputMap, ok := ctx.output.(map[string]interface{}); ok { + return outputMap + } + + // If output is not a map, create a map with an empty key and set output as the value + return map[string]interface{}{ + "": ctx.output, + } +} + +// WithWorkflowContext adds the WorkflowContext to a parent context +func WithWorkflowContext(parent context.Context, wfCtx *WorkflowContext) context.Context { + return context.WithValue(parent, runnerCtxKey, wfCtx) +} + +// GetWorkflowContext retrieves the WorkflowContext from a context +func GetWorkflowContext(ctx context.Context) (*WorkflowContext, error) { + wfCtx, ok := ctx.Value(runnerCtxKey).(*WorkflowContext) + if !ok { + return nil, errors.New("workflow context not found") + } + return wfCtx, nil +} diff --git a/impl/json_schema.go b/impl/json_schema.go new file mode 100644 index 0000000..396f9f5 --- /dev/null +++ b/impl/json_schema.go @@ -0,0 +1,70 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/xeipuuv/gojsonschema" +) + +// ValidateJSONSchema validates the provided data against a model.Schema. +func ValidateJSONSchema(data interface{}, schema *model.Schema) error { + if schema == nil { + return nil + } + + schema.ApplyDefaults() + + if schema.Format != model.DefaultSchema { + return fmt.Errorf("unsupported schema format: '%s'", schema.Format) + } + + var schemaJSON string + if schema.Document != nil { + documentBytes, err := json.Marshal(schema.Document) + if err != nil { + return fmt.Errorf("failed to marshal schema document to JSON: %w", err) + } + schemaJSON = string(documentBytes) + } else if schema.Resource != nil { + // TODO: Handle external resource references (not implemented here) + return errors.New("external resources are not yet supported") + } else { + return errors.New("schema must have either a 'Document' or 'Resource'") + } + + schemaLoader := gojsonschema.NewStringLoader(schemaJSON) + dataLoader := gojsonschema.NewGoLoader(data) + + result, err := gojsonschema.Validate(schemaLoader, dataLoader) + if err != nil { + // TODO: use model.Error + return fmt.Errorf("failed to validate JSON schema: %w", err) + } + + if !result.Valid() { + var validationErrors string + for _, err := range result.Errors() { + validationErrors += fmt.Sprintf("- %s\n", err.String()) + } + return fmt.Errorf("JSON schema validation failed:\n%s", validationErrors) + } + + return nil +} diff --git a/impl/runner.go b/impl/runner.go new file mode 100644 index 0000000..c219886 --- /dev/null +++ b/impl/runner.go @@ -0,0 +1,124 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var _ WorkflowRunner = &workflowRunnerImpl{} + +type WorkflowRunner interface { + GetWorkflowDef() *model.Workflow + Run(input interface{}) (output interface{}, err error) + GetContext() *WorkflowContext +} + +func NewDefaultRunner(workflow *model.Workflow) WorkflowRunner { + wfContext := &WorkflowContext{} + wfContext.SetStatus(PendingStatus) + // TODO: based on the workflow definition, the context might change. + ctx := WithWorkflowContext(context.Background(), wfContext) + return &workflowRunnerImpl{ + Workflow: workflow, + Context: ctx, + RunnerCtx: wfContext, + } +} + +type workflowRunnerImpl struct { + Workflow *model.Workflow + Context context.Context + RunnerCtx *WorkflowContext +} + +func (wr *workflowRunnerImpl) GetContext() *WorkflowContext { + return wr.RunnerCtx +} + +func (wr *workflowRunnerImpl) GetTaskContext() TaskContext { + return wr.RunnerCtx +} + +func (wr *workflowRunnerImpl) GetWorkflowDef() *model.Workflow { + return wr.Workflow +} + +// Run executes the workflow synchronously. +func (wr *workflowRunnerImpl) Run(input interface{}) (output interface{}, err error) { + defer func() { + if err != nil { + wr.RunnerCtx.SetStatus(FaultedStatus) + err = wr.wrapWorkflowError(err, "/") + } + }() + + // Process input + if input, err = wr.processInput(input); err != nil { + return nil, err + } + + wr.RunnerCtx.SetInput(input) + // Run tasks sequentially + wr.RunnerCtx.SetStatus(RunningStatus) + doRunner, err := NewDoTaskRunner(wr.Workflow.Do, wr) + if err != nil { + return nil, err + } + output, err = doRunner.Run(wr.RunnerCtx.GetInput()) + if err != nil { + return nil, err + } + + // Process output + if output, err = wr.processOutput(output); err != nil { + return nil, err + } + + wr.RunnerCtx.SetOutput(output) + wr.RunnerCtx.SetStatus(CompletedStatus) + return output, nil +} + +// wrapWorkflowError ensures workflow errors have a proper instance reference. +func (wr *workflowRunnerImpl) wrapWorkflowError(err error, taskName string) error { + if knownErr := model.AsError(err); knownErr != nil { + return knownErr.WithInstanceRef(wr.Workflow, taskName) + } + return model.NewErrRuntime(fmt.Errorf("workflow '%s', task '%s': %w", wr.Workflow.Document.Name, taskName, err), taskName) +} + +// processInput validates and transforms input if needed. +func (wr *workflowRunnerImpl) processInput(input interface{}) (output interface{}, err error) { + if wr.Workflow.Input != nil { + output, err = processIO(input, wr.Workflow.Input.Schema, wr.Workflow.Input.From, "/") + if err != nil { + return nil, err + } + return output, nil + } + return input, nil +} + +// processOutput applies output transformations. 
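+// When the workflow declares an Output block, its schema and `as` expression
+// are handed to processIO; otherwise the output passes through unchanged.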
+func (wr *workflowRunnerImpl) processOutput(output interface{}) (interface{}, error) { + if wr.Workflow.Output != nil { + return processIO(output, wr.Workflow.Output.Schema, wr.Workflow.Output.As, "/") + } + return output, nil +} diff --git a/impl/status_phase.go b/impl/status_phase.go new file mode 100644 index 0000000..ca61fad --- /dev/null +++ b/impl/status_phase.go @@ -0,0 +1,52 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import "time" + +type StatusPhase string + +const ( + // PendingStatus The workflow/task has been initiated and is pending execution. + PendingStatus StatusPhase = "pending" + // RunningStatus The workflow/task is currently in progress. + RunningStatus StatusPhase = "running" + // WaitingStatus The workflow/task execution is temporarily paused, awaiting either inbound event(s) or a specified time interval as defined by a wait task. + WaitingStatus StatusPhase = "waiting" + // SuspendedStatus The workflow/task execution has been manually paused by a user and will remain halted until explicitly resumed. + SuspendedStatus StatusPhase = "suspended" + // CancelledStatus The workflow/task execution has been terminated before completion. + CancelledStatus StatusPhase = "cancelled" + // FaultedStatus The workflow/task execution has encountered an error. + FaultedStatus StatusPhase = "faulted" + // CompletedStatus The workflow/task ran to completion. + CompletedStatus StatusPhase = "completed" +) + +func (s StatusPhase) String() string { + return string(s) +} + +type StatusPhaseLog struct { + Timestamp int64 `json:"timestamp"` + Status StatusPhase `json:"status"` +} + +func NewStatusPhaseLog(status StatusPhase) StatusPhaseLog { + return StatusPhaseLog{ + Status: status, + Timestamp: time.Now().UnixMilli(), + } +} diff --git a/impl/task_runner.go b/impl/task_runner.go new file mode 100644 index 0000000..05d3817 --- /dev/null +++ b/impl/task_runner.go @@ -0,0 +1,252 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "fmt" + "reflect" + "strings" + + "github.com/serverlessworkflow/sdk-go/v3/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var _ TaskRunner = &SetTaskRunner{} +var _ TaskRunner = &RaiseTaskRunner{} +var _ TaskRunner = &ForTaskRunner{} + +type TaskRunner interface { + Run(input interface{}) (interface{}, error) + GetTaskName() string +} + +func NewSetTaskRunner(taskName string, task *model.SetTask) (*SetTaskRunner, error) { + if task == nil || task.Set == nil { + return nil, model.NewErrValidation(fmt.Errorf("no set configuration provided for SetTask %s", taskName), taskName) + } + return &SetTaskRunner{ + Task: task, + TaskName: taskName, + }, nil +} + +type SetTaskRunner struct { + Task *model.SetTask + TaskName string +} + +func (s *SetTaskRunner) GetTaskName() string { + return s.TaskName +} + +func (s *SetTaskRunner) Run(input interface{}) (output interface{}, err error) { + setObject := deepClone(s.Task.Set) + result, err := expr.TraverseAndEvaluate(setObject, input) + if err != nil { + return nil, model.NewErrExpression(err, s.TaskName) + } + + output, ok := result.(map[string]interface{}) + if !ok { + return nil, model.NewErrRuntime(fmt.Errorf("expected output to be a map[string]interface{}, but got a different type. Got: %v", result), s.TaskName) + } + + return output, nil +} + +func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, workflowDef *model.Workflow) (*RaiseTaskRunner, error) { + if err := resolveErrorDefinition(task, workflowDef); err != nil { + return nil, err + } + if task.Raise.Error.Definition == nil { + return nil, model.NewErrValidation(fmt.Errorf("no raise configuration provided for RaiseTask %s", taskName), taskName) + } + return &RaiseTaskRunner{ + Task: task, + TaskName: taskName, + }, nil +} + +// TODO: can e refactored to a definition resolver callable from the context +func resolveErrorDefinition(t *model.RaiseTask, workflowDef *model.Workflow) error { + if workflowDef != nil && t.Raise.Error.Ref != nil { + notFoundErr := model.NewErrValidation(fmt.Errorf("%v error definition not found in 'uses'", t.Raise.Error.Ref), "") + if workflowDef.Use != nil && workflowDef.Use.Errors != nil { + definition, ok := workflowDef.Use.Errors[*t.Raise.Error.Ref] + if !ok { + return notFoundErr + } + t.Raise.Error.Definition = definition + return nil + } + return notFoundErr + } + return nil +} + +type RaiseTaskRunner struct { + Task *model.RaiseTask + TaskName string +} + +var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ + model.ErrorTypeAuthentication: model.NewErrAuthentication, + model.ErrorTypeValidation: model.NewErrValidation, + model.ErrorTypeCommunication: model.NewErrCommunication, + model.ErrorTypeAuthorization: model.NewErrAuthorization, + model.ErrorTypeConfiguration: model.NewErrConfiguration, + model.ErrorTypeExpression: model.NewErrExpression, + model.ErrorTypeRuntime: model.NewErrRuntime, + model.ErrorTypeTimeout: model.NewErrTimeout, +} + +func (r *RaiseTaskRunner) Run(input interface{}) (output interface{}, err error) { + output = input + // TODO: make this an external func so we can call it after getting the reference? 
Or we can get the reference from the workflow definition + var detailResult interface{} + detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName) + if err != nil { + return nil, err + } + + var titleResult interface{} + titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName) + if err != nil { + return nil, err + } + + instance := &model.JsonPointerOrRuntimeExpression{Value: r.TaskName} + + var raiseErr *model.Error + if raiseErrF, ok := raiseErrFuncMapping[r.Task.Raise.Error.Definition.Type.String()]; ok { + raiseErr = raiseErrF(fmt.Errorf("%v", detailResult), instance.String()) + } else { + raiseErr = r.Task.Raise.Error.Definition + raiseErr.Detail = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", detailResult)) + raiseErr.Instance = instance + } + + raiseErr.Title = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", titleResult)) + err = raiseErr + + return output, err +} + +func (r *RaiseTaskRunner) GetTaskName() string { + return r.TaskName +} + +func NewForTaskRunner(taskName string, task *model.ForTask, taskSupport TaskSupport) (*ForTaskRunner, error) { + if task == nil || task.Do == nil { + return nil, model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) + } + + doRunner, err := NewDoTaskRunner(task.Do, taskSupport) + if err != nil { + return nil, err + } + + return &ForTaskRunner{ + Task: task, + TaskName: taskName, + DoRunner: doRunner, + }, nil +} + +const ( + forTaskDefaultEach = "$item" + forTaskDefaultAt = "$index" +) + +type ForTaskRunner struct { + Task *model.ForTask + TaskName string + DoRunner *DoTaskRunner +} + +func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { + f.sanitizeFor() + in, err := expr.TraverseAndEvaluate(f.Task.For.In, input) + if err != nil { + return nil, err + } + + var forOutput interface{} + rv := reflect.ValueOf(in) + switch rv.Kind() { + case reflect.Slice, reflect.Array: + for i := 0; i < rv.Len(); i++ { + item := rv.Index(i).Interface() + + if forOutput, err = f.processForItem(i, item, forOutput); err != nil { + return nil, err + } + } + case reflect.Invalid: + return input, nil + default: + if forOutput, err = f.processForItem(0, in, forOutput); err != nil { + return nil, err + } + } + + return forOutput, nil +} + +func (f *ForTaskRunner) processForItem(idx int, item interface{}, forOutput interface{}) (interface{}, error) { + forInput := map[string]interface{}{ + f.Task.For.At: idx, + f.Task.For.Each: item, + } + if forOutput != nil { + if outputMap, ok := forOutput.(map[string]interface{}); ok { + for key, value := range outputMap { + forInput[key] = value + } + } else { + return nil, fmt.Errorf("task %s item %s at index %d returned a non-json object, impossible to merge context", f.TaskName, f.Task.For.Each, idx) + } + } + var err error + forOutput, err = f.DoRunner.Run(forInput) + if err != nil { + return nil, err + } + + return forOutput, nil +} + +func (f *ForTaskRunner) sanitizeFor() { + f.Task.For.Each = strings.TrimSpace(f.Task.For.Each) + f.Task.For.At = strings.TrimSpace(f.Task.For.At) + + if f.Task.For.Each == "" { + f.Task.For.Each = forTaskDefaultEach + } + if f.Task.For.At == "" { + f.Task.For.At = forTaskDefaultAt + } + + if !strings.HasPrefix(f.Task.For.Each, "$") { + f.Task.For.Each = "$" + f.Task.For.Each + } + if !strings.HasPrefix(f.Task.For.At, "$") { + f.Task.For.At = "$" + f.Task.For.At + } +} + +func (f *ForTaskRunner) GetTaskName() string { + return 
f.TaskName +} diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go new file mode 100644 index 0000000..a34a4dd --- /dev/null +++ b/impl/task_runner_do.go @@ -0,0 +1,178 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +var _ TaskRunner = &DoTaskRunner{} + +type TaskSupport interface { + GetTaskContext() TaskContext + GetWorkflowDef() *model.Workflow +} + +// TODO: refactor to receive a resolver handler instead of the workflow runner + +// NewTaskRunner creates a TaskRunner instance based on the task type. +func NewTaskRunner(taskName string, task model.Task, taskSupport TaskSupport) (TaskRunner, error) { + switch t := task.(type) { + case *model.SetTask: + return NewSetTaskRunner(taskName, t) + case *model.RaiseTask: + return NewRaiseTaskRunner(taskName, t, taskSupport.GetWorkflowDef()) + case *model.DoTask: + return NewDoTaskRunner(t.Do, taskSupport) + case *model.ForTask: + return NewForTaskRunner(taskName, t, taskSupport) + default: + return nil, fmt.Errorf("unsupported task type '%T' for task '%s'", t, taskName) + } +} + +func NewDoTaskRunner(taskList *model.TaskList, taskSupport TaskSupport) (*DoTaskRunner, error) { + return &DoTaskRunner{ + TaskList: taskList, + TaskSupport: taskSupport, + }, nil +} + +type DoTaskRunner struct { + TaskList *model.TaskList + TaskSupport TaskSupport +} + +func (d *DoTaskRunner) Run(input interface{}) (output interface{}, err error) { + if d.TaskList == nil { + return input, nil + } + return d.executeTasks(input, d.TaskList) +} + +func (d *DoTaskRunner) GetTaskName() string { + return "" +} + +// executeTasks runs all defined tasks sequentially. 
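+// Each task's `if` expression is evaluated first; skipped and completed tasks
+// advance through tasks.Next, every status change is recorded on the task
+// context, and each task's output becomes the input of the next task.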
+func (d *DoTaskRunner) executeTasks(input interface{}, tasks *model.TaskList) (output interface{}, err error) { + output = input + if tasks == nil { + return output, nil + } + + idx := 0 + currentTask := (*tasks)[idx] + ctx := d.TaskSupport.GetTaskContext() + + for currentTask != nil { + if shouldRun, err := d.shouldRunTask(input, currentTask); err != nil { + return output, err + } else if !shouldRun { + idx, currentTask = tasks.Next(idx) + continue + } + + ctx.SetTaskStatus(currentTask.Key, PendingStatus) + runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, d.TaskSupport) + if err != nil { + return output, err + } + + ctx.SetTaskStatus(currentTask.Key, RunningStatus) + if output, err = d.runTask(input, runner, currentTask.Task.GetBase()); err != nil { + ctx.SetTaskStatus(currentTask.Key, FaultedStatus) + return output, err + } + + ctx.SetTaskStatus(currentTask.Key, CompletedStatus) + input = deepCloneValue(output) + idx, currentTask = tasks.Next(idx) + } + + return output, nil +} + +func (d *DoTaskRunner) shouldRunTask(input interface{}, task *model.TaskItem) (bool, error) { + if task.GetBase().If != nil { + output, err := expr.TraverseAndEvaluate(task.GetBase().If.String(), input) + if err != nil { + return false, model.NewErrExpression(err, task.Key) + } + if result, ok := output.(bool); ok && !result { + return false, nil + } + } + return true, nil +} + +// runTask executes an individual task. +func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { + taskName := runner.GetTaskName() + + if task.Input != nil { + if input, err = d.processTaskInput(task, input, taskName); err != nil { + return nil, err + } + } + + output, err = runner.Run(input) + if err != nil { + return nil, err + } + + if output, err = d.processTaskOutput(task, output, taskName); err != nil { + return nil, err + } + + return output, nil +} + +// processTaskInput processes task input validation and transformation. +func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interface{}, taskName string) (output interface{}, err error) { + if task.Input == nil { + return taskInput, nil + } + + if err = validateSchema(taskInput, task.Input.Schema, taskName); err != nil { + return nil, err + } + + if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName); err != nil { + return nil, err + } + + return output, nil +} + +// processTaskOutput processes task output validation and transformation. +func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interface{}, taskName string) (output interface{}, err error) { + if task.Output == nil { + return taskOutput, nil + } + + if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName); err != nil { + return nil, err + } + + if err = validateSchema(output, task.Output.Schema, taskName); err != nil { + return nil, err + } + + return output, nil +} diff --git a/impl/task_runner_raise_test.go b/impl/task_runner_raise_test.go new file mode 100644 index 0000000..3527283 --- /dev/null +++ b/impl/task_runner_raise_test.go @@ -0,0 +1,165 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +func TestRaiseTaskRunner_WithDefinedError(t *testing.T) { + input := map[string]interface{}{} + + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeValidation), + Status: 400, + Title: model.NewStringOrRuntimeExpr("Validation Error"), + Detail: model.NewStringOrRuntimeExpr("Invalid input data"), + }, + }, + }, + } + + runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, nil) + assert.NoError(t, err) + + output, err := runner.Run(input) + assert.Equal(t, output, input) + assert.Error(t, err) + + expectedErr := model.NewErrValidation(errors.New("Invalid input data"), "task_raise_defined") + + var modelErr *model.Error + if errors.As(err, &modelErr) { + assert.Equal(t, expectedErr.Type.String(), modelErr.Type.String()) + assert.Equal(t, expectedErr.Status, modelErr.Status) + assert.Equal(t, expectedErr.Title.String(), modelErr.Title.String()) + assert.Equal(t, "Invalid input data", modelErr.Detail.String()) + assert.Equal(t, expectedErr.Instance.String(), modelErr.Instance.String()) + } else { + t.Errorf("expected error of type *model.Error but got %T", err) + } +} + +func TestRaiseTaskRunner_WithReferencedError(t *testing.T) { + ref := "someErrorRef" + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Ref: &ref, + }, + }, + } + + runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, nil) + assert.Error(t, err) + assert.Nil(t, runner) +} + +func TestRaiseTaskRunner_TimeoutErrorWithExpression(t *testing.T) { + input := map[string]interface{}{ + "timeoutMessage": "Request took too long", + } + + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeTimeout), + Status: 408, + Title: model.NewStringOrRuntimeExpr("Timeout Error"), + Detail: model.NewStringOrRuntimeExpr("${ .timeoutMessage }"), + }, + }, + }, + } + + runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, nil) + assert.NoError(t, err) + + output, err := runner.Run(input) + assert.Equal(t, input, output) + assert.Error(t, err) + + expectedErr := model.NewErrTimeout(errors.New("Request took too long"), "task_raise_timeout_expr") + + var modelErr *model.Error + if errors.As(err, &modelErr) { + assert.Equal(t, expectedErr.Type.String(), modelErr.Type.String()) + assert.Equal(t, expectedErr.Status, modelErr.Status) + assert.Equal(t, expectedErr.Title.String(), modelErr.Title.String()) + assert.Equal(t, "Request took too long", modelErr.Detail.String()) + assert.Equal(t, expectedErr.Instance.String(), modelErr.Instance.String()) + } else { + t.Errorf("expected error of type *model.Error but got %T", err) + } +} + +func TestRaiseTaskRunner_Serialization(t *testing.T) { + raiseTask := &model.RaiseTask{ + Raise: 
model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Definition: &model.Error{ + Type: model.NewUriTemplate(model.ErrorTypeRuntime), + Status: 500, + Title: model.NewStringOrRuntimeExpr("Runtime Error"), + Detail: model.NewStringOrRuntimeExpr("Unexpected failure"), + Instance: &model.JsonPointerOrRuntimeExpression{Value: "/task_runtime"}, + }, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + + var deserializedTask model.RaiseTask + err = json.Unmarshal(data, &deserializedTask) + assert.NoError(t, err) + + assert.Equal(t, raiseTask.Raise.Error.Definition.Type.String(), deserializedTask.Raise.Error.Definition.Type.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Status, deserializedTask.Raise.Error.Definition.Status) + assert.Equal(t, raiseTask.Raise.Error.Definition.Title.String(), deserializedTask.Raise.Error.Definition.Title.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Detail.String(), deserializedTask.Raise.Error.Definition.Detail.String()) + assert.Equal(t, raiseTask.Raise.Error.Definition.Instance.String(), deserializedTask.Raise.Error.Definition.Instance.String()) +} + +func TestRaiseTaskRunner_ReferenceSerialization(t *testing.T) { + ref := "errorReference" + raiseTask := &model.RaiseTask{ + Raise: model.RaiseTaskConfiguration{ + Error: model.RaiseTaskError{ + Ref: &ref, + }, + }, + } + + data, err := json.Marshal(raiseTask) + assert.NoError(t, err) + + var deserializedTask model.RaiseTask + err = json.Unmarshal(data, &deserializedTask) + assert.NoError(t, err) + + assert.Equal(t, *raiseTask.Raise.Error.Ref, *deserializedTask.Raise.Error.Ref) + assert.Nil(t, deserializedTask.Raise.Error.Definition) +} diff --git a/impl/task_runner_test.go b/impl/task_runner_test.go new file mode 100644 index 0000000..c5a76d7 --- /dev/null +++ b/impl/task_runner_test.go @@ -0,0 +1,330 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "os" + "path/filepath" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/serverlessworkflow/sdk-go/v3/parser" + "github.com/stretchr/testify/assert" +) + +// runWorkflowTest is a reusable test function for workflows +func runWorkflowTest(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}) { + // Run the workflow + output, err := runWorkflow(t, workflowPath, input, expectedOutput) + assert.NoError(t, err) + + assertWorkflowRun(t, expectedOutput, output) +} + +func runWorkflowWithErr(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}, assertErr func(error)) { + output, err := runWorkflow(t, workflowPath, input, expectedOutput) + assert.Error(t, err) + assertErr(err) + assertWorkflowRun(t, expectedOutput, output) +} + +func runWorkflow(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}) (output interface{}, err error) { + // Read the workflow YAML from the testdata directory + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + + // Parse the YAML workflow + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + + // Initialize the workflow runner + runner := NewDefaultRunner(workflow) + + // Run the workflow + output, err = runner.Run(input) + return output, err +} + +func assertWorkflowRun(t *testing.T, expectedOutput map[string]interface{}, output interface{}) { + if expectedOutput == nil { + assert.Nil(t, output, "Expected nil Workflow run output") + } else { + assert.Equal(t, expectedOutput, output, "Workflow output mismatch") + } +} + +// TestWorkflowRunner_Run_YAML validates multiple workflows +func TestWorkflowRunner_Run_YAML(t *testing.T) { + // Workflow 1: Chained Set Tasks + t.Run("Chained Set Tasks", func(t *testing.T) { + workflowPath := "./testdata/chained_set_tasks.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "tripled": float64(60), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + // Workflow 2: Concatenating Strings + t.Run("Concatenating Strings", func(t *testing.T) { + workflowPath := "./testdata/concatenating_strings.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "fullName": "John Doe", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + // Workflow 3: Conditional Logic + t.Run("Conditional Logic", func(t *testing.T) { + workflowPath := "./testdata/conditional_logic.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Conditional Logic", func(t *testing.T) { + workflowPath := "./testdata/sequential_set_colors.yaml" + // Define the input and expected output + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "resultColors": []interface{}{"red", "green", "blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + t.Run("input From", func(t *testing.T) { + workflowPath := "./testdata/sequential_set_colors_output_as.yaml" + // Define the input and expected output + expectedOutput := map[string]interface{}{ + "result": []interface{}{"red", "green", "blue"}, + } + runWorkflowTest(t, workflowPath, nil, expectedOutput) + }) + t.Run("input From", func(t *testing.T) { + workflowPath := 
"./testdata/conditional_logic_input_from.yaml" + // Define the input and expected output + input := map[string]interface{}{ + "localWeather": map[string]interface{}{ + "temperature": 34, + }, + } + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_WithSchemaValidation(t *testing.T) { + // Workflow 1: Workflow input Schema Validation + t.Run("Workflow input Schema Validation - Valid input", func(t *testing.T) { + workflowPath := "./testdata/workflow_input_schema.yaml" + input := map[string]interface{}{ + "key": "value", + } + expectedOutput := map[string]interface{}{ + "outputKey": "value", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Workflow input Schema Validation - Invalid input", func(t *testing.T) { + workflowPath := "./testdata/workflow_input_schema.yaml" + input := map[string]interface{}{ + "wrongKey": "value", + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner := NewDefaultRunner(workflow) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid input") + assert.Contains(t, err.Error(), "JSON schema validation failed") + }) + + // Workflow 2: Task input Schema Validation + t.Run("Task input Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_input_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": 42, + } + expectedOutput := map[string]interface{}{ + "taskOutputKey": 84, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Task input Schema Validation - Invalid input", func(t *testing.T) { + workflowPath := "./testdata/task_input_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": "invalidValue", + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner := NewDefaultRunner(workflow) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid task input") + assert.Contains(t, err.Error(), "JSON schema validation failed") + }) + + // Workflow 3: Task output Schema Validation + t.Run("Task output Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema.yaml" + input := map[string]interface{}{ + "taskInputKey": "value", + } + expectedOutput := map[string]interface{}{ + "finalOutputKey": "resultValue", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Task output Schema Validation - Invalid output", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema_with_dynamic_value.yaml" + input := map[string]interface{}{ + "taskInputKey": 123, // Invalid value (not a string) + } + yamlBytes, err := os.ReadFile(filepath.Clean(workflowPath)) + assert.NoError(t, err, "Failed to read workflow YAML file") + workflow, err := parser.FromYAMLSource(yamlBytes) + assert.NoError(t, err, "Failed to parse workflow YAML") + runner := NewDefaultRunner(workflow) + _, err = runner.Run(input) + assert.Error(t, err, "Expected validation error for invalid task output") + assert.Contains(t, err.Error(), "JSON schema validation failed") + }) + + t.Run("Task output Schema 
Validation - Valid output", func(t *testing.T) { + workflowPath := "./testdata/task_output_schema_with_dynamic_value.yaml" + input := map[string]interface{}{ + "taskInputKey": "validValue", // Valid value + } + expectedOutput := map[string]interface{}{ + "finalOutputKey": "validValue", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + // Workflow 4: Task Export Schema Validation + t.Run("Task Export Schema Validation", func(t *testing.T) { + workflowPath := "./testdata/task_export_schema.yaml" + input := map[string]interface{}{ + "key": "value", + } + expectedOutput := map[string]interface{}{ + "exportedKey": "value", + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_ControlFlow(t *testing.T) { + t.Run("Set Tasks with Then Directive", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_with_then.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "result": float64(90), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Set Tasks with Termination", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_with_termination.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "finalValue": float64(20), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Set Tasks with Invalid Then Reference", func(t *testing.T) { + workflowPath := "./testdata/set_tasks_invalid_then.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "partialResult": float64(15), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestWorkflowRunner_Run_YAML_RaiseTasks(t *testing.T) { + // TODO: add $workflow context to the expr processing + //t.Run("Raise Inline Error", func(t *testing.T) { + // runWorkflowTest(t, "./testdata/raise_inline.yaml", nil, nil) + //}) + + t.Run("Raise Referenced Error", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_reusable.yaml", nil, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthentication, model.AsError(err).Type.String()) + }) + }) + + t.Run("Raise Error with Dynamic Detail", func(t *testing.T) { + input := map[string]interface{}{ + "reason": "User token expired", + } + runWorkflowWithErr(t, "./testdata/raise_error_with_input.yaml", input, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthentication, model.AsError(err).Type.String()) + assert.Equal(t, "User authentication failed: User token expired", model.AsError(err).Detail.String()) + }) + }) + + t.Run("Raise Undefined Error Reference", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_undefined_reference.yaml", nil, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeValidation, model.AsError(err).Type.String()) + }) + }) +} + +func TestWorkflowRunner_Run_YAML_RaiseTasks_ControlFlow(t *testing.T) { + t.Run("Raise Error with Conditional Logic", func(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "age": 16, + }, + } + runWorkflowWithErr(t, "./testdata/raise_conditional.yaml", input, nil, + func(err error) { + assert.Equal(t, model.ErrorTypeAuthorization, model.AsError(err).Type.String()) + assert.Equal(t, "User is under the required age", model.AsError(err).Detail.String()) + }) + }) +} + +func TestForTaskRunner_Run(t *testing.T) { + t.Skip("Skipping until the For task is implemented - missing JQ variables implementation") + t.Run("Simple For with Colors", func(t 
*testing.T) { + workflowPath := "./testdata/for_colors.yaml" + input := map[string]interface{}{ + "colors": []string{"red", "green", "blue"}, + } + expectedOutput := map[string]interface{}{ + "processed": map[string]interface{}{ + "colors": []string{"red", "green", "blue"}, + "indexed": []float64{0, 1, 2}, + }, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + +} diff --git a/impl/task_set_test.go b/impl/task_set_test.go new file mode 100644 index 0000000..48ca18b --- /dev/null +++ b/impl/task_set_test.go @@ -0,0 +1,416 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "reflect" + "testing" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +func TestSetTaskExecutor_Exec(t *testing.T) { + input := map[string]interface{}{ + "configuration": map[string]interface{}{ + "size": map[string]interface{}{ + "width": 6, + "height": 6, + }, + "fill": map[string]interface{}{ + "red": 69, + "green": 69, + "blue": 69, + }, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "shape": "circle", + "size": "${ .configuration.size }", + "fill": "${ .configuration.fill }", + }, + } + + executor, err := NewSetTaskRunner("task1", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "shape": "circle", + "size": map[string]interface{}{ + "width": 6, + "height": 6, + }, + "fill": map[string]interface{}{ + "red": 69, + "green": 69, + "blue": 69, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_StaticValues(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "completed", + "count": 10, + }, + } + + executor, err := NewSetTaskRunner("task_static", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "completed", + "count": 10, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_RuntimeExpressions(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "firstName": "John", + "lastName": "Doe", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "fullName": "${ \"\\(.user.firstName) \\(.user.lastName)\" }", + }, + } + + executor, err := NewSetTaskRunner("task_runtime_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "fullName": "John Doe", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_NestedStructures(t *testing.T) { 
+ input := map[string]interface{}{ + "order": map[string]interface{}{ + "id": 12345, + "items": []interface{}{"item1", "item2"}, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "orderDetails": map[string]interface{}{ + "orderId": "${ .order.id }", + "itemCount": "${ .order.items | length }", + }, + }, + } + + executor, err := NewSetTaskRunner("task_nested_structures", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "orderDetails": map[string]interface{}{ + "orderId": 12345, + "itemCount": 2, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_StaticAndDynamicValues(t *testing.T) { + input := map[string]interface{}{ + "config": map[string]interface{}{ + "threshold": 100, + }, + "metrics": map[string]interface{}{ + "current": 75, + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "active", + "remaining": "${ .config.threshold - .metrics.current }", + }, + } + + executor, err := NewSetTaskRunner("task_static_dynamic", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "active", + "remaining": 25, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_MissingInputData(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "value": "${ .missingField }", + }, + } + + executor, err := NewSetTaskRunner("task_missing_input", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + assert.Nil(t, output.(map[string]interface{})["value"]) +} + +func TestSetTaskExecutor_ExpressionsWithFunctions(t *testing.T) { + input := map[string]interface{}{ + "values": []interface{}{1, 2, 3, 4, 5}, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "sum": "${ .values | map(.) 
| add }", + }, + } + + executor, err := NewSetTaskRunner("task_expr_functions", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "sum": 15, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ConditionalExpressions(t *testing.T) { + input := map[string]interface{}{ + "temperature": 30, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "weather": "${ if .temperature > 25 then 'hot' else 'cold' end }", + }, + } + + executor, err := NewSetTaskRunner("task_conditional_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "weather": "hot", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ArrayDynamicIndex(t *testing.T) { + input := map[string]interface{}{ + "items": []interface{}{"apple", "banana", "cherry"}, + "index": 1, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "selectedItem": "${ .items[.index] }", + }, + } + + executor, err := NewSetTaskRunner("task_array_indexing", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "selectedItem": "banana", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_NestedConditionalLogic(t *testing.T) { + input := map[string]interface{}{ + "age": 20, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "status": "${ if .age < 18 then 'minor' else if .age < 65 then 'adult' else 'senior' end end }", + }, + } + + executor, err := NewSetTaskRunner("task_nested_condition", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "status": "adult", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_DefaultValues(t *testing.T) { + input := map[string]interface{}{} + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "value": "${ .missingField // 'defaultValue' }", + }, + } + + executor, err := NewSetTaskRunner("task_default_values", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "value": "defaultValue", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_ComplexNestedStructures(t *testing.T) { + input := map[string]interface{}{ + "config": map[string]interface{}{ + "dimensions": map[string]interface{}{ + "width": 10, + "height": 5, + }, + }, + "meta": map[string]interface{}{ + "color": "blue", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "shape": map[string]interface{}{ + "type": "rectangle", + "width": "${ .config.dimensions.width }", + "height": "${ .config.dimensions.height }", + "color": "${ .meta.color }", + "area": "${ .config.dimensions.width * .config.dimensions.height }", + }, + }, + } + + executor, err := NewSetTaskRunner("task_complex_nested", setTask) + assert.NoError(t, err) + + output, err := 
executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "shape": map[string]interface{}{ + "type": "rectangle", + "width": 10, + "height": 5, + "color": "blue", + "area": 50, + }, + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} + +func TestSetTaskExecutor_MultipleExpressions(t *testing.T) { + input := map[string]interface{}{ + "user": map[string]interface{}{ + "name": "Alice", + "email": "alice@example.com", + }, + } + + setTask := &model.SetTask{ + Set: map[string]interface{}{ + "username": "${ .user.name }", + "contact": "${ .user.email }", + }, + } + + executor, err := NewSetTaskRunner("task_multiple_expr", setTask) + assert.NoError(t, err) + + output, err := executor.Run(input) + assert.NoError(t, err) + + expectedOutput := map[string]interface{}{ + "username": "Alice", + "contact": "alice@example.com", + } + + if !reflect.DeepEqual(output, expectedOutput) { + t.Errorf("expected %v, got %v", expectedOutput, output) + } +} diff --git a/impl/testdata/chained_set_tasks.yaml b/impl/testdata/chained_set_tasks.yaml new file mode 100644 index 0000000..8ee9a9c --- /dev/null +++ b/impl/testdata/chained_set_tasks.yaml @@ -0,0 +1,29 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: chained-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + baseValue: 10 + - task2: + set: + doubled: "${ .baseValue * 2 }" + - task3: + set: + tripled: "${ .doubled * 3 }" diff --git a/impl/testdata/concatenating_strings.yaml b/impl/testdata/concatenating_strings.yaml new file mode 100644 index 0000000..22cd1b2 --- /dev/null +++ b/impl/testdata/concatenating_strings.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
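The chained_set_tasks.yaml definition above relies on each set task's output becoming the next task's input, so task3 ultimately sees the value computed by task2. Below is a minimal sketch of that hand-off, chaining the runners by hand with the NewSetTaskRunner/Run API exercised in the tests above; in the full engine the ordering comes from the task list rather than a manual loop, so treat this as illustrative only.

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/impl"
	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// The three tasks from chained_set_tasks.yaml, expressed as model.SetTask values.
	tasks := []*model.SetTask{
		{Set: map[string]interface{}{"baseValue": 10}},
		{Set: map[string]interface{}{"doubled": "${ .baseValue * 2 }"}},
		{Set: map[string]interface{}{"tripled": "${ .doubled * 3 }"}},
	}

	state := map[string]interface{}{}
	for i, setTask := range tasks {
		runner, err := impl.NewSetTaskRunner(fmt.Sprintf("task%d", i+1), setTask)
		if err != nil {
			panic(err)
		}
		out, err := runner.Run(state) // evaluate this task against the current state
		if err != nil {
			panic(err)
		}
		state = out.(map[string]interface{}) // set tasks yield maps, as the tests above show
	}
	fmt.Println(state["tripled"]) // 60 under this manual chaining
}
```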
+ +document: + name: concatenating-strings + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + firstName: "John" + lastName: "" + - task2: + set: + firstName: "${ .firstName }" + lastName: "Doe" + - task3: + set: + fullName: "${ .firstName + ' ' + .lastName }" diff --git a/impl/testdata/conditional_logic.yaml b/impl/testdata/conditional_logic.yaml new file mode 100644 index 0000000..30135a5 --- /dev/null +++ b/impl/testdata/conditional_logic.yaml @@ -0,0 +1,26 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: conditional-logic + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + temperature: 30 + - task2: + set: + weather: "${ if .temperature > 25 then 'hot' else 'cold' end }" diff --git a/impl/testdata/conditional_logic_input_from.yaml b/impl/testdata/conditional_logic_input_from.yaml new file mode 100644 index 0000000..f64f3e8 --- /dev/null +++ b/impl/testdata/conditional_logic_input_from.yaml @@ -0,0 +1,25 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: conditional-logic + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +input: + from: "${ .localWeather }" +do: + - task2: + set: + weather: "${ if .temperature > 25 then 'hot' else 'cold' end }" diff --git a/impl/testdata/for_colors.yaml b/impl/testdata/for_colors.yaml new file mode 100644 index 0000000..ac33620 --- /dev/null +++ b/impl/testdata/for_colors.yaml @@ -0,0 +1,28 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
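Both conditional_logic.yaml and conditional_logic_input_from.yaml lean on jq's if/then/else inside a `${ ... }` runtime expression (the second variant first narrows the task input with `input.from: ${ .localWeather }`). The sketch below evaluates such an expression directly with the expr helper this patch introduces; it assumes expr.TraverseAndEvaluate accepts a bare expression string plus input and returns the evaluated result, which is how impl/utils.go uses it.

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/expr"
)

func main() {
	input := map[string]interface{}{"temperature": 30}

	// Same shape as the task2 expression in conditional_logic.yaml, using jq's
	// double-quoted string literals.
	out, err := expr.TraverseAndEvaluate(`${ if .temperature > 25 then "hot" else "cold" end }`, input)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // hot
}
```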
+ +document: + dsl: '1.0.0' + namespace: default + name: for + version: '1.0.0' +do: + - loopColors: + for: + each: color + in: '${ .colors }' + do: + - markProcessed: + set: + processed: '${ { colors: (.processed.colors + [ $color ]), indexes: (.processed.indexes + [ $index ])} }' diff --git a/impl/testdata/raise_conditional.yaml b/impl/testdata/raise_conditional.yaml new file mode 100644 index 0000000..2d9f809 --- /dev/null +++ b/impl/testdata/raise_conditional.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# $schema: https://raw.githubusercontent.com/serverlessworkflow/specification/refs/heads/main/schema/workflow.yaml +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-conditional + version: '1.0.0' +do: + - underageError: + if: ${ .user.age < 18 } + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authorization + status: 403 + title: Authorization Error + detail: "User is under the required age" + - continueProcess: + set: + message: "User is allowed" diff --git a/impl/testdata/raise_error_with_input.yaml b/impl/testdata/raise_error_with_input.yaml new file mode 100644 index 0000000..96affe1 --- /dev/null +++ b/impl/testdata/raise_error_with_input.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-with-input + version: '1.0.0' +do: + - dynamicError: + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authentication + status: 401 + title: Authentication Error + detail: '${ "User authentication failed: \( .reason )" }' diff --git a/impl/testdata/raise_inline.yaml b/impl/testdata/raise_inline.yaml new file mode 100644 index 0000000..c464877 --- /dev/null +++ b/impl/testdata/raise_inline.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-inline + version: '1.0.0' +do: + - inlineError: + raise: + error: + type: https://serverlessworkflow.io/spec/1.0.0/errors/validation + status: 400 + title: Validation Error + detail: ${ "Invalid input provided to workflow '\( $workflow.definition.document.name )'" } diff --git a/impl/testdata/raise_reusable.yaml b/impl/testdata/raise_reusable.yaml new file mode 100644 index 0000000..33a203d --- /dev/null +++ b/impl/testdata/raise_reusable.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-reusable + version: '1.0.0' +use: + errors: + AuthenticationError: + type: https://serverlessworkflow.io/spec/1.0.0/errors/authentication + status: 401 + title: Authentication Error + detail: "User is not authenticated" +do: + - authError: + raise: + error: AuthenticationError diff --git a/impl/testdata/raise_undefined_reference.yaml b/impl/testdata/raise_undefined_reference.yaml new file mode 100644 index 0000000..1316818 --- /dev/null +++ b/impl/testdata/raise_undefined_reference.yaml @@ -0,0 +1,23 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: test + name: raise-undefined-reference + version: '1.0.0' +do: + - missingError: + raise: + error: UndefinedError diff --git a/impl/testdata/sequential_set_colors.yaml b/impl/testdata/sequential_set_colors.yaml new file mode 100644 index 0000000..b956c71 --- /dev/null +++ b/impl/testdata/sequential_set_colors.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
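The raise_* workflows above exercise the structured error model added to the model package later in this patch: an inline error definition, a reusable one declared under `use: errors:`, and an undefined reference that must fail. The sketch below shows what a raised authentication error looks like to a consumer, using the new constructor and classification helpers; the instance path is written by hand here, whereas the runner derives it from the workflow.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// Equivalent to the AuthenticationError declared in raise_reusable.yaml:
	// type .../errors/authentication, status 401, plus a title and detail.
	raised := model.NewErrAuthentication(errors.New("User is not authenticated"), "/do/0/authError")

	fmt.Println(model.IsErrAuthentication(raised)) // true
	fmt.Println(model.IsErrValidation(raised))     // false

	// Errors serialize to the type/status/title/detail/instance shape defined in
	// model/errors.go further down.
	asJSON, err := model.ErrorToJSON(raised)
	if err != nil {
		panic(err)
	}
	fmt.Println(asJSON)
}
```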
+ +document: + dsl: '1.0.0-alpha5' + namespace: default + name: do + version: '1.0.0' +do: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } + output: + as: "${ { resultColors: .colors } }" \ No newline at end of file diff --git a/impl/testdata/sequential_set_colors_output_as.yaml b/impl/testdata/sequential_set_colors_output_as.yaml new file mode 100644 index 0000000..53c4919 --- /dev/null +++ b/impl/testdata/sequential_set_colors_output_as.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0-alpha5' + namespace: default + name: do + version: '1.0.0' +do: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } +output: + as: "${ { result: .colors } }" \ No newline at end of file diff --git a/impl/testdata/set_tasks_invalid_then.yaml b/impl/testdata/set_tasks_invalid_then.yaml new file mode 100644 index 0000000..325c0c2 --- /dev/null +++ b/impl/testdata/set_tasks_invalid_then.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: invalid-then-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + partialResult: 15 + then: nonExistentTask + - task2: + set: + skipped: true diff --git a/impl/testdata/set_tasks_with_termination.yaml b/impl/testdata/set_tasks_with_termination.yaml new file mode 100644 index 0000000..3c819bd --- /dev/null +++ b/impl/testdata/set_tasks_with_termination.yaml @@ -0,0 +1,27 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
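set_tasks_invalid_then.yaml above and the termination/`then` workflows that follow pin down flow-directive handling: an unknown `then` target must surface an error, `then: end` stops the run, and a valid target jumps over intermediate tasks. The traversal itself is what the new TaskList.Next helper in model/task.go implements; here is a minimal sketch of driving it, assuming the TaskList/TaskItem/SetTask literals used by the tests further down.

```go
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	tasks := model.TaskList{
		&model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 30}}},
		&model.TaskItem{Key: "task2", Task: &model.SetTask{Set: map[string]interface{}{"skipped": true}}},
		&model.TaskItem{Key: "task3", Task: &model.SetTask{Set: map[string]interface{}{"result": "${ .value * 3 }"}}},
	}

	// Next follows a task's `then` directive when present (jumping by key, or stopping
	// on a termination) and otherwise falls through to the next list entry.
	for idx, item := 0, tasks[0]; item != nil; idx, item = tasks.Next(idx) {
		fmt.Println("visiting", item.Key) // task1, task2, task3 in sequence here
	}
}
```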
+ +document: + name: termination-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalValue: 20 + then: end + - task2: + set: + skipped: true diff --git a/impl/testdata/set_tasks_with_then.yaml b/impl/testdata/set_tasks_with_then.yaml new file mode 100644 index 0000000..e0f8155 --- /dev/null +++ b/impl/testdata/set_tasks_with_then.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: then-workflow + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + value: 30 + then: task3 + - task2: + set: + skipped: true + - task3: + set: + result: "${ .value * 3 }" diff --git a/impl/testdata/task_export_schema.yaml b/impl/testdata/task_export_schema.yaml new file mode 100644 index 0000000..e63e869 --- /dev/null +++ b/impl/testdata/task_export_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-export-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + exportedKey: "${ .key }" + export: + schema: + format: "json" + document: + type: "object" + properties: + exportedKey: + type: "string" + required: ["exportedKey"] diff --git a/impl/testdata/task_input_schema.yaml b/impl/testdata/task_input_schema.yaml new file mode 100644 index 0000000..d93b574 --- /dev/null +++ b/impl/testdata/task_input_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
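task_export_schema.yaml above, and the input/output schema workflows that follow, are all backed by the small helpers in impl/utils.go (validateSchema, traverseAndEvaluate, processIO). The sketch below shows how they are meant to compose on the input side of a task; the wrapper name prepareTaskInput is made up for illustration, only the calls inside it come from this patch, and it is written as if it sat alongside utils.go since those helpers are unexported.

```go
package impl

import "github.com/serverlessworkflow/sdk-go/v3/model"

// prepareTaskInput is a hypothetical helper: validate the task's `input.schema`,
// then reshape the data with `input.from` before the task body runs.
func prepareTaskInput(raw interface{}, schema *model.Schema, from *model.ObjectOrRuntimeExpr, taskName string) (interface{}, error) {
	// 1. Schema validation; a failure becomes a model validation error whose
	//    instance points at taskName.
	if err := validateSchema(raw, schema, taskName); err != nil {
		return nil, err
	}
	// 2. `input.from` is a runtime expression (or object) evaluated against the raw input.
	return traverseAndEvaluate(from, raw, taskName)
}
```

Both validateSchema and traverseAndEvaluate tolerate nil arguments, so a helper like this works unchanged for tasks that declare only a schema, only a transformation, or neither.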
+ +document: + name: task-input-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + input: + schema: + format: "json" + document: + type: "object" + properties: + taskInputKey: + type: "number" + required: ["taskInputKey"] + set: + taskOutputKey: "${ .taskInputKey * 2 }" diff --git a/impl/testdata/task_output_schema.yaml b/impl/testdata/task_output_schema.yaml new file mode 100644 index 0000000..73d784b --- /dev/null +++ b/impl/testdata/task_output_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-output-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalOutputKey: "resultValue" + output: + schema: + format: "json" + document: + type: "object" + properties: + finalOutputKey: + type: "string" + required: ["finalOutputKey"] diff --git a/impl/testdata/task_output_schema_with_dynamic_value.yaml b/impl/testdata/task_output_schema_with_dynamic_value.yaml new file mode 100644 index 0000000..39a7df9 --- /dev/null +++ b/impl/testdata/task_output_schema_with_dynamic_value.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + name: task-output-schema-with-dynamic-value + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +do: + - task1: + set: + finalOutputKey: "${ .taskInputKey }" + output: + schema: + format: "json" + document: + type: "object" + properties: + finalOutputKey: + type: "string" + required: ["finalOutputKey"] diff --git a/impl/testdata/workflow_input_schema.yaml b/impl/testdata/workflow_input_schema.yaml new file mode 100644 index 0000000..fabf484 --- /dev/null +++ b/impl/testdata/workflow_input_schema.yaml @@ -0,0 +1,32 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
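When one of these schema checks fails, the runner is expected to report it through the structured error type with a JSON Pointer to the offending task; workflow_input_schema.yaml below is the workflow-level variant of the same idea. The sketch below traces that reporting path using only the model helpers added further down (GenerateJSONPointer, NewErrValidation); the workflow literal mirrors the single set task from that file, and the detail message is invented for illustration.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	workflow := &model.Workflow{
		Document: model.Document{Name: "workflow-input-schema"},
		Do: &model.TaskList{
			&model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"outputKey": "${ .key }"}}},
		},
	}

	// Locate the task inside the serialized workflow, e.g. "/do/0/task1".
	instance, err := model.GenerateJSONPointer(workflow, "task1")
	if err != nil {
		panic(err)
	}

	verr := model.NewErrValidation(errors.New("required property 'key' is missing"), instance)
	fmt.Println(verr)                        // [400] Validation Error: ...
	fmt.Println(model.IsErrValidation(verr)) // true
}
```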
+ +document: + name: workflow-input-schema + dsl: '1.0.0-alpha5' + namespace: default + version: '1.0.0' +input: + schema: + format: "json" + document: + type: "object" + properties: + key: + type: "string" + required: ["key"] +do: + - task1: + set: + outputKey: "${ .key }" diff --git a/impl/utils.go b/impl/utils.go new file mode 100644 index 0000000..2cdf952 --- /dev/null +++ b/impl/utils.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "github.com/serverlessworkflow/sdk-go/v3/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +// Deep clone a map to avoid modifying the original object +func deepClone(obj map[string]interface{}) map[string]interface{} { + clone := make(map[string]interface{}) + for key, value := range obj { + clone[key] = deepCloneValue(value) + } + return clone +} + +func deepCloneValue(value interface{}) interface{} { + if m, ok := value.(map[string]interface{}); ok { + return deepClone(m) + } + if s, ok := value.([]interface{}); ok { + clonedSlice := make([]interface{}, len(s)) + for i, v := range s { + clonedSlice[i] = deepCloneValue(v) + } + return clonedSlice + } + return value +} + +func validateSchema(data interface{}, schema *model.Schema, taskName string) error { + if schema != nil { + if err := ValidateJSONSchema(data, schema); err != nil { + return model.NewErrValidation(err, taskName) + } + } + return nil +} + +func traverseAndEvaluate(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string) (output interface{}, err error) { + if runtimeExpr == nil { + return input, nil + } + output, err = expr.TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input) + if err != nil { + return nil, model.NewErrExpression(err, taskName) + } + return output, nil +} + +func processIO(data interface{}, schema *model.Schema, transformation *model.ObjectOrRuntimeExpr, taskName string) (interface{}, error) { + if schema != nil { + if err := validateSchema(data, schema, taskName); err != nil { + return nil, err + } + } + + if transformation != nil { + transformed, err := traverseAndEvaluate(transformation, data, taskName) + if err != nil { + return nil, err + } + return transformed, nil + } + + return data, nil +} diff --git a/model/endpoint.go b/model/endpoint.go index 9c59fb5..38e2cea 100644 --- a/model/endpoint.go +++ b/model/endpoint.go @@ -33,6 +33,7 @@ var LiteralUriTemplatePattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9+\-.]*:// type URITemplate interface { IsURITemplate() bool String() string + GetValue() interface{} } // UnmarshalURITemplate is a shared function for unmarshalling URITemplate fields. @@ -69,6 +70,10 @@ func (t *LiteralUriTemplate) String() string { return t.Value } +func (t *LiteralUriTemplate) GetValue() interface{} { + return t.Value +} + type LiteralUri struct { Value string `json:"-" validate:"required,uri_pattern"` // Validate pattern for URI. 
} @@ -85,6 +90,10 @@ func (u *LiteralUri) String() string { return u.Value } +func (u *LiteralUri) GetValue() interface{} { + return u.Value +} + type EndpointConfiguration struct { URI URITemplate `json:"uri" validate:"required"` Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` diff --git a/model/endpoint_test.go b/model/endpoint_test.go index 59ddd45..974216e 100644 --- a/model/endpoint_test.go +++ b/model/endpoint_test.go @@ -79,7 +79,7 @@ func TestEndpoint_UnmarshalJSON(t *testing.T) { assert.Error(t, err, "Unmarshal should return an error for invalid JSON structure") }) - t.Run("Empty Input", func(t *testing.T) { + t.Run("Empty input", func(t *testing.T) { input := `{}` var endpoint Endpoint err := json.Unmarshal([]byte(input), &endpoint) @@ -99,7 +99,7 @@ func TestEndpoint_MarshalJSON(t *testing.T) { data, err := json.Marshal(endpoint) assert.NoError(t, err, "Marshal should not return an error") - assert.JSONEq(t, `"${example}"`, string(data), "Output JSON should match") + assert.JSONEq(t, `"${example}"`, string(data), "output JSON should match") }) t.Run("Marshal URITemplate", func(t *testing.T) { @@ -109,7 +109,7 @@ func TestEndpoint_MarshalJSON(t *testing.T) { data, err := json.Marshal(endpoint) assert.NoError(t, err, "Marshal should not return an error") - assert.JSONEq(t, `"http://example.com/{id}"`, string(data), "Output JSON should match") + assert.JSONEq(t, `"http://example.com/{id}"`, string(data), "output JSON should match") }) t.Run("Marshal EndpointConfiguration", func(t *testing.T) { @@ -131,7 +131,7 @@ func TestEndpoint_MarshalJSON(t *testing.T) { "basic": { "username": "john", "password": "secret" } } }` - assert.JSONEq(t, expected, string(data), "Output JSON should match") + assert.JSONEq(t, expected, string(data), "output JSON should match") }) t.Run("Marshal Empty Endpoint", func(t *testing.T) { @@ -139,6 +139,6 @@ func TestEndpoint_MarshalJSON(t *testing.T) { data, err := json.Marshal(endpoint) assert.NoError(t, err, "Marshal should not return an error") - assert.JSONEq(t, `{}`, string(data), "Output JSON should be empty") + assert.JSONEq(t, `{}`, string(data), "output JSON should be empty") }) } diff --git a/model/errors.go b/model/errors.go new file mode 100644 index 0000000..eeef71c --- /dev/null +++ b/model/errors.go @@ -0,0 +1,324 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "strings" +) + +// List of Standard Errors based on the Serverless Workflow specification. 
+// See: https://github.com/serverlessworkflow/specification/blob/main/dsl-reference.md#standard-error-types +const ( + ErrorTypeConfiguration = "https://serverlessworkflow.io/spec/1.0.0/errors/configuration" + ErrorTypeValidation = "https://serverlessworkflow.io/spec/1.0.0/errors/validation" + ErrorTypeExpression = "https://serverlessworkflow.io/spec/1.0.0/errors/expression" + ErrorTypeAuthentication = "https://serverlessworkflow.io/spec/1.0.0/errors/authentication" + ErrorTypeAuthorization = "https://serverlessworkflow.io/spec/1.0.0/errors/authorization" + ErrorTypeTimeout = "https://serverlessworkflow.io/spec/1.0.0/errors/timeout" + ErrorTypeCommunication = "https://serverlessworkflow.io/spec/1.0.0/errors/communication" + ErrorTypeRuntime = "https://serverlessworkflow.io/spec/1.0.0/errors/runtime" +) + +type Error struct { + // A URI reference that identifies the error type. + // For cross-compatibility concerns, it is strongly recommended to use Standard Error Types whenever possible. + // Runtimes MUST ensure that the property has been set when raising or escalating the error. + Type *URITemplateOrRuntimeExpr `json:"type" validate:"required"` + // The status code generated by the origin for this occurrence of the error. + // For cross-compatibility concerns, it is strongly recommended to use HTTP Status Codes whenever possible. + // Runtimes MUST ensure that the property has been set when raising or escalating the error. + Status int `json:"status" validate:"required"` + // A short, human-readable summary of the error. + Title *StringOrRuntimeExpr `json:"title,omitempty"` + // A human-readable explanation specific to this occurrence of the error. + Detail *StringOrRuntimeExpr `json:"detail,omitempty"` + // A JSON Pointer used to reference the component the error originates from. + // Runtimes MUST set the property when raising or escalating the error. Otherwise ignore. + Instance *JsonPointerOrRuntimeExpression `json:"instance,omitempty" validate:"omitempty"` +} + +type ErrorFilter struct { + Type string `json:"type,omitempty"` + Status int `json:"status,omitempty"` + Instance string `json:"instance,omitempty"` + Title string `json:"title,omitempty"` + Details string `json:"details,omitempty"` +} + +func (e *Error) Error() string { + return fmt.Sprintf("[%d] %s: %s (%s). 
Origin: '%s'", e.Status, e.Title, e.Detail, e.Type, e.Instance) +} + +// WithInstanceRef ensures the error has a valid JSON Pointer reference +func (e *Error) WithInstanceRef(workflow *Workflow, taskName string) *Error { + if e == nil { + return nil + } + + // Check if the instance is already set + if e.Instance.IsValid() { + return e + } + + // Generate a JSON pointer reference for the task within the workflow + instance, pointerErr := GenerateJSONPointer(workflow, taskName) + if pointerErr == nil { + e.Instance = &JsonPointerOrRuntimeExpression{Value: instance} + } + // TODO: log the pointer error + + return e +} + +// newError creates a new structured error +func newError(errType string, status int, title string, detail error, instance string) *Error { + if detail != nil { + return &Error{ + Type: NewUriTemplate(errType), + Status: status, + Title: NewStringOrRuntimeExpr(title), + Detail: NewStringOrRuntimeExpr(detail.Error()), + Instance: &JsonPointerOrRuntimeExpression{ + Value: instance, + }, + } + } + + return &Error{ + Type: NewUriTemplate(errType), + Status: status, + Title: NewStringOrRuntimeExpr(title), + Instance: &JsonPointerOrRuntimeExpression{ + Value: instance, + }, + } +} + +// Convenience Functions for Standard Errors + +func NewErrConfiguration(detail error, instance string) *Error { + return newError( + ErrorTypeConfiguration, + 400, + "Configuration Error", + detail, + instance, + ) +} + +func NewErrValidation(detail error, instance string) *Error { + return newError( + ErrorTypeValidation, + 400, + "Validation Error", + detail, + instance, + ) +} + +func NewErrExpression(detail error, instance string) *Error { + return newError( + ErrorTypeExpression, + 400, + "Expression Error", + detail, + instance, + ) +} + +func NewErrAuthentication(detail error, instance string) *Error { + return newError( + ErrorTypeAuthentication, + 401, + "Authentication Error", + detail, + instance, + ) +} + +func NewErrAuthorization(detail error, instance string) *Error { + return newError( + ErrorTypeAuthorization, + 403, + "Authorization Error", + detail, + instance, + ) +} + +func NewErrTimeout(detail error, instance string) *Error { + return newError( + ErrorTypeTimeout, + 408, + "Timeout Error", + detail, + instance, + ) +} + +func NewErrCommunication(detail error, instance string) *Error { + return newError( + ErrorTypeCommunication, + 500, + "Communication Error", + detail, + instance, + ) +} + +func NewErrRuntime(detail error, instance string) *Error { + return newError( + ErrorTypeRuntime, + 500, + "Runtime Error", + detail, + instance, + ) +} + +// Error Classification Functions + +func IsErrConfiguration(err error) bool { + return isErrorType(err, ErrorTypeConfiguration) +} + +func IsErrValidation(err error) bool { + return isErrorType(err, ErrorTypeValidation) +} + +func IsErrExpression(err error) bool { + return isErrorType(err, ErrorTypeExpression) +} + +func IsErrAuthentication(err error) bool { + return isErrorType(err, ErrorTypeAuthentication) +} + +func IsErrAuthorization(err error) bool { + return isErrorType(err, ErrorTypeAuthorization) +} + +func IsErrTimeout(err error) bool { + return isErrorType(err, ErrorTypeTimeout) +} + +func IsErrCommunication(err error) bool { + return isErrorType(err, ErrorTypeCommunication) +} + +func IsErrRuntime(err error) bool { + return isErrorType(err, ErrorTypeRuntime) +} + +// Helper function to check error type +func isErrorType(err error, errorType string) bool { + var e *Error + if ok := errors.As(err, &e); ok && 
strings.EqualFold(e.Type.String(), errorType) { + return true + } + return false +} + +// AsError attempts to extract a known error type from the given error. +// If the error is one of the predefined structured errors, it returns the *Error. +// Otherwise, it returns nil. +func AsError(err error) *Error { + var e *Error + if errors.As(err, &e) { + return e // Successfully extracted as a known error type + } + return nil // Not a known error +} + +// Serialization and Deserialization Functions + +func ErrorToJSON(err *Error) (string, error) { + if err == nil { + return "", fmt.Errorf("error is nil") + } + jsonBytes, marshalErr := json.Marshal(err) + if marshalErr != nil { + return "", fmt.Errorf("failed to marshal error: %w", marshalErr) + } + return string(jsonBytes), nil +} + +func ErrorFromJSON(jsonStr string) (*Error, error) { + var errObj Error + if err := json.Unmarshal([]byte(jsonStr), &errObj); err != nil { + return nil, fmt.Errorf("failed to unmarshal error JSON: %w", err) + } + return &errObj, nil +} + +// JsonPointer functions + +func findJsonPointer(data interface{}, target string, path string) (string, bool) { + switch node := data.(type) { + case map[string]interface{}: + for key, value := range node { + newPath := fmt.Sprintf("%s/%s", path, key) + if key == target { + return newPath, true + } + if result, found := findJsonPointer(value, target, newPath); found { + return result, true + } + } + case []interface{}: + for i, item := range node { + newPath := fmt.Sprintf("%s/%d", path, i) + if result, found := findJsonPointer(item, target, newPath); found { + return result, true + } + } + } + return "", false +} + +// GenerateJSONPointer Function to generate JSON Pointer from a Workflow reference +func GenerateJSONPointer(workflow *Workflow, targetNode interface{}) (string, error) { + // Convert struct to JSON + jsonData, err := json.Marshal(workflow) + if err != nil { + return "", fmt.Errorf("error marshalling to JSON: %w", err) + } + + // Convert JSON to a generic map for traversal + var jsonMap map[string]interface{} + if err := json.Unmarshal(jsonData, &jsonMap); err != nil { + return "", fmt.Errorf("error unmarshalling JSON: %w", err) + } + + transformedNode := "" + switch node := targetNode.(type) { + case string: + transformedNode = node + default: + transformedNode = strings.ToLower(reflect.TypeOf(targetNode).Name()) + } + + // Search for the target node + jsonPointer, found := findJsonPointer(jsonMap, transformedNode, "") + if !found { + return "", fmt.Errorf("node '%s' not found", targetNode) + } + + return jsonPointer, nil +} diff --git a/model/errors_test.go b/model/errors_test.go new file mode 100644 index 0000000..12a00fb --- /dev/null +++ b/model/errors_test.go @@ -0,0 +1,139 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestGenerateJSONPointer_SimpleTask tests a simple workflow task. 
+func TestGenerateJSONPointer_SimpleTask(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "simple-workflow"}, + Do: &TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"value": 10}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "task2") + assert.NoError(t, err) + assert.Equal(t, "/do/1/task2", jsonPointer) +} + +// TestGenerateJSONPointer_SimpleTask tests a simple workflow task. +func TestGenerateJSONPointer_Document(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "simple-workflow"}, + Do: &TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"value": 10}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, workflow.Document) + assert.NoError(t, err) + assert.Equal(t, "/document", jsonPointer) +} + +func TestGenerateJSONPointer_ForkTask(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "fork-example"}, + Do: &TaskList{ + &TaskItem{ + Key: "raiseAlarm", + Task: &ForkTask{ + Fork: ForkTaskConfiguration{ + Compete: true, + Branches: &TaskList{ + {Key: "callNurse", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "put", Endpoint: NewEndpoint("https://hospital.com/api/alert/nurses")}}}, + {Key: "callDoctor", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "put", Endpoint: NewEndpoint("https://hospital.com/api/alert/doctor")}}}, + }, + }, + }, + }, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "callDoctor") + assert.NoError(t, err) + assert.Equal(t, "/do/0/raiseAlarm/fork/branches/1/callDoctor", jsonPointer) +} + +// TestGenerateJSONPointer_DeepNestedTask tests multiple nested task levels. +func TestGenerateJSONPointer_DeepNestedTask(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "deep-nested"}, + Do: &TaskList{ + &TaskItem{ + Key: "step1", + Task: &ForkTask{ + Fork: ForkTaskConfiguration{ + Compete: false, + Branches: &TaskList{ + { + Key: "branchA", + Task: &ForkTask{ + Fork: ForkTaskConfiguration{ + Branches: &TaskList{ + { + Key: "deepTask", + Task: &SetTask{Set: map[string]interface{}{"result": "done"}}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "deepTask") + assert.NoError(t, err) + assert.Equal(t, "/do/0/step1/fork/branches/0/branchA/fork/branches/0/deepTask", jsonPointer) +} + +// TestGenerateJSONPointer_NonExistentTask checks for a task that doesn't exist. +func TestGenerateJSONPointer_NonExistentTask(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "nonexistent-test"}, + Do: &TaskList{ + &TaskItem{Key: "taskA", Task: &SetTask{Set: map[string]interface{}{"value": 5}}}, + }, + } + + _, err := GenerateJSONPointer(workflow, "taskX") + assert.Error(t, err) +} + +// TestGenerateJSONPointer_MixedTaskTypes verifies a workflow with different task types. 
+func TestGenerateJSONPointer_MixedTaskTypes(t *testing.T) { + workflow := &Workflow{ + Document: Document{Name: "mixed-tasks"}, + Do: &TaskList{ + &TaskItem{Key: "compute", Task: &SetTask{Set: map[string]interface{}{"result": 42}}}, + &TaskItem{Key: "notify", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "post", Endpoint: NewEndpoint("https://api.notify.com")}}}, + }, + } + + jsonPointer, err := GenerateJSONPointer(workflow, "notify") + assert.NoError(t, err) + assert.Equal(t, "/do/1/notify", jsonPointer) +} diff --git a/model/extension_test.go b/model/extension_test.go index 7a11a5f..f258a4c 100644 --- a/model/extension_test.go +++ b/model/extension_test.go @@ -19,7 +19,7 @@ import ( "errors" "testing" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" ) diff --git a/model/objects.go b/model/objects.go index ecfba00..d79ac55 100644 --- a/model/objects.go +++ b/model/objects.go @@ -21,11 +21,31 @@ import ( "regexp" ) +var _ Object = &ObjectOrString{} +var _ Object = &ObjectOrRuntimeExpr{} +var _ Object = &RuntimeExpression{} +var _ Object = &URITemplateOrRuntimeExpr{} +var _ Object = &StringOrRuntimeExpr{} +var _ Object = &JsonPointerOrRuntimeExpression{} + +type Object interface { + String() string + GetValue() interface{} +} + // ObjectOrString is a type that can hold either a string or an object. type ObjectOrString struct { Value interface{} `validate:"object_or_string"` } +func (o *ObjectOrString) String() string { + return fmt.Sprintf("%v", o.Value) +} + +func (o *ObjectOrString) GetValue() interface{} { + return o.Value +} + // UnmarshalJSON unmarshals data into either a string or an object. func (o *ObjectOrString) UnmarshalJSON(data []byte) error { var asString string @@ -53,6 +73,26 @@ type ObjectOrRuntimeExpr struct { Value interface{} `json:"-" validate:"object_or_runtime_expr"` // Custom validation tag. } +func (o *ObjectOrRuntimeExpr) String() string { + return fmt.Sprintf("%v", o.Value) +} + +func (o *ObjectOrRuntimeExpr) GetValue() interface{} { + return o.Value +} + +func (o *ObjectOrRuntimeExpr) AsStringOrMap() interface{} { + switch o.Value.(type) { + case map[string]interface{}: + return o.Value.(map[string]interface{}) + case string: + return o.Value.(string) + case RuntimeExpression: + return o.Value.(RuntimeExpression).Value + } + return nil +} + // UnmarshalJSON unmarshals data into either a RuntimeExpression or an object. func (o *ObjectOrRuntimeExpr) UnmarshalJSON(data []byte) error { // Attempt to decode as a RuntimeExpression @@ -102,11 +142,21 @@ func (o *ObjectOrRuntimeExpr) Validate() error { return nil } +func NewStringOrRuntimeExpr(value string) *StringOrRuntimeExpr { + return &StringOrRuntimeExpr{ + Value: value, + } +} + // StringOrRuntimeExpr is a type that can hold either a RuntimeExpression or a string. type StringOrRuntimeExpr struct { Value interface{} `json:"-" validate:"string_or_runtime_expr"` // Custom validation tag. } +func (s *StringOrRuntimeExpr) AsObjectOrRuntimeExpr() *ObjectOrRuntimeExpr { + return &ObjectOrRuntimeExpr{Value: s.Value} +} + // UnmarshalJSON unmarshals data into either a RuntimeExpression or a string. 
func (s *StringOrRuntimeExpr) UnmarshalJSON(data []byte) error { // Attempt to decode as a RuntimeExpression @@ -150,6 +200,10 @@ func (s *StringOrRuntimeExpr) String() string { } } +func (s *StringOrRuntimeExpr) GetValue() interface{} { + return s.Value +} + // URITemplateOrRuntimeExpr represents a type that can be a URITemplate or a RuntimeExpression. type URITemplateOrRuntimeExpr struct { Value interface{} `json:"-" validate:"uri_template_or_runtime_expr"` // Custom validation. @@ -211,10 +265,16 @@ func (u *URITemplateOrRuntimeExpr) String() string { return v.String() case RuntimeExpression: return v.String() + case string: + return v } return "" } +func (u *URITemplateOrRuntimeExpr) GetValue() interface{} { + return u.Value +} + // JsonPointerOrRuntimeExpression represents a type that can be a JSON Pointer or a RuntimeExpression. type JsonPointerOrRuntimeExpression struct { Value interface{} `json:"-" validate:"json_pointer_or_runtime_expr"` // Custom validation tag. @@ -258,3 +318,22 @@ func (j *JsonPointerOrRuntimeExpression) MarshalJSON() ([]byte, error) { return nil, fmt.Errorf("JsonPointerOrRuntimeExpression contains unsupported type") } } + +func (j *JsonPointerOrRuntimeExpression) String() string { + switch v := j.Value.(type) { + case RuntimeExpression: + return v.String() + case string: + return v + default: + return "" + } +} + +func (j *JsonPointerOrRuntimeExpression) GetValue() interface{} { + return j.Value +} + +func (j *JsonPointerOrRuntimeExpression) IsValid() bool { + return JSONPointerPattern.MatchString(j.String()) +} diff --git a/model/runtime_expression.go b/model/runtime_expression.go index c67a3ef..6a056cb 100644 --- a/model/runtime_expression.go +++ b/model/runtime_expression.go @@ -17,8 +17,8 @@ package model import ( "encoding/json" "fmt" - "github.com/itchyny/gojq" - "strings" + + "github.com/serverlessworkflow/sdk-go/v3/expr" ) // RuntimeExpression represents a runtime expression. @@ -34,22 +34,9 @@ func NewExpr(runtimeExpression string) *RuntimeExpression { return &RuntimeExpression{Value: runtimeExpression} } -// preprocessExpression removes `${}` if present and returns the inner content. -func preprocessExpression(expression string) string { - if strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") { - return strings.TrimSpace(expression[2 : len(expression)-1]) - } - return expression // Return the expression as-is if `${}` are not present -} - // IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. func (r *RuntimeExpression) IsValid() bool { - // Preprocess to extract content inside `${}` if present - processedExpr := preprocessExpression(r.Value) - - // Validate the processed expression using gojq - _, err := gojq.Parse(processedExpr) - return err == nil + return expr.IsValid(r.Value) } // UnmarshalJSON implements custom unmarshalling for RuntimeExpression. @@ -79,3 +66,7 @@ func (r *RuntimeExpression) MarshalJSON() ([]byte, error) { func (r *RuntimeExpression) String() string { return r.Value } + +func (r *RuntimeExpression) GetValue() interface{} { + return r.Value +} diff --git a/model/task.go b/model/task.go index 3bbeb4d..4edbd40 100644 --- a/model/task.go +++ b/model/task.go @@ -36,33 +36,8 @@ type TaskBase struct { } // Task represents a discrete unit of work in a workflow. -type Task interface{} - -// TaskItem represents a named task and its associated definition. 
-type TaskItem struct { - Key string `json:"-" validate:"required"` - Task Task `json:"-" validate:"required"` -} - -// MarshalJSON for TaskItem to ensure proper serialization as a key-value pair. -func (ti *TaskItem) MarshalJSON() ([]byte, error) { - if ti == nil { - return nil, fmt.Errorf("cannot marshal a nil TaskItem") - } - - // Serialize the Task - taskJSON, err := json.Marshal(ti.Task) - if err != nil { - return nil, fmt.Errorf("failed to marshal task: %w", err) - } - - // Create a map with the Key and Task - taskEntry := map[string]json.RawMessage{ - ti.Key: taskJSON, - } - - // Marshal the map into JSON - return json.Marshal(taskEntry) +type Task interface { + GetBase() *TaskBase } type NamedTaskMap map[string]Task @@ -92,6 +67,28 @@ func (ntm *NamedTaskMap) UnmarshalJSON(data []byte) error { // TaskList represents a list of named tasks to perform. type TaskList []*TaskItem +// Next gets the next item in the list based on the current index +func (tl *TaskList) Next(currentIdx int) (int, *TaskItem) { + if currentIdx == -1 || currentIdx >= len(*tl) { + return -1, nil + } + + current := (*tl)[currentIdx] + if current.GetBase() != nil && current.GetBase().Then != nil { + then := current.GetBase().Then + if then.IsTermination() { + return -1, nil + } + return tl.KeyAndIndex(then.Value) + } + + // Proceed sequentially if no 'then' is specified + if currentIdx+1 < len(*tl) { + return currentIdx + 1, (*tl)[currentIdx+1] + } + return -1, nil +} + // UnmarshalJSON for TaskList to ensure proper deserialization. func (tl *TaskList) UnmarshalJSON(data []byte) error { var rawTasks []json.RawMessage @@ -146,6 +143,8 @@ func unmarshalTask(key string, taskRaw json.RawMessage) (Task, error) { return nil, fmt.Errorf("failed to parse task type for key '%s': %w", key, err) } + // TODO: not the most elegant; can be improved in a smarter way + // Determine task type var task Task if callValue, hasCall := taskType["call"].(string); hasCall { @@ -157,8 +156,11 @@ func unmarshalTask(key string, taskRaw json.RawMessage) (Task, error) { // Default to CallFunction for unrecognized call values task = &CallFunction{} } + } else if _, hasFor := taskType["for"]; hasFor { + // Handle special case "for" that also has "do" + task = taskTypeRegistry["for"]() } else { - // Handle non-call tasks (e.g., "do", "fork") + // Handle everything else (e.g., "do", "fork") for typeKey := range taskType { if constructor, exists := taskTypeRegistry[typeKey]; exists { task = constructor() @@ -186,59 +188,49 @@ func (tl *TaskList) MarshalJSON() ([]byte, error) { // Key retrieves a TaskItem by its key. func (tl *TaskList) Key(key string) *TaskItem { - for _, item := range *tl { + _, keyItem := tl.KeyAndIndex(key) + return keyItem +} + +func (tl *TaskList) KeyAndIndex(key string) (int, *TaskItem) { + for i, item := range *tl { if item.Key == key { - return item + return i, item } } - return nil + // TODO: Add logging here for missing task references + return -1, nil } -// AsTask extracts the TaskBase from the Task if the Task embeds TaskBase. -// Returns nil if the Task does not embed TaskBase. -func (ti *TaskItem) AsTask() *TaskBase { - if ti == nil || ti.Task == nil { - return nil +// TaskItem represents a named task and its associated definition. +type TaskItem struct { + Key string `json:"-" validate:"required"` + Task Task `json:"-" validate:"required"` +} + +// MarshalJSON for TaskItem to ensure proper serialization as a key-value pair. 
+func (ti *TaskItem) MarshalJSON() ([]byte, error) { + if ti == nil { + return nil, fmt.Errorf("cannot marshal a nil TaskItem") } - // Use type assertions to check for TaskBase - switch task := ti.Task.(type) { - case *CallHTTP: - return &task.TaskBase - case *CallOpenAPI: - return &task.TaskBase - case *CallGRPC: - return &task.TaskBase - case *CallAsyncAPI: - return &task.TaskBase - case *CallFunction: - return &task.TaskBase - case *DoTask: - return &task.TaskBase - case *ForkTask: - return &task.TaskBase - case *EmitTask: - return &task.TaskBase - case *ForTask: - return &task.TaskBase - case *ListenTask: - return &task.TaskBase - case *RaiseTask: - return &task.TaskBase - case *RunTask: - return &task.TaskBase - case *SetTask: - return &task.TaskBase - case *SwitchTask: - return &task.TaskBase - case *TryTask: - return &task.TaskBase - case *WaitTask: - return &task.TaskBase - default: - // If the type does not embed TaskBase, return nil - return nil + // Serialize the Task + taskJSON, err := json.Marshal(ti.Task) + if err != nil { + return nil, fmt.Errorf("failed to marshal task: %w", err) + } + + // Create a map with the Key and Task + taskEntry := map[string]json.RawMessage{ + ti.Key: taskJSON, } + + // Marshal the map into JSON + return json.Marshal(taskEntry) +} + +func (ti *TaskItem) GetBase() *TaskBase { + return ti.Task.GetBase() } // AsCallHTTPTask casts the Task to a CallTask if possible, returning nil if the cast fails. diff --git a/model/task_call.go b/model/task_call.go index 82412b0..c3e83df 100644 --- a/model/task_call.go +++ b/model/task_call.go @@ -22,6 +22,10 @@ type CallHTTP struct { With HTTPArguments `json:"with" validate:"required"` } +func (c *CallHTTP) GetBase() *TaskBase { + return &c.TaskBase +} + type HTTPArguments struct { Method string `json:"method" validate:"required,oneofci=GET POST PUT DELETE PATCH"` Endpoint *Endpoint `json:"endpoint" validate:"required"` @@ -37,6 +41,10 @@ type CallOpenAPI struct { With OpenAPIArguments `json:"with" validate:"required"` } +func (c *CallOpenAPI) GetBase() *TaskBase { + return &c.TaskBase +} + type OpenAPIArguments struct { Document *ExternalResource `json:"document" validate:"required"` OperationID string `json:"operationId" validate:"required"` @@ -51,6 +59,10 @@ type CallGRPC struct { With GRPCArguments `json:"with" validate:"required"` } +func (c *CallGRPC) GetBase() *TaskBase { + return &c.TaskBase +} + type GRPCArguments struct { Proto *ExternalResource `json:"proto" validate:"required"` Service GRPCService `json:"service" validate:"required"` @@ -72,6 +84,10 @@ type CallAsyncAPI struct { With AsyncAPIArguments `json:"with" validate:"required"` } +func (c *CallAsyncAPI) GetBase() *TaskBase { + return &c.TaskBase +} + type AsyncAPIArguments struct { Document *ExternalResource `json:"document" validate:"required"` Channel string `json:"channel,omitempty"` @@ -110,3 +126,7 @@ type CallFunction struct { Call string `json:"call" validate:"required"` With map[string]interface{} `json:"with,omitempty"` } + +func (c *CallFunction) GetBase() *TaskBase { + return &c.TaskBase +} diff --git a/model/task_do.go b/model/task_do.go index 0b2673d..f1dca25 100644 --- a/model/task_do.go +++ b/model/task_do.go @@ -19,3 +19,7 @@ type DoTask struct { TaskBase `json:",inline"` // Inline TaskBase fields Do *TaskList `json:"do" validate:"required,dive"` } + +func (d *DoTask) GetBase() *TaskBase { + return &d.TaskBase +} diff --git a/model/task_event.go b/model/task_event.go index 8b97388..5df1ab6 100644 --- a/model/task_event.go +++ 
b/model/task_event.go @@ -26,6 +26,10 @@ type EmitTask struct { Emit EmitTaskConfiguration `json:"emit" validate:"required"` } +func (e *EmitTask) GetBase() *TaskBase { + return &e.TaskBase +} + func (e *EmitTask) MarshalJSON() ([]byte, error) { type Alias EmitTask // Prevent recursion return json.Marshal((*Alias)(e)) @@ -37,6 +41,10 @@ type ListenTask struct { Listen ListenTaskConfiguration `json:"listen" validate:"required"` } +func (lt *ListenTask) GetBase() *TaskBase { + return &lt.TaskBase +} + type ListenTaskConfiguration struct { To *EventConsumptionStrategy `json:"to" validate:"required"` } diff --git a/model/task_for.go b/model/task_for.go index 0e6811b..5fc84ec 100644 --- a/model/task_for.go +++ b/model/task_for.go @@ -22,6 +22,10 @@ type ForTask struct { Do *TaskList `json:"do" validate:"required,dive"` } +func (f *ForTask) GetBase() *TaskBase { + return &f.TaskBase +} + // ForTaskConfiguration defines the loop configuration for iterating over a collection. type ForTaskConfiguration struct { Each string `json:"each,omitempty"` // Variable name for the current item diff --git a/model/task_for_test.go b/model/task_for_test.go index e24bf3b..3d8fc37 100644 --- a/model/task_for_test.go +++ b/model/task_for_test.go @@ -16,9 +16,10 @@ package model import ( "encoding/json" - "sigs.k8s.io/yaml" "testing" + + "sigs.k8s.io/yaml" + "github.com/stretchr/testify/assert" ) diff --git a/model/task_fork.go b/model/task_fork.go index 3019d06..1511729 100644 --- a/model/task_fork.go +++ b/model/task_fork.go @@ -20,6 +20,10 @@ type ForkTask struct { Fork ForkTaskConfiguration `json:"fork" validate:"required"` } +func (f *ForkTask) GetBase() *TaskBase { + return &f.TaskBase +} + // ForkTaskConfiguration defines the configuration for the branches to perform concurrently. type ForkTaskConfiguration struct { Branches *TaskList `json:"branches" validate:"required,dive"` diff --git a/model/task_raise.go b/model/task_raise.go index b0c7499..5dafd55 100644 --- a/model/task_raise.go +++ b/model/task_raise.go @@ -19,28 +19,16 @@ import ( "errors" ) -type Error struct { - Type *URITemplateOrRuntimeExpr `json:"type" validate:"required"` - Status int `json:"status" validate:"required"` - Title string `json:"title,omitempty"` - Detail string `json:"detail,omitempty"` - Instance *JsonPointerOrRuntimeExpression `json:"instance,omitempty" validate:"omitempty"` -} - -type ErrorFilter struct { - Type string `json:"type,omitempty"` - Status int `json:"status,omitempty"` - Instance string `json:"instance,omitempty"` - Title string `json:"title,omitempty"` - Details string `json:"details,omitempty"` -} - // RaiseTask represents a task configuration to raise errors.
type RaiseTask struct { TaskBase `json:",inline"` // Inline TaskBase fields Raise RaiseTaskConfiguration `json:"raise" validate:"required"` } +func (r *RaiseTask) GetBase() *TaskBase { + return &r.TaskBase +} + type RaiseTaskConfiguration struct { Error RaiseTaskError `json:"error" validate:"required"` } diff --git a/model/task_raise_test.go b/model/task_raise_test.go index 49ede54..1aa3d3b 100644 --- a/model/task_raise_test.go +++ b/model/task_raise_test.go @@ -38,8 +38,8 @@ func TestRaiseTask_MarshalJSON(t *testing.T) { Definition: &Error{ Type: &URITemplateOrRuntimeExpr{Value: "http://example.com/error"}, Status: 500, - Title: "Internal Server Error", - Detail: "An unexpected error occurred.", + Title: NewStringOrRuntimeExpr("Internal Server Error"), + Detail: NewStringOrRuntimeExpr("An unexpected error occurred."), }, }, }, @@ -94,6 +94,6 @@ func TestRaiseTask_UnmarshalJSON(t *testing.T) { assert.Equal(t, map[string]interface{}{"meta": "data"}, raiseTask.Metadata) assert.Equal(t, "http://example.com/error", raiseTask.Raise.Error.Definition.Type.String()) assert.Equal(t, 500, raiseTask.Raise.Error.Definition.Status) - assert.Equal(t, "Internal Server Error", raiseTask.Raise.Error.Definition.Title) - assert.Equal(t, "An unexpected error occurred.", raiseTask.Raise.Error.Definition.Detail) + assert.Equal(t, "Internal Server Error", raiseTask.Raise.Error.Definition.Title.String()) + assert.Equal(t, "An unexpected error occurred.", raiseTask.Raise.Error.Definition.Detail.String()) } diff --git a/model/task_run.go b/model/task_run.go index 6942013..b589cfa 100644 --- a/model/task_run.go +++ b/model/task_run.go @@ -25,6 +25,10 @@ type RunTask struct { Run RunTaskConfiguration `json:"run" validate:"required"` } +func (r *RunTask) GetBase() *TaskBase { + return &r.TaskBase +} + type RunTaskConfiguration struct { Await *bool `json:"await,omitempty"` Container *Container `json:"container,omitempty"` diff --git a/model/task_set.go b/model/task_set.go index 654c48f..68816ba 100644 --- a/model/task_set.go +++ b/model/task_set.go @@ -22,6 +22,10 @@ type SetTask struct { Set map[string]interface{} `json:"set" validate:"required,min=1,dive"` } +func (st *SetTask) GetBase() *TaskBase { + return &st.TaskBase +} + // MarshalJSON for SetTask to ensure proper serialization. func (st *SetTask) MarshalJSON() ([]byte, error) { type Alias SetTask diff --git a/model/task_switch.go b/model/task_switch.go index d63b2e7..89ca9c1 100644 --- a/model/task_switch.go +++ b/model/task_switch.go @@ -22,6 +22,10 @@ type SwitchTask struct { Switch []SwitchItem `json:"switch" validate:"required,min=1,dive,switch_item"` } +func (st *SwitchTask) GetBase() *TaskBase { + return &st.TaskBase +} + type SwitchItem map[string]SwitchCase // SwitchCase defines a condition and the corresponding outcome for a switch task. 
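For context, the GetBase accessor added to each task type above, together with TaskList.Next and FlowDirective.IsTermination, is what a consumer uses to walk a workflow's task graph. Below is a minimal usage sketch, not part of this patch: it only relies on the exported API shown in the diffs, the package path is the module path used throughout this series, and the tiny driver with SetTask values mirrors the unit tests; it also assumes the 'then' chain contains no cycles.

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

// walk visits tasks in document order, following 'then' directives via Next
// and stopping when Next reports termination ("end"/"exit"), an unknown
// reference, or the end of the list. Assumes an acyclic 'then' chain.
func walk(tasks *model.TaskList) {
	if tasks == nil || len(*tasks) == 0 {
		return
	}
	idx, item := 0, (*tasks)[0]
	for item != nil {
		fmt.Println("visiting:", item.Key)
		// GetBase exposes the shared TaskBase fields, e.g. the 'then' directive.
		if base := item.GetBase(); base != nil && base.Then != nil {
			fmt.Println("  then:", base.Then.Value)
		}
		idx, item = tasks.Next(idx)
	}
}

func main() {
	// Hypothetical two-step list, built the same way the tests above build theirs.
	tasks := model.TaskList{
		&model.TaskItem{Key: "a", Task: &model.SetTask{Set: map[string]interface{}{"x": 1}}},
		&model.TaskItem{Key: "b", Task: &model.SetTask{Set: map[string]interface{}{"y": 2}}},
	}
	walk(&tasks)
}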
diff --git a/model/task_test.go b/model/task_test.go index 6fa5019..fdd07cf 100644 --- a/model/task_test.go +++ b/model/task_test.go @@ -19,7 +19,7 @@ import ( "errors" "testing" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" ) @@ -119,3 +119,70 @@ func TestTaskList_Validation(t *testing.T) { } } + +func TestTaskList_Next_Sequential(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + &TaskItem{Key: "task3", Task: &SetTask{Set: map[string]interface{}{"key3": "value3"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task2", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task3", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_WithThenDirective(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "task3"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + &TaskItem{Key: "task3", Task: &SetTask{Set: map[string]interface{}{"key3": "value3"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Equal(t, "task3", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_Termination(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "end"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} + +func TestTaskList_Next_InvalidThenReference(t *testing.T) { + tasks := TaskList{ + &TaskItem{Key: "task1", Task: &SetTask{TaskBase: TaskBase{Then: &FlowDirective{Value: "unknown"}}, Set: map[string]interface{}{"key1": "value1"}}}, + &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"key2": "value2"}}}, + } + + idx, currentTask := 0, tasks[0] + assert.Equal(t, "task1", currentTask.Key) + + idx, currentTask = tasks.Next(idx) + assert.Nil(t, currentTask) + assert.Equal(t, -1, idx) +} diff --git a/model/task_try.go b/model/task_try.go index 91d3797..57ba9df 100644 --- a/model/task_try.go +++ b/model/task_try.go @@ -26,6 +26,10 @@ type TryTask struct { Catch *TryTaskCatch `json:"catch" validate:"required"` } +func (t *TryTask) GetBase() *TaskBase { + return &t.TaskBase +} + type TryTaskCatch struct { Errors struct { With *ErrorFilter `json:"with,omitempty"` diff --git a/model/task_wait.go b/model/task_wait.go index 41b5cc5..e312824 100644 --- a/model/task_wait.go +++ b/model/task_wait.go @@ -25,6 +25,10 @@ type WaitTask struct { Wait *Duration `json:"wait" validate:"required"` } +func (wt *WaitTask) GetBase() *TaskBase { + return &wt.TaskBase +} + // MarshalJSON for WaitTask to ensure proper serialization. 
func (wt *WaitTask) MarshalJSON() ([]byte, error) { type Alias WaitTask diff --git a/model/validator.go b/model/validator.go index 91c34b9..60b87b8 100644 --- a/model/validator.go +++ b/model/validator.go @@ -17,9 +17,10 @@ package model import ( "errors" "fmt" - "github.com/go-playground/validator/v10" "regexp" "strings" + + validator "github.com/go-playground/validator/v10" ) var ( diff --git a/model/workflow.go b/model/workflow.go index 17973e1..313a9e5 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -221,6 +221,11 @@ func (f *FlowDirective) IsEnum() bool { return exists } +// IsTermination checks if the FlowDirective matches FlowDirectiveExit or FlowDirectiveEnd. +func (f *FlowDirective) IsTermination() bool { + return f.Value == string(FlowDirectiveExit) || f.Value == string(FlowDirectiveEnd) +} + func (f *FlowDirective) UnmarshalJSON(data []byte) error { var value string if err := json.Unmarshal(data, &value); err != nil { diff --git a/model/workflow_test.go b/model/workflow_test.go index df90f1e..c88de64 100644 --- a/model/workflow_test.go +++ b/model/workflow_test.go @@ -19,7 +19,7 @@ import ( "errors" "testing" - "github.com/go-playground/validator/v10" + validator "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" ) @@ -283,7 +283,7 @@ type InputTestCase struct { func TestInputValidation(t *testing.T) { cases := []InputTestCase{ { - Name: "Valid Input with Schema and From (object)", + Name: "Valid input with Schema and From (object)", Input: Input{ Schema: &Schema{ Format: "json", @@ -301,7 +301,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: false, }, { - Name: "Invalid Input with Schema and From (expr)", + Name: "Invalid input with Schema and From (expr)", Input: Input{ Schema: &Schema{ Format: "json", @@ -313,7 +313,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: true, }, { - Name: "Valid Input with Schema and From (expr)", + Name: "Valid input with Schema and From (expr)", Input: Input{ Schema: &Schema{ Format: "json", @@ -325,7 +325,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: true, }, { - Name: "Invalid Input with Empty From (expr)", + Name: "Invalid input with Empty From (expr)", Input: Input{ From: &ObjectOrRuntimeExpr{ Value: "", @@ -334,7 +334,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: true, }, { - Name: "Invalid Input with Empty From (object)", + Name: "Invalid input with Empty From (object)", Input: Input{ From: &ObjectOrRuntimeExpr{ Value: map[string]interface{}{}, @@ -343,7 +343,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: true, }, { - Name: "Invalid Input with Unsupported From Type", + Name: "Invalid input with Unsupported From Type", Input: Input{ From: &ObjectOrRuntimeExpr{ Value: 123, @@ -352,7 +352,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: true, }, { - Name: "Valid Input with Schema Only", + Name: "Valid input with Schema Only", Input: Input{ Schema: &Schema{ Format: "json", @@ -361,7 +361,7 @@ func TestInputValidation(t *testing.T) { ShouldErr: false, }, { - Name: "Input with Neither Schema Nor From", + Name: "input with Neither Schema Nor From", Input: Input{}, ShouldErr: false, }, diff --git a/parser/cmd/main.go b/parser/cmd/main.go index e811696..b90b902 100644 --- a/parser/cmd/main.go +++ b/parser/cmd/main.go @@ -16,9 +16,10 @@ package main import ( "fmt" - "github.com/serverlessworkflow/sdk-go/v3/parser" "os" "path/filepath" + + "github.com/serverlessworkflow/sdk-go/v3/parser" ) func main() { From 4072331e14a482e1d4f68f723ebdafa0aa17862c Mon Sep 17 
00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Tue, 25 Mar 2025 10:31:00 -0300 Subject: [PATCH 101/110] chores: upgrade golang.org/x/net to v0.37.0 (#228) Signed-off-by: Ricardo Zanini --- go.mod | 20 +++++++++++--------- go.sum | 27 ++++++++++++++------------- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/go.mod b/go.mod index 15c63e3..32f8859 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,15 @@ module github.com/serverlessworkflow/sdk-go/v3 -go 1.22 +go 1.23.0 + +toolchain go1.24.0 require ( - github.com/go-playground/validator/v10 v10.24.0 + github.com/go-playground/validator/v10 v10.25.0 github.com/itchyny/gojq v0.12.17 github.com/stretchr/testify v1.10.0 github.com/tidwall/gjson v1.18.0 + github.com/xeipuuv/gojsonschema v1.2.0 sigs.k8s.io/yaml v1.4.0 ) @@ -15,18 +18,17 @@ require ( github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/google/go-cmp v0.6.0 // indirect + github.com/google/go-cmp v0.7.0 // indirect github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect - github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - github.com/xeipuuv/gojsonschema v1.2.0 // indirect - golang.org/x/crypto v0.32.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/sys v0.29.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/crypto v0.36.0 // indirect + golang.org/x/net v0.37.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.23.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 3a19f04..80ed15c 100644 --- a/go.sum +++ b/go.sum @@ -9,11 +9,11 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg= -github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= +github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8= +github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= 
github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= @@ -34,20 +34,21 @@ github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JT github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= -golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= From 5237b8645774f8d384edcac39d0d8a5566cb39f0 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 28 Mar 2025 13:07:01 -0300 Subject: [PATCH 102/110] Fix #229 - Refactor JQ Expr processing and For Implementation (#231) * Refactor expr packaged and shared context Signed-off-by: Ricardo Zanini * Fix #229 - Implement For task and refactor jq expr into context Signed-off-by: Ricardo Zanini * Add missing headers Signed-off-by: Ricardo Zanini * Add nolint:unused Signed-off-by: Ricardo Zanini --------- Signed-off-by: Ricardo Zanini --- README.md | 2 +- expr/expr.go | 112 ----- go.mod | 2 + go.sum | 4 + 
impl/context.go | 151 ------- impl/ctx/context.go | 407 ++++++++++++++++++ impl/{ => ctx}/status_phase.go | 2 +- impl/expr/expr.go | 133 ++++++ impl/expr/expr_test.go | 263 +++++++++++ impl/json_pointer.go | 77 ++++ .../json_pointer_test.go | 84 ++-- impl/runner.go | 134 ++++-- impl/{task_runner_test.go => runner_test.go} | 107 ++++- impl/task_runner.go | 247 ++--------- impl/task_runner_do.go | 78 ++-- impl/task_runner_for.go | 135 ++++++ impl/task_runner_raise.go | 105 +++++ impl/task_runner_raise_test.go | 15 +- impl/task_runner_set.go | 56 +++ impl/task_set_test.go | 26 +- impl/testdata/for_nested_loops.yaml | 35 ++ impl/testdata/for_sum_numbers.yaml | 30 ++ impl/testdata/raise_inline.yaml | 2 +- impl/utils.go | 31 +- model/errors.go | 63 +-- model/objects.go | 6 + model/runtime_expression.go | 31 +- model/runtime_expression_test.go | 149 +++++++ model/workflow.go | 14 + 29 files changed, 1811 insertions(+), 690 deletions(-) delete mode 100644 expr/expr.go delete mode 100644 impl/context.go create mode 100644 impl/ctx/context.go rename impl/{ => ctx}/status_phase.go (99%) create mode 100644 impl/expr/expr.go create mode 100644 impl/expr/expr_test.go create mode 100644 impl/json_pointer.go rename model/errors_test.go => impl/json_pointer_test.go (53%) rename impl/{task_runner_test.go => runner_test.go} (81%) create mode 100644 impl/task_runner_for.go create mode 100644 impl/task_runner_raise.go create mode 100644 impl/task_runner_set.go create mode 100644 impl/testdata/for_nested_loops.yaml create mode 100644 impl/testdata/for_sum_numbers.yaml diff --git a/README.md b/README.md index 9daabf0..f05e54c 100644 --- a/README.md +++ b/README.md @@ -126,7 +126,7 @@ The table below lists the current state of this implementation. This table is a | Task Call | ❌ | | Task Do | βœ… | | Task Emit | ❌ | -| Task For | ❌ | +| Task For | βœ… | | Task Fork | ❌ | | Task Listen | ❌ | | Task Raise | βœ… | diff --git a/expr/expr.go b/expr/expr.go deleted file mode 100644 index cd5a755..0000000 --- a/expr/expr.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2025 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package expr - -import ( - "errors" - "fmt" - "strings" - - "github.com/itchyny/gojq" -) - -// IsStrictExpr returns true if the string is enclosed in `${ }` -func IsStrictExpr(expression string) bool { - return strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") -} - -// Sanitize processes the expression to ensure it's ready for evaluation -// It removes `${}` if present and replaces single quotes with double quotes -func Sanitize(expression string) string { - // Remove `${}` enclosure if present - if IsStrictExpr(expression) { - expression = strings.TrimSpace(expression[2 : len(expression)-1]) - } - - // Replace single quotes with double quotes - expression = strings.ReplaceAll(expression, "'", "\"") - - return expression -} - -// IsValid tries to parse and check if the given value is a valid expression -func IsValid(expression string) bool { - expression = Sanitize(expression) - _, err := gojq.Parse(expression) - return err == nil -} - -// TraverseAndEvaluate recursively processes and evaluates all expressions in a JSON-like structure -func TraverseAndEvaluate(node interface{}, input interface{}) (interface{}, error) { - switch v := node.(type) { - case map[string]interface{}: - // Traverse map - for key, value := range v { - evaluatedValue, err := TraverseAndEvaluate(value, input) - if err != nil { - return nil, err - } - v[key] = evaluatedValue - } - return v, nil - - case []interface{}: - // Traverse array - for i, value := range v { - evaluatedValue, err := TraverseAndEvaluate(value, input) - if err != nil { - return nil, err - } - v[i] = evaluatedValue - } - return v, nil - - case string: - // Check if the string is a runtime expression (e.g., ${ .some.path }) - if IsStrictExpr(v) { - return evaluateJQExpression(Sanitize(v), input) - } - return v, nil - - default: - // Return other types as-is - return v, nil - } -} - -// TODO: add support to variables see https://github.com/itchyny/gojq/blob/main/option_variables_test.go - -// evaluateJQExpression evaluates a jq expression against a given JSON input -func evaluateJQExpression(expression string, input interface{}) (interface{}, error) { - // Parse the sanitized jq expression - query, err := gojq.Parse(expression) - if err != nil { - return nil, fmt.Errorf("failed to parse jq expression: %s, error: %w", expression, err) - } - - // Compile and evaluate the expression - iter := query.Run(input) - result, ok := iter.Next() - if !ok { - return nil, errors.New("no result from jq evaluation") - } - - // Check if an error occurred during evaluation - if err, isErr := result.(error); isErr { - return nil, fmt.Errorf("jq evaluation error: %w", err) - } - - return result, nil -} diff --git a/go.mod b/go.mod index 32f8859..e7947a8 100644 --- a/go.mod +++ b/go.mod @@ -19,9 +19,11 @@ require ( github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/google/go-cmp v0.7.0 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/relvacode/iso8601 v1.6.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect diff --git a/go.sum b/go.sum index 80ed15c..e6e3d38 100644 --- a/go.sum +++ b/go.sum @@ -14,6 +14,8 @@ github.com/go-playground/validator/v10 
v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= @@ -23,6 +25,8 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/relvacode/iso8601 v1.6.0 h1:eFXUhMJN3Gz8Rcq82f9DTMW0svjtAVuIEULglM7QHTU= +github.com/relvacode/iso8601 v1.6.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= diff --git a/impl/context.go b/impl/context.go deleted file mode 100644 index ae9375e..0000000 --- a/impl/context.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2025 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package impl - -import ( - "context" - "errors" - "sync" -) - -type ctxKey string - -const runnerCtxKey ctxKey = "wfRunnerContext" - -// WorkflowContext holds the necessary data for the workflow execution within the instance. 
-type WorkflowContext struct { - mu sync.Mutex - input interface{} // input can hold any type - output interface{} // output can hold any type - context map[string]interface{} - StatusPhase []StatusPhaseLog - TasksStatusPhase map[string][]StatusPhaseLog // Holds `$context` as the key -} - -type TaskContext interface { - SetTaskStatus(task string, status StatusPhase) -} - -func (ctx *WorkflowContext) SetStatus(status StatusPhase) { - ctx.mu.Lock() - defer ctx.mu.Unlock() - if ctx.StatusPhase == nil { - ctx.StatusPhase = []StatusPhaseLog{} - } - ctx.StatusPhase = append(ctx.StatusPhase, NewStatusPhaseLog(status)) -} - -func (ctx *WorkflowContext) SetTaskStatus(task string, status StatusPhase) { - ctx.mu.Lock() - defer ctx.mu.Unlock() - if ctx.TasksStatusPhase == nil { - ctx.TasksStatusPhase = map[string][]StatusPhaseLog{} - } - ctx.TasksStatusPhase[task] = append(ctx.TasksStatusPhase[task], NewStatusPhaseLog(status)) -} - -// SetInstanceCtx safely sets the `$context` value -func (ctx *WorkflowContext) SetInstanceCtx(value interface{}) { - ctx.mu.Lock() - defer ctx.mu.Unlock() - if ctx.context == nil { - ctx.context = make(map[string]interface{}) - } - ctx.context["$context"] = value -} - -// GetInstanceCtx safely retrieves the `$context` value -func (ctx *WorkflowContext) GetInstanceCtx() interface{} { - ctx.mu.Lock() - defer ctx.mu.Unlock() - if ctx.context == nil { - return nil - } - return ctx.context["$context"] -} - -// SetInput safely sets the input -func (ctx *WorkflowContext) SetInput(input interface{}) { - ctx.mu.Lock() - defer ctx.mu.Unlock() - ctx.input = input -} - -// GetInput safely retrieves the input -func (ctx *WorkflowContext) GetInput() interface{} { - ctx.mu.Lock() - defer ctx.mu.Unlock() - return ctx.input -} - -// SetOutput safely sets the output -func (ctx *WorkflowContext) SetOutput(output interface{}) { - ctx.mu.Lock() - defer ctx.mu.Unlock() - ctx.output = output -} - -// GetOutput safely retrieves the output -func (ctx *WorkflowContext) GetOutput() interface{} { - ctx.mu.Lock() - defer ctx.mu.Unlock() - return ctx.output -} - -// GetInputAsMap safely retrieves the input as a map[string]interface{}. -// If input is not a map, it creates a map with an empty string key and the input as the value. -func (ctx *WorkflowContext) GetInputAsMap() map[string]interface{} { - ctx.mu.Lock() - defer ctx.mu.Unlock() - - if inputMap, ok := ctx.input.(map[string]interface{}); ok { - return inputMap - } - - // If input is not a map, create a map with an empty key and set input as the value - return map[string]interface{}{ - "": ctx.input, - } -} - -// GetOutputAsMap safely retrieves the output as a map[string]interface{}. -// If output is not a map, it creates a map with an empty string key and the output as the value. 
-func (ctx *WorkflowContext) GetOutputAsMap() map[string]interface{} { - ctx.mu.Lock() - defer ctx.mu.Unlock() - - if outputMap, ok := ctx.output.(map[string]interface{}); ok { - return outputMap - } - - // If output is not a map, create a map with an empty key and set output as the value - return map[string]interface{}{ - "": ctx.output, - } -} - -// WithWorkflowContext adds the WorkflowContext to a parent context -func WithWorkflowContext(parent context.Context, wfCtx *WorkflowContext) context.Context { - return context.WithValue(parent, runnerCtxKey, wfCtx) -} - -// GetWorkflowContext retrieves the WorkflowContext from a context -func GetWorkflowContext(ctx context.Context) (*WorkflowContext, error) { - wfCtx, ok := ctx.Value(runnerCtxKey).(*WorkflowContext) - if !ok { - return nil, errors.New("workflow context not found") - } - return wfCtx, nil -} diff --git a/impl/ctx/context.go b/impl/ctx/context.go new file mode 100644 index 0000000..1f0d716 --- /dev/null +++ b/impl/ctx/context.go @@ -0,0 +1,407 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ctx + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "github.com/google/uuid" + "github.com/serverlessworkflow/sdk-go/v3/model" + "sync" + "time" +) + +var ErrWorkflowContextNotFound = errors.New("workflow context not found") + +var _ WorkflowContext = &workflowContext{} + +type ctxKey string + +const ( + runnerCtxKey ctxKey = "wfRunnerContext" + + varsContext = "$context" + varsInput = "$input" + varsOutput = "$output" + varsWorkflow = "$workflow" + varsRuntime = "$runtime" + varsTask = "$task" + + // TODO: script during the release to update this value programmatically + runtimeVersion = "v3.1.0" + runtimeName = "CNCF Serverless Workflow Specification Go SDK" +) + +type WorkflowContext interface { + SetStartedAt(t time.Time) + SetStatus(status StatusPhase) + SetRawInput(input interface{}) + SetInstanceCtx(value interface{}) + GetInstanceCtx() interface{} + SetInput(input interface{}) + GetInput() interface{} + SetOutput(output interface{}) + GetOutput() interface{} + GetOutputAsMap() map[string]interface{} + GetVars() map[string]interface{} + SetTaskStatus(task string, status StatusPhase) + SetTaskRawInput(input interface{}) + SetTaskRawOutput(output interface{}) + SetTaskDef(task model.Task) error + SetTaskStartedAt(startedAt time.Time) + SetTaskName(name string) + SetTaskReference(ref string) + GetTaskReference() string + ClearTaskContext() + SetLocalExprVars(vars map[string]interface{}) + AddLocalExprVars(vars map[string]interface{}) + RemoveLocalExprVars(keys ...string) +} + +// workflowContext holds the necessary data for the workflow execution within the instance. 
+type workflowContext struct { + mu sync.Mutex + input interface{} // $input can hold any type + output interface{} // $output can hold any type + context map[string]interface{} // Holds `$context` as the key + workflowDescriptor map[string]interface{} // $workflow representation in the context + taskDescriptor map[string]interface{} // $task representation in the context + localExprVars map[string]interface{} // Local expression variables defined in a given task or private context. E.g. a For task $item. + StatusPhase []StatusPhaseLog + TasksStatusPhase map[string][]StatusPhaseLog +} + +func NewWorkflowContext(workflow *model.Workflow) (WorkflowContext, error) { + workflowCtx := &workflowContext{} + workflowDef, err := workflow.AsMap() + if err != nil { + return nil, err + } + workflowCtx.taskDescriptor = map[string]interface{}{} + workflowCtx.workflowDescriptor = map[string]interface{}{ + varsWorkflow: map[string]interface{}{ + "id": uuid.NewString(), + "definition": workflowDef, + }, + } + workflowCtx.SetStatus(PendingStatus) + + return workflowCtx, nil +} + +// WithWorkflowContext adds the workflowContext to a parent context +func WithWorkflowContext(parent context.Context, wfCtx WorkflowContext) context.Context { + return context.WithValue(parent, runnerCtxKey, wfCtx) +} + +// GetWorkflowContext retrieves the workflowContext from a context +func GetWorkflowContext(ctx context.Context) (WorkflowContext, error) { + wfCtx, ok := ctx.Value(runnerCtxKey).(*workflowContext) + if !ok { + return nil, ErrWorkflowContextNotFound + } + return wfCtx, nil +} + +func (ctx *workflowContext) SetStartedAt(t time.Time) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + wf, ok := ctx.workflowDescriptor[varsWorkflow].(map[string]interface{}) + if !ok { + wf = make(map[string]interface{}) + ctx.workflowDescriptor[varsWorkflow] = wf + } + + startedAt, ok := wf["startedAt"].(map[string]interface{}) + if !ok { + startedAt = make(map[string]interface{}) + wf["startedAt"] = startedAt + } + + startedAt["iso8601"] = t.UTC().Format(time.RFC3339) +} + +func (ctx *workflowContext) SetRawInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + // Ensure the outer "workflow" map + wf, ok := ctx.workflowDescriptor[varsWorkflow].(map[string]interface{}) + if !ok { + wf = make(map[string]interface{}) + ctx.workflowDescriptor[varsWorkflow] = wf + } + + // Store the input + wf["input"] = input +} + +func (ctx *workflowContext) AddLocalExprVars(vars map[string]interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.localExprVars == nil { + ctx.localExprVars = map[string]interface{}{} + } + for k, v := range vars { + ctx.localExprVars[k] = v + } +} + +func (ctx *workflowContext) RemoveLocalExprVars(keys ...string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if ctx.localExprVars == nil { + return + } + + for _, k := range keys { + delete(ctx.localExprVars, k) + } +} + +func (ctx *workflowContext) SetLocalExprVars(vars map[string]interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.localExprVars = vars +} + +func (ctx *workflowContext) GetVars() map[string]interface{} { + vars := make(map[string]interface{}) + vars[varsInput] = ctx.GetInput() + vars[varsOutput] = ctx.GetOutput() + vars[varsContext] = ctx.GetInstanceCtx() + vars[varsTask] = ctx.taskDescriptor[varsTask] + vars[varsWorkflow] = ctx.workflowDescriptor[varsWorkflow] + vars[varsRuntime] = map[string]interface{}{ + "name": runtimeName, + "version": runtimeVersion, + } + for varName, varValue := range ctx.localExprVars { + vars[varName] = 
varValue + } + return vars +} + +func (ctx *workflowContext) SetStatus(status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.StatusPhase == nil { + ctx.StatusPhase = []StatusPhaseLog{} + } + ctx.StatusPhase = append(ctx.StatusPhase, NewStatusPhaseLog(status)) +} + +// SetInstanceCtx safely sets the `$context` value +func (ctx *workflowContext) SetInstanceCtx(value interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + ctx.context = make(map[string]interface{}) + } + ctx.context[varsContext] = value +} + +// GetInstanceCtx safely retrieves the `$context` value +func (ctx *workflowContext) GetInstanceCtx() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.context == nil { + return nil + } + return ctx.context[varsContext] +} + +// SetInput safely sets the input +func (ctx *workflowContext) SetInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.input = input +} + +// GetInput safely retrieves the input +func (ctx *workflowContext) GetInput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.input +} + +// SetOutput safely sets the output +func (ctx *workflowContext) SetOutput(output interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.output = output +} + +// GetOutput safely retrieves the output +func (ctx *workflowContext) GetOutput() interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + return ctx.output +} + +// GetInputAsMap safely retrieves the input as a map[string]interface{}. +// If input is not a map, it creates a map with an empty string key and the input as the value. +func (ctx *workflowContext) GetInputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if inputMap, ok := ctx.input.(map[string]interface{}); ok { + return inputMap + } + + // If input is not a map, create a map with an empty key and set input as the value + return map[string]interface{}{ + "": ctx.input, + } +} + +// GetOutputAsMap safely retrieves the output as a map[string]interface{}. +// If output is not a map, it creates a map with an empty string key and the output as the value. 
+func (ctx *workflowContext) GetOutputAsMap() map[string]interface{} { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if outputMap, ok := ctx.output.(map[string]interface{}); ok { + return outputMap + } + + // If output is not a map, create a map with an empty key and set output as the value + return map[string]interface{}{ + "": ctx.output, + } +} + +func (ctx *workflowContext) SetTaskStatus(task string, status StatusPhase) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + if ctx.TasksStatusPhase == nil { + ctx.TasksStatusPhase = map[string][]StatusPhaseLog{} + } + ctx.TasksStatusPhase[task] = append(ctx.TasksStatusPhase[task], NewStatusPhaseLog(status)) +} + +func (ctx *workflowContext) SetTaskRawInput(input interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["input"] = input +} + +func (ctx *workflowContext) SetTaskRawOutput(output interface{}) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["output"] = output +} + +func (ctx *workflowContext) SetTaskDef(task model.Task) error { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + if task == nil { + return errors.New("SetTaskDef called with nil model.Task") + } + + defBytes, err := json.Marshal(task) + if err != nil { + return fmt.Errorf("failed to marshal task: %w", err) + } + + var defMap map[string]interface{} + if err := json.Unmarshal(defBytes, &defMap); err != nil { + return fmt.Errorf("failed to unmarshal task into map: %w", err) + } + + taskMap, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + taskMap = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = taskMap + } + + taskMap["definition"] = defMap + + return nil +} + +func (ctx *workflowContext) SetTaskStartedAt(startedAt time.Time) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["startedAt"] = startedAt.UTC().Format(time.RFC3339) +} + +func (ctx *workflowContext) SetTaskName(name string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["name"] = name +} + +func (ctx *workflowContext) SetTaskReference(ref string) { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + task = make(map[string]interface{}) + ctx.taskDescriptor[varsTask] = task + } + + task["reference"] = ref +} + +func (ctx *workflowContext) GetTaskReference() string { + ctx.mu.Lock() + defer ctx.mu.Unlock() + task, ok := ctx.taskDescriptor[varsTask].(map[string]interface{}) + if !ok { + return "" + } + return task["reference"].(string) +} + +func (ctx *workflowContext) ClearTaskContext() { + ctx.mu.Lock() + defer ctx.mu.Unlock() + ctx.taskDescriptor[varsTask] = make(map[string]interface{}) +} diff --git a/impl/status_phase.go b/impl/ctx/status_phase.go similarity index 99% rename from impl/status_phase.go rename to impl/ctx/status_phase.go index ca61fad..ddcab9c 100644 --- a/impl/status_phase.go +++ b/impl/ctx/status_phase.go @@ -12,7 +12,7 @@ // See the License for the 
specific language governing permissions and // limitations under the License. -package impl +package ctx import "time" diff --git a/impl/expr/expr.go b/impl/expr/expr.go new file mode 100644 index 0000000..03d558e --- /dev/null +++ b/impl/expr/expr.go @@ -0,0 +1,133 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package expr + +import ( + "context" + "errors" + "fmt" + "github.com/itchyny/gojq" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func TraverseAndEvaluateWithVars(node interface{}, input interface{}, variables map[string]interface{}, nodeContext context.Context) (interface{}, error) { + if err := mergeContextInVars(nodeContext, variables); err != nil { + return nil, err + } + return traverseAndEvaluate(node, input, variables) +} + +// TraverseAndEvaluate recursively processes and evaluates all expressions in a JSON-like structure +func TraverseAndEvaluate(node interface{}, input interface{}, nodeContext context.Context) (interface{}, error) { + return TraverseAndEvaluateWithVars(node, input, map[string]interface{}{}, nodeContext) +} + +func traverseAndEvaluate(node interface{}, input interface{}, variables map[string]interface{}) (interface{}, error) { + switch v := node.(type) { + case map[string]interface{}: + // Traverse map + for key, value := range v { + evaluatedValue, err := traverseAndEvaluate(value, input, variables) + if err != nil { + return nil, err + } + v[key] = evaluatedValue + } + return v, nil + + case []interface{}: + // Traverse array + for i, value := range v { + evaluatedValue, err := traverseAndEvaluate(value, input, variables) + if err != nil { + return nil, err + } + v[i] = evaluatedValue + } + return v, nil + + case string: + // Check if the string is a runtime expression (e.g., ${ .some.path }) + if model.IsStrictExpr(v) { + return evaluateJQExpression(model.SanitizeExpr(v), input, variables) + } + return v, nil + + default: + // Return other types as-is + return v, nil + } +} + +// evaluateJQExpression evaluates a jq expression against a given JSON input +func evaluateJQExpression(expression string, input interface{}, variables map[string]interface{}) (interface{}, error) { + query, err := gojq.Parse(expression) + if err != nil { + return nil, fmt.Errorf("failed to parse jq expression: %s, error: %w", expression, err) + } + + // Get the variable names & values in a single pass: + names, values := getVariableNamesAndValues(variables) + + code, err := gojq.Compile(query, gojq.WithVariables(names)) + if err != nil { + return nil, fmt.Errorf("failed to compile jq expression: %s, error: %w", expression, err) + } + + iter := code.Run(input, values...) 
+ result, ok := iter.Next() + if !ok { + return nil, errors.New("no result from jq evaluation") + } + + // If there's an error from the jq engine, report it + if errVal, isErr := result.(error); isErr { + return nil, fmt.Errorf("jq evaluation error: %w", errVal) + } + + return result, nil +} + +// getVariableNamesAndValues constructs two slices, where 'names[i]' matches 'values[i]'. +func getVariableNamesAndValues(vars map[string]interface{}) ([]string, []interface{}) { + names := make([]string, 0, len(vars)) + values := make([]interface{}, 0, len(vars)) + + for k, v := range vars { + names = append(names, k) + values = append(values, v) + } + return names, values +} + +func mergeContextInVars(nodeCtx context.Context, variables map[string]interface{}) error { + if variables == nil { + variables = make(map[string]interface{}) + } + wfCtx, err := ctx.GetWorkflowContext(nodeCtx) + if err != nil { + if errors.Is(err, ctx.ErrWorkflowContextNotFound) { + return nil + } + return err + } + // merge + for k, val := range wfCtx.GetVars() { + variables[k] = val + } + + return nil +} diff --git a/impl/expr/expr_test.go b/impl/expr/expr_test.go new file mode 100644 index 0000000..f2af54a --- /dev/null +++ b/impl/expr/expr_test.go @@ -0,0 +1,263 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package expr + +import ( + "context" + "fmt" + "testing" + + "github.com/itchyny/gojq" +) + +func TestTraverseAndEvaluate(t *testing.T) { + t.Run("Simple no-expression map", func(t *testing.T) { + node := map[string]interface{}{ + "key": "value", + "num": 123, + } + result, err := TraverseAndEvaluate(node, nil, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map") + } + if got["key"] != "value" || got["num"] != 123 { + t.Errorf("TraverseAndEvaluate() returned unexpected map data: %#v", got) + } + }) + + t.Run("Expression in map", func(t *testing.T) { + node := map[string]interface{}{ + "expr": "${ .foo }", + } + input := map[string]interface{}{ + "foo": "bar", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map") + } + if got["expr"] != "bar" { + t.Errorf("TraverseAndEvaluate() = %v, want %v", got["expr"], "bar") + } + }) + + t.Run("Expression in array", func(t *testing.T) { + node := []interface{}{ + "static", + "${ .foo }", + } + input := map[string]interface{}{ + "foo": "bar", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() unexpected error: %v", err) + } + + got, ok := result.([]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return an array") + } + if got[0] != "static" { + t.Errorf("TraverseAndEvaluate()[0] = %v, want 'static'", got[0]) + } + if got[1] != "bar" { + t.Errorf("TraverseAndEvaluate()[1] = %v, want 'bar'", got[1]) + } + }) + + t.Run("Nested structures", func(t *testing.T) { + node := map[string]interface{}{ + "level1": []interface{}{ + map[string]interface{}{ + "expr": "${ .foo }", + }, + }, + } + input := map[string]interface{}{ + "foo": "nestedValue", + } + + result, err := TraverseAndEvaluate(node, input, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluate() error: %v", err) + } + + resMap, ok := result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map at top-level") + } + + level1, ok := resMap["level1"].([]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return an array for resMap['level1']") + } + + level1Map, ok := level1[0].(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluate() did not return a map for level1[0]") + } + + if level1Map["expr"] != "nestedValue" { + t.Errorf("TraverseAndEvaluate() = %v, want %v", level1Map["expr"], "nestedValue") + } + }) + + t.Run("Invalid JQ expression", func(t *testing.T) { + node := "${ .foo( }" + input := map[string]interface{}{ + "foo": "bar", + } + + _, err := TraverseAndEvaluate(node, input, context.TODO()) + if err == nil { + t.Errorf("TraverseAndEvaluate() expected error for invalid JQ, got nil") + } + }) +} + +func TestTraverseAndEvaluateWithVars(t *testing.T) { + t.Run("Variable usage in expression", func(t *testing.T) { + node := map[string]interface{}{ + "expr": "${ $myVar }", + } + variables := map[string]interface{}{ + "$myVar": "HelloVars", + } + input := map[string]interface{}{} + + result, err := TraverseAndEvaluateWithVars(node, input, variables, context.TODO()) + if err != nil { + t.Fatalf("TraverseAndEvaluateWithVars() unexpected error: %v", err) + } + got, ok 
:= result.(map[string]interface{}) + if !ok { + t.Fatalf("TraverseAndEvaluateWithVars() did not return a map") + } + if got["expr"] != "HelloVars" { + t.Errorf("TraverseAndEvaluateWithVars() = %v, want %v", got["expr"], "HelloVars") + } + }) + + t.Run("Reference variable that isn't defined", func(t *testing.T) { + // This tries to use a variable that isn't passed in, + // so presumably it yields an error about an undefined variable. + node := "${ $notProvided }" + input := map[string]interface{}{ + "foo": "bar", + } + variables := map[string]interface{}{} // intentionally empty + + _, err := TraverseAndEvaluateWithVars(node, input, variables, context.TODO()) + if err == nil { + t.Errorf("TraverseAndEvaluateWithVars() expected error for undefined variable, got nil") + } else { + t.Logf("Got expected error: %v", err) + } + }) +} + +func TestEvaluateJQExpressionDirect(t *testing.T) { + // This tests the core evaluator directly for errors and success. + t.Run("Successful eval", func(t *testing.T) { + expression := ".foo" + input := map[string]interface{}{"foo": "bar"} + variables := map[string]interface{}{} + result, err := callEvaluateJQ(expression, input, variables) + if err != nil { + t.Fatalf("evaluateJQExpression() error = %v, want nil", err) + } + if result != "bar" { + t.Errorf("evaluateJQExpression() = %v, want 'bar'", result) + } + }) + + t.Run("Parse error", func(t *testing.T) { + expression := ".foo(" + input := map[string]interface{}{"foo": "bar"} + variables := map[string]interface{}{} + _, err := callEvaluateJQ(expression, input, variables) + if err == nil { + t.Errorf("evaluateJQExpression() expected parse error, got nil") + } + }) + + t.Run("Runtime error in evaluation (undefined variable)", func(t *testing.T) { + expression := "$undefinedVar" + input := map[string]interface{}{ + "foo": []interface{}{1, 2}, + } + variables := map[string]interface{}{} + _, err := callEvaluateJQ(expression, input, variables) + if err == nil { + t.Errorf("callEvaluateJQ() expected runtime error, got nil") + } else { + t.Logf("Got expected error: %v", err) + } + }) +} + +// Helper to call the unexported evaluateJQExpression via a wrapper in tests. +// Alternatively, you could move `evaluateJQExpression` into a separate file that +// is also in package `expr`, then test it directly if needed. +func callEvaluateJQ(expression string, input interface{}, variables map[string]interface{}) (interface{}, error) { + // Replicate the logic from evaluateJQExpression for direct testing + query, err := gojq.Parse(expression) + if err != nil { + return nil, fmt.Errorf("failed to parse: %w", err) + } + code, err := gojq.Compile(query, gojq.WithVariables(exprGetVariableNames(variables))) + if err != nil { + return nil, fmt.Errorf("failed to compile: %w", err) + } + iter := code.Run(input, exprGetVariableValues(variables)...) 
+ result, ok := iter.Next() + if !ok { + return nil, fmt.Errorf("no result from jq evaluation") + } + if e, isErr := result.(error); isErr { + return nil, fmt.Errorf("runtime error: %w", e) + } + return result, nil +} + +// Local copies of the variable-gathering logic from your code: +func exprGetVariableNames(variables map[string]interface{}) []string { + names := make([]string, 0, len(variables)) + for name := range variables { + names = append(names, name) + } + return names +} + +func exprGetVariableValues(variables map[string]interface{}) []interface{} { + vals := make([]interface{}, 0, len(variables)) + for _, val := range variables { + vals = append(vals, val) + } + return vals +} diff --git a/impl/json_pointer.go b/impl/json_pointer.go new file mode 100644 index 0000000..4d276ff --- /dev/null +++ b/impl/json_pointer.go @@ -0,0 +1,77 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "encoding/json" + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/model" + "reflect" + "strings" +) + +func findJsonPointer(data interface{}, target string, path string) (string, bool) { + switch node := data.(type) { + case map[string]interface{}: + for key, value := range node { + newPath := fmt.Sprintf("%s/%s", path, key) + if key == target { + return newPath, true + } + if result, found := findJsonPointer(value, target, newPath); found { + return result, true + } + } + case []interface{}: + for i, item := range node { + newPath := fmt.Sprintf("%s/%d", path, i) + if result, found := findJsonPointer(item, target, newPath); found { + return result, true + } + } + } + return "", false +} + +// GenerateJSONPointer Function to generate JSON Pointer from a Workflow reference +func GenerateJSONPointer(workflow *model.Workflow, targetNode interface{}) (string, error) { + // Convert struct to JSON + jsonData, err := json.Marshal(workflow) + if err != nil { + return "", fmt.Errorf("error marshalling to JSON: %w", err) + } + + // Convert JSON to a generic map for traversal + var jsonMap map[string]interface{} + if err := json.Unmarshal(jsonData, &jsonMap); err != nil { + return "", fmt.Errorf("error unmarshalling JSON: %w", err) + } + + transformedNode := "" + switch node := targetNode.(type) { + case string: + transformedNode = node + default: + transformedNode = strings.ToLower(reflect.TypeOf(targetNode).Name()) + } + + // Search for the target node + jsonPointer, found := findJsonPointer(jsonMap, transformedNode, "") + if !found { + return "", fmt.Errorf("node '%s' not found", targetNode) + } + + return jsonPointer, nil +} diff --git a/model/errors_test.go b/impl/json_pointer_test.go similarity index 53% rename from model/errors_test.go rename to impl/json_pointer_test.go index 12a00fb..76077bc 100644 --- a/model/errors_test.go +++ b/impl/json_pointer_test.go @@ -12,21 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. 
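// Illustrative usage sketch for the GenerateJSONPointer helper introduced in
// impl/json_pointer.go above. The workflow layout, the "task1" key and the
// expected "/do/0/task1" pointer are assumptions chosen for demonstration,
// mirroring the shape used by the tests that follow; a minimal sketch, not
// SDK-provided example code.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/impl"
	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	wf := &model.Workflow{
		Document: model.Document{Name: "pointer-demo"},
		Do: &model.TaskList{
			&model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 10}}},
		},
	}

	// GenerateJSONPointer marshals the workflow to JSON and searches for the
	// named node, returning its JSON Pointer (here expected to be "/do/0/task1").
	ptr, err := impl.GenerateJSONPointer(wf, "task1")
	if err != nil {
		panic(err)
	}
	fmt.Println(ptr)
}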
-package model +package impl import ( - "testing" - + "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/stretchr/testify/assert" + "testing" ) // TestGenerateJSONPointer_SimpleTask tests a simple workflow task. func TestGenerateJSONPointer_SimpleTask(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "simple-workflow"}, - Do: &TaskList{ - &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"value": 10}}}, - &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + workflow := &model.Workflow{ + Document: model.Document{Name: "simple-workflow"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 10}}}, + &model.TaskItem{Key: "task2", Task: &model.SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, }, } @@ -37,11 +37,11 @@ func TestGenerateJSONPointer_SimpleTask(t *testing.T) { // TestGenerateJSONPointer_SimpleTask tests a simple workflow task. func TestGenerateJSONPointer_Document(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "simple-workflow"}, - Do: &TaskList{ - &TaskItem{Key: "task1", Task: &SetTask{Set: map[string]interface{}{"value": 10}}}, - &TaskItem{Key: "task2", Task: &SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, + workflow := &model.Workflow{ + Document: model.Document{Name: "simple-workflow"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "task1", Task: &model.SetTask{Set: map[string]interface{}{"value": 10}}}, + &model.TaskItem{Key: "task2", Task: &model.SetTask{Set: map[string]interface{}{"double": "${ .value * 2 }"}}}, }, } @@ -51,17 +51,17 @@ func TestGenerateJSONPointer_Document(t *testing.T) { } func TestGenerateJSONPointer_ForkTask(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "fork-example"}, - Do: &TaskList{ - &TaskItem{ + workflow := &model.Workflow{ + Document: model.Document{Name: "fork-example"}, + Do: &model.TaskList{ + &model.TaskItem{ Key: "raiseAlarm", - Task: &ForkTask{ - Fork: ForkTaskConfiguration{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ Compete: true, - Branches: &TaskList{ - {Key: "callNurse", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "put", Endpoint: NewEndpoint("https://hospital.com/api/alert/nurses")}}}, - {Key: "callDoctor", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "put", Endpoint: NewEndpoint("https://hospital.com/api/alert/doctor")}}}, + Branches: &model.TaskList{ + {Key: "callNurse", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/nurses")}}}, + {Key: "callDoctor", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/doctor")}}}, }, }, }, @@ -76,23 +76,23 @@ func TestGenerateJSONPointer_ForkTask(t *testing.T) { // TestGenerateJSONPointer_DeepNestedTask tests multiple nested task levels. 
func TestGenerateJSONPointer_DeepNestedTask(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "deep-nested"}, - Do: &TaskList{ - &TaskItem{ + workflow := &model.Workflow{ + Document: model.Document{Name: "deep-nested"}, + Do: &model.TaskList{ + &model.TaskItem{ Key: "step1", - Task: &ForkTask{ - Fork: ForkTaskConfiguration{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ Compete: false, - Branches: &TaskList{ + Branches: &model.TaskList{ { Key: "branchA", - Task: &ForkTask{ - Fork: ForkTaskConfiguration{ - Branches: &TaskList{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Branches: &model.TaskList{ { Key: "deepTask", - Task: &SetTask{Set: map[string]interface{}{"result": "done"}}, + Task: &model.SetTask{Set: map[string]interface{}{"result": "done"}}, }, }, }, @@ -112,10 +112,10 @@ func TestGenerateJSONPointer_DeepNestedTask(t *testing.T) { // TestGenerateJSONPointer_NonExistentTask checks for a task that doesn't exist. func TestGenerateJSONPointer_NonExistentTask(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "nonexistent-test"}, - Do: &TaskList{ - &TaskItem{Key: "taskA", Task: &SetTask{Set: map[string]interface{}{"value": 5}}}, + workflow := &model.Workflow{ + Document: model.Document{Name: "nonexistent-test"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "taskA", Task: &model.SetTask{Set: map[string]interface{}{"value": 5}}}, }, } @@ -125,11 +125,11 @@ func TestGenerateJSONPointer_NonExistentTask(t *testing.T) { // TestGenerateJSONPointer_MixedTaskTypes verifies a workflow with different task types. func TestGenerateJSONPointer_MixedTaskTypes(t *testing.T) { - workflow := &Workflow{ - Document: Document{Name: "mixed-tasks"}, - Do: &TaskList{ - &TaskItem{Key: "compute", Task: &SetTask{Set: map[string]interface{}{"result": 42}}}, - &TaskItem{Key: "notify", Task: &CallHTTP{Call: "http", With: HTTPArguments{Method: "post", Endpoint: NewEndpoint("https://api.notify.com")}}}, + workflow := &model.Workflow{ + Document: model.Document{Name: "mixed-tasks"}, + Do: &model.TaskList{ + &model.TaskItem{Key: "compute", Task: &model.SetTask{Set: map[string]interface{}{"result": 42}}}, + &model.TaskItem{Key: "notify", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "post", Endpoint: model.NewEndpoint("https://api.notify.com")}}}, }, } diff --git a/impl/runner.go b/impl/runner.go index c219886..1c9ad8b 100644 --- a/impl/runner.go +++ b/impl/runner.go @@ -17,57 +17,117 @@ package impl import ( "context" "fmt" - + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" + "time" ) var _ WorkflowRunner = &workflowRunnerImpl{} +var _ TaskSupport = &workflowRunnerImpl{} +// WorkflowRunner is the public API to run Workflows type WorkflowRunner interface { GetWorkflowDef() *model.Workflow Run(input interface{}) (output interface{}, err error) - GetContext() *WorkflowContext + GetWorkflowCtx() ctx.WorkflowContext } -func NewDefaultRunner(workflow *model.Workflow) WorkflowRunner { - wfContext := &WorkflowContext{} - wfContext.SetStatus(PendingStatus) +func NewDefaultRunner(workflow *model.Workflow) (WorkflowRunner, error) { + wfContext, err := ctx.NewWorkflowContext(workflow) + if err != nil { + return nil, err + } // TODO: based on the workflow definition, the context might change. 
- ctx := WithWorkflowContext(context.Background(), wfContext) + objCtx := ctx.WithWorkflowContext(context.Background(), wfContext) return &workflowRunnerImpl{ Workflow: workflow, - Context: ctx, + Context: objCtx, RunnerCtx: wfContext, - } + }, nil } type workflowRunnerImpl struct { Workflow *model.Workflow Context context.Context - RunnerCtx *WorkflowContext + RunnerCtx ctx.WorkflowContext } -func (wr *workflowRunnerImpl) GetContext() *WorkflowContext { - return wr.RunnerCtx +func (wr *workflowRunnerImpl) RemoveLocalExprVars(keys ...string) { + wr.RunnerCtx.RemoveLocalExprVars(keys...) +} + +func (wr *workflowRunnerImpl) AddLocalExprVars(vars map[string]interface{}) { + wr.RunnerCtx.AddLocalExprVars(vars) +} + +func (wr *workflowRunnerImpl) SetLocalExprVars(vars map[string]interface{}) { + wr.RunnerCtx.SetLocalExprVars(vars) +} + +func (wr *workflowRunnerImpl) SetTaskReferenceFromName(taskName string) error { + ref, err := GenerateJSONPointer(wr.Workflow, taskName) + if err != nil { + return err + } + wr.RunnerCtx.SetTaskReference(ref) + return nil +} + +func (wr *workflowRunnerImpl) GetTaskReference() string { + return wr.RunnerCtx.GetTaskReference() +} + +func (wr *workflowRunnerImpl) SetTaskRawInput(input interface{}) { + wr.RunnerCtx.SetTaskRawInput(input) +} + +func (wr *workflowRunnerImpl) SetTaskRawOutput(output interface{}) { + wr.RunnerCtx.SetTaskRawOutput(output) } -func (wr *workflowRunnerImpl) GetTaskContext() TaskContext { +func (wr *workflowRunnerImpl) SetTaskDef(task model.Task) error { + return wr.RunnerCtx.SetTaskDef(task) +} + +func (wr *workflowRunnerImpl) SetTaskStartedAt(startedAt time.Time) { + wr.RunnerCtx.SetTaskStartedAt(startedAt) +} + +func (wr *workflowRunnerImpl) SetTaskName(name string) { + wr.RunnerCtx.SetTaskName(name) +} + +func (wr *workflowRunnerImpl) GetContext() context.Context { + return wr.Context +} + +func (wr *workflowRunnerImpl) GetWorkflowCtx() ctx.WorkflowContext { return wr.RunnerCtx } +func (wr *workflowRunnerImpl) SetTaskStatus(task string, status ctx.StatusPhase) { + wr.RunnerCtx.SetTaskStatus(task, status) +} + func (wr *workflowRunnerImpl) GetWorkflowDef() *model.Workflow { return wr.Workflow } +func (wr *workflowRunnerImpl) SetWorkflowInstanceCtx(value interface{}) { + wr.RunnerCtx.SetInstanceCtx(value) +} + // Run executes the workflow synchronously. func (wr *workflowRunnerImpl) Run(input interface{}) (output interface{}, err error) { defer func() { if err != nil { - wr.RunnerCtx.SetStatus(FaultedStatus) - err = wr.wrapWorkflowError(err, "/") + wr.RunnerCtx.SetStatus(ctx.FaultedStatus) + err = wr.wrapWorkflowError(err) } }() + wr.RunnerCtx.SetRawInput(input) + // Process input if input, err = wr.processInput(input); err != nil { return nil, err @@ -75,42 +135,57 @@ func (wr *workflowRunnerImpl) Run(input interface{}) (output interface{}, err er wr.RunnerCtx.SetInput(input) // Run tasks sequentially - wr.RunnerCtx.SetStatus(RunningStatus) + wr.RunnerCtx.SetStatus(ctx.RunningStatus) doRunner, err := NewDoTaskRunner(wr.Workflow.Do, wr) if err != nil { return nil, err } + wr.RunnerCtx.SetStartedAt(time.Now()) output, err = doRunner.Run(wr.RunnerCtx.GetInput()) if err != nil { return nil, err } + wr.RunnerCtx.ClearTaskContext() + // Process output if output, err = wr.processOutput(output); err != nil { return nil, err } wr.RunnerCtx.SetOutput(output) - wr.RunnerCtx.SetStatus(CompletedStatus) + wr.RunnerCtx.SetStatus(ctx.CompletedStatus) return output, nil } // wrapWorkflowError ensures workflow errors have a proper instance reference. 
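// A minimal end-to-end sketch of the reworked runner API shown above:
// NewDefaultRunner now returns (WorkflowRunner, error) and builds its own
// ctx.WorkflowContext before Run is called. The YAML document, the input and
// the printed output below are assumptions for illustration only.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/impl"
	"github.com/serverlessworkflow/sdk-go/v3/parser"
)

func main() {
	source := []byte(`
document:
  dsl: '1.0.0'
  namespace: examples
  name: set-demo
  version: '1.0.0'
do:
  - initialize:
      set:
        greeting: ${ "hello " + .name }
`)

	workflow, err := parser.FromYAMLSource(source)
	if err != nil {
		panic(err)
	}

	// The constructor can now fail, e.g. when the workflow context cannot be built.
	runner, err := impl.NewDefaultRunner(workflow)
	if err != nil {
		panic(err)
	}

	output, err := runner.Run(map[string]interface{}{"name": "world"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%v\n", output) // expected: map[greeting:hello world]
}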
-func (wr *workflowRunnerImpl) wrapWorkflowError(err error, taskName string) error { +func (wr *workflowRunnerImpl) wrapWorkflowError(err error) error { + taskReference := wr.RunnerCtx.GetTaskReference() + if len(taskReference) == 0 { + taskReference = "/" + } if knownErr := model.AsError(err); knownErr != nil { - return knownErr.WithInstanceRef(wr.Workflow, taskName) + return knownErr.WithInstanceRef(wr.Workflow, taskReference) } - return model.NewErrRuntime(fmt.Errorf("workflow '%s', task '%s': %w", wr.Workflow.Document.Name, taskName, err), taskName) + return model.NewErrRuntime(fmt.Errorf("workflow '%s', task '%s': %w", wr.Workflow.Document.Name, taskReference, err), taskReference) } // processInput validates and transforms input if needed. func (wr *workflowRunnerImpl) processInput(input interface{}) (output interface{}, err error) { if wr.Workflow.Input != nil { - output, err = processIO(input, wr.Workflow.Input.Schema, wr.Workflow.Input.From, "/") - if err != nil { - return nil, err + if wr.Workflow.Input.Schema != nil { + if err = validateSchema(input, wr.Workflow.Input.Schema, "/"); err != nil { + return nil, err + } + } + + if wr.Workflow.Input.From != nil { + output, err = traverseAndEvaluate(wr.Workflow.Input.From, input, "/", wr.Context) + if err != nil { + return nil, err + } + return output, nil } - return output, nil } return input, nil } @@ -118,7 +193,18 @@ func (wr *workflowRunnerImpl) processInput(input interface{}) (output interface{ // processOutput applies output transformations. func (wr *workflowRunnerImpl) processOutput(output interface{}) (interface{}, error) { if wr.Workflow.Output != nil { - return processIO(output, wr.Workflow.Output.Schema, wr.Workflow.Output.As, "/") + if wr.Workflow.Output.As != nil { + var err error + output, err = traverseAndEvaluate(wr.Workflow.Output.As, output, "/", wr.Context) + if err != nil { + return nil, err + } + } + if wr.Workflow.Output.Schema != nil { + if err := validateSchema(output, wr.Workflow.Output.Schema, "/"); err != nil { + return nil, err + } + } } return output, nil } diff --git a/impl/task_runner_test.go b/impl/runner_test.go similarity index 81% rename from impl/task_runner_test.go rename to impl/runner_test.go index c5a76d7..32c9c86 100644 --- a/impl/task_runner_test.go +++ b/impl/runner_test.go @@ -15,15 +15,60 @@ package impl import ( - "os" - "path/filepath" - "testing" - + "context" + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/serverlessworkflow/sdk-go/v3/parser" "github.com/stretchr/testify/assert" + "os" + "path/filepath" + "testing" ) +type taskSupportOpts func(*workflowRunnerImpl) + +// newTaskSupport returns an instance of TaskSupport for test purposes +func newTaskSupport(opts ...taskSupportOpts) TaskSupport { + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + if err != nil { + panic(fmt.Errorf("failed to create workflow context within the test environment: %v", err)) + } + + ts := &workflowRunnerImpl{ + Workflow: nil, + Context: context.TODO(), + RunnerCtx: wfCtx, + } + + // Apply each functional option to ts + for _, opt := range opts { + opt(ts) + } + + return ts +} + +//nolint:unused +func withWorkflow(wf *model.Workflow) taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.Workflow = wf + } +} + +//nolint:unused +func withContext(ctx context.Context) taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.Context = ctx + } +} + +func withRunnerCtx(workflowContext ctx.WorkflowContext) 
taskSupportOpts { + return func(ts *workflowRunnerImpl) { + ts.RunnerCtx = workflowContext + } +} + // runWorkflowTest is a reusable test function for workflows func runWorkflowTest(t *testing.T, workflowPath string, input, expectedOutput map[string]interface{}) { // Run the workflow @@ -50,7 +95,8 @@ func runWorkflow(t *testing.T, workflowPath string, input, expectedOutput map[st assert.NoError(t, err, "Failed to parse workflow YAML") // Initialize the workflow runner - runner := NewDefaultRunner(workflow) + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) // Run the workflow output, err = runner.Run(input) @@ -151,7 +197,8 @@ func TestWorkflowRunner_Run_YAML_WithSchemaValidation(t *testing.T) { assert.NoError(t, err, "Failed to read workflow YAML file") workflow, err := parser.FromYAMLSource(yamlBytes) assert.NoError(t, err, "Failed to parse workflow YAML") - runner := NewDefaultRunner(workflow) + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) _, err = runner.Run(input) assert.Error(t, err, "Expected validation error for invalid input") assert.Contains(t, err.Error(), "JSON schema validation failed") @@ -178,7 +225,8 @@ func TestWorkflowRunner_Run_YAML_WithSchemaValidation(t *testing.T) { assert.NoError(t, err, "Failed to read workflow YAML file") workflow, err := parser.FromYAMLSource(yamlBytes) assert.NoError(t, err, "Failed to parse workflow YAML") - runner := NewDefaultRunner(workflow) + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) _, err = runner.Run(input) assert.Error(t, err, "Expected validation error for invalid task input") assert.Contains(t, err.Error(), "JSON schema validation failed") @@ -205,7 +253,8 @@ func TestWorkflowRunner_Run_YAML_WithSchemaValidation(t *testing.T) { assert.NoError(t, err, "Failed to read workflow YAML file") workflow, err := parser.FromYAMLSource(yamlBytes) assert.NoError(t, err, "Failed to parse workflow YAML") - runner := NewDefaultRunner(workflow) + runner, err := NewDefaultRunner(workflow) + assert.NoError(t, err) _, err = runner.Run(input) assert.Error(t, err, "Expected validation error for invalid task output") assert.Contains(t, err.Error(), "JSON schema validation failed") @@ -266,9 +315,12 @@ func TestWorkflowRunner_Run_YAML_ControlFlow(t *testing.T) { func TestWorkflowRunner_Run_YAML_RaiseTasks(t *testing.T) { // TODO: add $workflow context to the expr processing - //t.Run("Raise Inline Error", func(t *testing.T) { - // runWorkflowTest(t, "./testdata/raise_inline.yaml", nil, nil) - //}) + t.Run("Raise Inline Error", func(t *testing.T) { + runWorkflowWithErr(t, "./testdata/raise_inline.yaml", nil, nil, func(err error) { + assert.Equal(t, model.ErrorTypeValidation, model.AsError(err).Type.String()) + assert.Equal(t, "Invalid input provided to workflow raise-inline", model.AsError(err).Detail.String()) + }) + }) t.Run("Raise Referenced Error", func(t *testing.T) { runWorkflowWithErr(t, "./testdata/raise_reusable.yaml", nil, nil, @@ -312,7 +364,6 @@ func TestWorkflowRunner_Run_YAML_RaiseTasks_ControlFlow(t *testing.T) { } func TestForTaskRunner_Run(t *testing.T) { - t.Skip("Skipping until the For task is implemented - missing JQ variables implementation") t.Run("Simple For with Colors", func(t *testing.T) { workflowPath := "./testdata/for_colors.yaml" input := map[string]interface{}{ @@ -320,8 +371,36 @@ func TestForTaskRunner_Run(t *testing.T) { } expectedOutput := map[string]interface{}{ "processed": map[string]interface{}{ - "colors": []string{"red", "green", "blue"}, - "indexed": 
[]float64{0, 1, 2}, + "colors": []interface{}{"red", "green", "blue"}, + "indexes": []interface{}{0, 1, 2}, + }, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("SUM Numbers", func(t *testing.T) { + workflowPath := "./testdata/for_sum_numbers.yaml" + input := map[string]interface{}{ + "numbers": []int32{2, 3, 4}, + } + expectedOutput := map[string]interface{}{ + "result": interface{}(9), + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("For Nested Loops", func(t *testing.T) { + workflowPath := "./testdata/for_nested_loops.yaml" + input := map[string]interface{}{ + "fruits": []interface{}{"apple", "banana"}, + "colors": []interface{}{"red", "green"}, + } + expectedOutput := map[string]interface{}{ + "matrix": []interface{}{ + []interface{}{"apple", "red"}, + []interface{}{"apple", "green"}, + []interface{}{"banana", "red"}, + []interface{}{"banana", "green"}, }, } runWorkflowTest(t, workflowPath, input, expectedOutput) diff --git a/impl/task_runner.go b/impl/task_runner.go index 05d3817..a302bca 100644 --- a/impl/task_runner.go +++ b/impl/task_runner.go @@ -15,238 +15,41 @@ package impl import ( - "fmt" - "reflect" - "strings" - - "github.com/serverlessworkflow/sdk-go/v3/expr" + "context" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" + "time" ) var _ TaskRunner = &SetTaskRunner{} var _ TaskRunner = &RaiseTaskRunner{} var _ TaskRunner = &ForTaskRunner{} +var _ TaskRunner = &DoTaskRunner{} type TaskRunner interface { Run(input interface{}) (interface{}, error) GetTaskName() string } -func NewSetTaskRunner(taskName string, task *model.SetTask) (*SetTaskRunner, error) { - if task == nil || task.Set == nil { - return nil, model.NewErrValidation(fmt.Errorf("no set configuration provided for SetTask %s", taskName), taskName) - } - return &SetTaskRunner{ - Task: task, - TaskName: taskName, - }, nil -} - -type SetTaskRunner struct { - Task *model.SetTask - TaskName string -} - -func (s *SetTaskRunner) GetTaskName() string { - return s.TaskName -} - -func (s *SetTaskRunner) Run(input interface{}) (output interface{}, err error) { - setObject := deepClone(s.Task.Set) - result, err := expr.TraverseAndEvaluate(setObject, input) - if err != nil { - return nil, model.NewErrExpression(err, s.TaskName) - } - - output, ok := result.(map[string]interface{}) - if !ok { - return nil, model.NewErrRuntime(fmt.Errorf("expected output to be a map[string]interface{}, but got a different type. 
Got: %v", result), s.TaskName) - } - - return output, nil -} - -func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, workflowDef *model.Workflow) (*RaiseTaskRunner, error) { - if err := resolveErrorDefinition(task, workflowDef); err != nil { - return nil, err - } - if task.Raise.Error.Definition == nil { - return nil, model.NewErrValidation(fmt.Errorf("no raise configuration provided for RaiseTask %s", taskName), taskName) - } - return &RaiseTaskRunner{ - Task: task, - TaskName: taskName, - }, nil -} - -// TODO: can e refactored to a definition resolver callable from the context -func resolveErrorDefinition(t *model.RaiseTask, workflowDef *model.Workflow) error { - if workflowDef != nil && t.Raise.Error.Ref != nil { - notFoundErr := model.NewErrValidation(fmt.Errorf("%v error definition not found in 'uses'", t.Raise.Error.Ref), "") - if workflowDef.Use != nil && workflowDef.Use.Errors != nil { - definition, ok := workflowDef.Use.Errors[*t.Raise.Error.Ref] - if !ok { - return notFoundErr - } - t.Raise.Error.Definition = definition - return nil - } - return notFoundErr - } - return nil -} - -type RaiseTaskRunner struct { - Task *model.RaiseTask - TaskName string -} - -var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ - model.ErrorTypeAuthentication: model.NewErrAuthentication, - model.ErrorTypeValidation: model.NewErrValidation, - model.ErrorTypeCommunication: model.NewErrCommunication, - model.ErrorTypeAuthorization: model.NewErrAuthorization, - model.ErrorTypeConfiguration: model.NewErrConfiguration, - model.ErrorTypeExpression: model.NewErrExpression, - model.ErrorTypeRuntime: model.NewErrRuntime, - model.ErrorTypeTimeout: model.NewErrTimeout, -} - -func (r *RaiseTaskRunner) Run(input interface{}) (output interface{}, err error) { - output = input - // TODO: make this an external func so we can call it after getting the reference? 
Or we can get the reference from the workflow definition - var detailResult interface{} - detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName) - if err != nil { - return nil, err - } - - var titleResult interface{} - titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName) - if err != nil { - return nil, err - } - - instance := &model.JsonPointerOrRuntimeExpression{Value: r.TaskName} - - var raiseErr *model.Error - if raiseErrF, ok := raiseErrFuncMapping[r.Task.Raise.Error.Definition.Type.String()]; ok { - raiseErr = raiseErrF(fmt.Errorf("%v", detailResult), instance.String()) - } else { - raiseErr = r.Task.Raise.Error.Definition - raiseErr.Detail = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", detailResult)) - raiseErr.Instance = instance - } - - raiseErr.Title = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", titleResult)) - err = raiseErr - - return output, err -} - -func (r *RaiseTaskRunner) GetTaskName() string { - return r.TaskName -} - -func NewForTaskRunner(taskName string, task *model.ForTask, taskSupport TaskSupport) (*ForTaskRunner, error) { - if task == nil || task.Do == nil { - return nil, model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) - } - - doRunner, err := NewDoTaskRunner(task.Do, taskSupport) - if err != nil { - return nil, err - } - - return &ForTaskRunner{ - Task: task, - TaskName: taskName, - DoRunner: doRunner, - }, nil -} - -const ( - forTaskDefaultEach = "$item" - forTaskDefaultAt = "$index" -) - -type ForTaskRunner struct { - Task *model.ForTask - TaskName string - DoRunner *DoTaskRunner -} - -func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { - f.sanitizeFor() - in, err := expr.TraverseAndEvaluate(f.Task.For.In, input) - if err != nil { - return nil, err - } - - var forOutput interface{} - rv := reflect.ValueOf(in) - switch rv.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < rv.Len(); i++ { - item := rv.Index(i).Interface() - - if forOutput, err = f.processForItem(i, item, forOutput); err != nil { - return nil, err - } - } - case reflect.Invalid: - return input, nil - default: - if forOutput, err = f.processForItem(0, in, forOutput); err != nil { - return nil, err - } - } - - return forOutput, nil -} - -func (f *ForTaskRunner) processForItem(idx int, item interface{}, forOutput interface{}) (interface{}, error) { - forInput := map[string]interface{}{ - f.Task.For.At: idx, - f.Task.For.Each: item, - } - if forOutput != nil { - if outputMap, ok := forOutput.(map[string]interface{}); ok { - for key, value := range outputMap { - forInput[key] = value - } - } else { - return nil, fmt.Errorf("task %s item %s at index %d returned a non-json object, impossible to merge context", f.TaskName, f.Task.For.Each, idx) - } - } - var err error - forOutput, err = f.DoRunner.Run(forInput) - if err != nil { - return nil, err - } - - return forOutput, nil -} - -func (f *ForTaskRunner) sanitizeFor() { - f.Task.For.Each = strings.TrimSpace(f.Task.For.Each) - f.Task.For.At = strings.TrimSpace(f.Task.For.At) - - if f.Task.For.Each == "" { - f.Task.For.Each = forTaskDefaultEach - } - if f.Task.For.At == "" { - f.Task.For.At = forTaskDefaultAt - } - - if !strings.HasPrefix(f.Task.For.Each, "$") { - f.Task.For.Each = "$" + f.Task.For.Each - } - if !strings.HasPrefix(f.Task.For.At, "$") { - f.Task.For.At = "$" + f.Task.For.At - } -} - -func (f *ForTaskRunner) GetTaskName() string { - return 
f.TaskName +type TaskSupport interface { + SetTaskStatus(task string, status ctx.StatusPhase) + GetWorkflowDef() *model.Workflow + // SetWorkflowInstanceCtx is the `$context` variable accessible in JQ expressions and set in `export.as` + SetWorkflowInstanceCtx(value interface{}) + // GetContext gets the sharable Workflow context. Accessible via ctx.GetWorkflowContext. + GetContext() context.Context + SetTaskRawInput(value interface{}) + SetTaskRawOutput(value interface{}) + SetTaskDef(task model.Task) error + SetTaskStartedAt(value time.Time) + SetTaskName(name string) + // SetTaskReferenceFromName based on the taskName and the model.Workflow definition, set the JSON Pointer reference to the context + SetTaskReferenceFromName(taskName string) error + GetTaskReference() string + // SetLocalExprVars overrides local variables in expression processing + SetLocalExprVars(vars map[string]interface{}) + // AddLocalExprVars adds to the local variables in expression processing. Won't override previous entries. + AddLocalExprVars(vars map[string]interface{}) + // RemoveLocalExprVars removes local variables added in AddLocalExprVars or SetLocalExprVars + RemoveLocalExprVars(keys ...string) } diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go index a34a4dd..75249b1 100644 --- a/impl/task_runner_do.go +++ b/impl/task_runner_do.go @@ -16,27 +16,18 @@ package impl import ( "fmt" - - "github.com/serverlessworkflow/sdk-go/v3/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" + "time" ) -var _ TaskRunner = &DoTaskRunner{} - -type TaskSupport interface { - GetTaskContext() TaskContext - GetWorkflowDef() *model.Workflow -} - -// TODO: refactor to receive a resolver handler instead of the workflow runner - // NewTaskRunner creates a TaskRunner instance based on the task type. func NewTaskRunner(taskName string, task model.Task, taskSupport TaskSupport) (TaskRunner, error) { switch t := task.(type) { case *model.SetTask: - return NewSetTaskRunner(taskName, t) + return NewSetTaskRunner(taskName, t, taskSupport) case *model.RaiseTask: - return NewRaiseTaskRunner(taskName, t, taskSupport.GetWorkflowDef()) + return NewRaiseTaskRunner(taskName, t, taskSupport) case *model.DoTask: return NewDoTaskRunner(t.Do, taskSupport) case *model.ForTask: @@ -62,15 +53,15 @@ func (d *DoTaskRunner) Run(input interface{}) (output interface{}, err error) { if d.TaskList == nil { return input, nil } - return d.executeTasks(input, d.TaskList) + return d.runTasks(input, d.TaskList) } func (d *DoTaskRunner) GetTaskName() string { return "" } -// executeTasks runs all defined tasks sequentially. -func (d *DoTaskRunner) executeTasks(input interface{}, tasks *model.TaskList) (output interface{}, err error) { +// runTasks runs all defined tasks sequentially. 
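// The TaskRunner contract added in impl/task_runner.go above is intentionally
// small. A toy, self-contained implementation of just that contract; the
// echoTaskRunner type is hypothetical and only illustrates the two methods,
// whereas real runners such as SetTaskRunner also receive a TaskSupport.
package main

import "fmt"

// TaskRunner mirrors the interface defined in impl/task_runner.go.
type TaskRunner interface {
	Run(input interface{}) (interface{}, error)
	GetTaskName() string
}

// echoTaskRunner returns its input unchanged, tagged with a task name.
type echoTaskRunner struct{ name string }

func (e *echoTaskRunner) GetTaskName() string { return e.name }

func (e *echoTaskRunner) Run(input interface{}) (interface{}, error) {
	// A real runner would transform the input (e.g. evaluate jq expressions);
	// this sketch simply echoes it.
	return input, nil
}

func main() {
	var r TaskRunner = &echoTaskRunner{name: "echo"}
	out, _ := r.Run(map[string]interface{}{"ok": true})
	fmt.Println(r.GetTaskName(), out)
}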
+func (d *DoTaskRunner) runTasks(input interface{}, tasks *model.TaskList) (output interface{}, err error) { output = input if tasks == nil { return output, nil @@ -78,9 +69,15 @@ func (d *DoTaskRunner) executeTasks(input interface{}, tasks *model.TaskList) (o idx := 0 currentTask := (*tasks)[idx] - ctx := d.TaskSupport.GetTaskContext() for currentTask != nil { + if err = d.TaskSupport.SetTaskDef(currentTask); err != nil { + return nil, err + } + if err = d.TaskSupport.SetTaskReferenceFromName(currentTask.Key); err != nil { + return nil, err + } + if shouldRun, err := d.shouldRunTask(input, currentTask); err != nil { return output, err } else if !shouldRun { @@ -88,19 +85,19 @@ func (d *DoTaskRunner) executeTasks(input interface{}, tasks *model.TaskList) (o continue } - ctx.SetTaskStatus(currentTask.Key, PendingStatus) + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.PendingStatus) runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, d.TaskSupport) if err != nil { return output, err } - ctx.SetTaskStatus(currentTask.Key, RunningStatus) + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.RunningStatus) if output, err = d.runTask(input, runner, currentTask.Task.GetBase()); err != nil { - ctx.SetTaskStatus(currentTask.Key, FaultedStatus) + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) return output, err } - ctx.SetTaskStatus(currentTask.Key, CompletedStatus) + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) input = deepCloneValue(output) idx, currentTask = tasks.Next(idx) } @@ -110,13 +107,11 @@ func (d *DoTaskRunner) executeTasks(input interface{}, tasks *model.TaskList) (o func (d *DoTaskRunner) shouldRunTask(input interface{}, task *model.TaskItem) (bool, error) { if task.GetBase().If != nil { - output, err := expr.TraverseAndEvaluate(task.GetBase().If.String(), input) + output, err := traverseAndEvaluateBool(task.GetBase().If.String(), input, d.TaskSupport.GetContext()) if err != nil { return false, model.NewErrExpression(err, task.Key) } - if result, ok := output.(bool); ok && !result { - return false, nil - } + return output, nil } return true, nil } @@ -125,6 +120,10 @@ func (d *DoTaskRunner) shouldRunTask(input interface{}, task *model.TaskItem) (b func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { taskName := runner.GetTaskName() + d.TaskSupport.SetTaskStartedAt(time.Now()) + d.TaskSupport.SetTaskRawInput(input) + d.TaskSupport.SetTaskName(taskName) + if task.Input != nil { if input, err = d.processTaskInput(task, input, taskName); err != nil { return nil, err @@ -136,10 +135,16 @@ func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model return nil, err } + d.TaskSupport.SetTaskRawOutput(output) + if output, err = d.processTaskOutput(task, output, taskName); err != nil { return nil, err } + if err = d.processTaskExport(task, output, taskName); err != nil { + return nil, err + } + return output, nil } @@ -153,7 +158,7 @@ func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interfac return nil, err } - if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName); err != nil { + if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName, d.TaskSupport.GetContext()); err != nil { return nil, err } @@ -166,7 +171,7 @@ func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interf return taskOutput, nil } - if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName); 
err != nil { + if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName, d.TaskSupport.GetContext()); err != nil { return nil, err } @@ -176,3 +181,22 @@ func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interf return output, nil } + +func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interface{}, taskName string) (err error) { + if task.Export == nil { + return nil + } + + output, err := traverseAndEvaluate(task.Export.As, taskOutput, taskName, d.TaskSupport.GetContext()) + if err != nil { + return err + } + + if err = validateSchema(output, task.Export.Schema, taskName); err != nil { + return nil + } + + d.TaskSupport.SetWorkflowInstanceCtx(output) + + return nil +} diff --git a/impl/task_runner_for.go b/impl/task_runner_for.go new file mode 100644 index 0000000..825e7f6 --- /dev/null +++ b/impl/task_runner_for.go @@ -0,0 +1,135 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" + "reflect" + "strings" +) + +const ( + forTaskDefaultEach = "$item" + forTaskDefaultAt = "$index" +) + +func NewForTaskRunner(taskName string, task *model.ForTask, taskSupport TaskSupport) (*ForTaskRunner, error) { + if task == nil || task.Do == nil { + return nil, model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) + } + + doRunner, err := NewDoTaskRunner(task.Do, taskSupport) + if err != nil { + return nil, err + } + + return &ForTaskRunner{ + Task: task, + TaskName: taskName, + DoRunner: doRunner, + TaskSupport: taskSupport, + }, nil +} + +type ForTaskRunner struct { + Task *model.ForTask + TaskName string + DoRunner *DoTaskRunner + TaskSupport TaskSupport +} + +func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { + defer func() { + // clear local variables + f.TaskSupport.RemoveLocalExprVars(f.Task.For.Each, f.Task.For.At) + }() + f.sanitizeFor() + in, err := expr.TraverseAndEvaluate(f.Task.For.In, input, f.TaskSupport.GetContext()) + if err != nil { + return nil, err + } + + forOutput := input + rv := reflect.ValueOf(in) + switch rv.Kind() { + case reflect.Slice, reflect.Array: + for i := 0; i < rv.Len(); i++ { + item := rv.Index(i).Interface() + + if forOutput, err = f.processForItem(i, item, forOutput); err != nil { + return nil, err + } + if f.Task.While != "" { + whileIsTrue, err := traverseAndEvaluateBool(f.Task.While, forOutput, f.TaskSupport.GetContext()) + if err != nil { + return nil, err + } + if !whileIsTrue { + break + } + } + } + case reflect.Invalid: + return input, nil + default: + if forOutput, err = f.processForItem(0, in, forOutput); err != nil { + return nil, err + } + } + + return forOutput, nil +} + +func (f *ForTaskRunner) processForItem(idx int, item interface{}, forOutput interface{}) (interface{}, error) { + forVars := map[string]interface{}{ + f.Task.For.At: idx, + 
f.Task.For.Each: item, + } + // Instead of Set, we Add since other tasks in this very same context might be adding variables to the context + f.TaskSupport.AddLocalExprVars(forVars) + // output from previous iterations are merged together + var err error + forOutput, err = f.DoRunner.Run(forOutput) + if err != nil { + return nil, err + } + + return forOutput, nil +} + +func (f *ForTaskRunner) sanitizeFor() { + f.Task.For.Each = strings.TrimSpace(f.Task.For.Each) + f.Task.For.At = strings.TrimSpace(f.Task.For.At) + + if f.Task.For.Each == "" { + f.Task.For.Each = forTaskDefaultEach + } + if f.Task.For.At == "" { + f.Task.For.At = forTaskDefaultAt + } + + if !strings.HasPrefix(f.Task.For.Each, "$") { + f.Task.For.Each = "$" + f.Task.For.Each + } + if !strings.HasPrefix(f.Task.For.At, "$") { + f.Task.For.At = "$" + f.Task.For.At + } +} + +func (f *ForTaskRunner) GetTaskName() string { + return f.TaskName +} diff --git a/impl/task_runner_raise.go b/impl/task_runner_raise.go new file mode 100644 index 0000000..46014a5 --- /dev/null +++ b/impl/task_runner_raise.go @@ -0,0 +1,105 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, taskSupport TaskSupport) (*RaiseTaskRunner, error) { + if err := resolveErrorDefinition(task, taskSupport.GetWorkflowDef()); err != nil { + return nil, err + } + + if task.Raise.Error.Definition == nil { + return nil, model.NewErrValidation(fmt.Errorf("no raise configuration provided for RaiseTask %s", taskName), taskName) + } + return &RaiseTaskRunner{ + Task: task, + TaskName: taskName, + TaskSupport: taskSupport, + }, nil +} + +// TODO: can e refactored to a definition resolver callable from the context +func resolveErrorDefinition(t *model.RaiseTask, workflowDef *model.Workflow) error { + if workflowDef != nil && t.Raise.Error.Ref != nil { + notFoundErr := model.NewErrValidation(fmt.Errorf("%v error definition not found in 'uses'", t.Raise.Error.Ref), "") + if workflowDef.Use != nil && workflowDef.Use.Errors != nil { + definition, ok := workflowDef.Use.Errors[*t.Raise.Error.Ref] + if !ok { + return notFoundErr + } + t.Raise.Error.Definition = definition + return nil + } + return notFoundErr + } + return nil +} + +type RaiseTaskRunner struct { + Task *model.RaiseTask + TaskName string + TaskSupport TaskSupport +} + +var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ + model.ErrorTypeAuthentication: model.NewErrAuthentication, + model.ErrorTypeValidation: model.NewErrValidation, + model.ErrorTypeCommunication: model.NewErrCommunication, + model.ErrorTypeAuthorization: model.NewErrAuthorization, + model.ErrorTypeConfiguration: model.NewErrConfiguration, + model.ErrorTypeExpression: model.NewErrExpression, + model.ErrorTypeRuntime: model.NewErrRuntime, + model.ErrorTypeTimeout: model.NewErrTimeout, +} + +func (r 
*RaiseTaskRunner) Run(input interface{}) (output interface{}, err error) { + output = input + // TODO: make this an external func so we can call it after getting the reference? Or we can get the reference from the workflow definition + var detailResult interface{} + detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, r.TaskSupport.GetContext()) + if err != nil { + return nil, err + } + + var titleResult interface{} + titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, r.TaskSupport.GetContext()) + if err != nil { + return nil, err + } + + instance := r.TaskSupport.GetTaskReference() + + var raiseErr *model.Error + if raiseErrF, ok := raiseErrFuncMapping[r.Task.Raise.Error.Definition.Type.String()]; ok { + raiseErr = raiseErrF(fmt.Errorf("%v", detailResult), instance) + } else { + raiseErr = r.Task.Raise.Error.Definition + raiseErr.Detail = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", detailResult)) + raiseErr.Instance = &model.JsonPointerOrRuntimeExpression{Value: instance} + } + + raiseErr.Title = model.NewStringOrRuntimeExpr(fmt.Sprintf("%v", titleResult)) + err = raiseErr + + return output, err +} + +func (r *RaiseTaskRunner) GetTaskName() string { + return r.TaskName +} diff --git a/impl/task_runner_raise_test.go b/impl/task_runner_raise_test.go index 3527283..e85ac28 100644 --- a/impl/task_runner_raise_test.go +++ b/impl/task_runner_raise_test.go @@ -17,6 +17,7 @@ package impl import ( "encoding/json" "errors" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "testing" "github.com/serverlessworkflow/sdk-go/v3/model" @@ -39,7 +40,11 @@ func TestRaiseTaskRunner_WithDefinedError(t *testing.T) { }, } - runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, nil) + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + assert.NoError(t, err) + wfCtx.SetTaskReference("task_raise_defined") + + runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, newTaskSupport(withRunnerCtx(wfCtx))) assert.NoError(t, err) output, err := runner.Run(input) @@ -70,7 +75,7 @@ func TestRaiseTaskRunner_WithReferencedError(t *testing.T) { }, } - runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, nil) + runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, newTaskSupport()) assert.Error(t, err) assert.Nil(t, runner) } @@ -93,7 +98,11 @@ func TestRaiseTaskRunner_TimeoutErrorWithExpression(t *testing.T) { }, } - runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, nil) + wfCtx, err := ctx.NewWorkflowContext(&model.Workflow{}) + assert.NoError(t, err) + wfCtx.SetTaskReference("task_raise_timeout_expr") + + runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, newTaskSupport(withRunnerCtx(wfCtx))) assert.NoError(t, err) output, err := runner.Run(input) diff --git a/impl/task_runner_set.go b/impl/task_runner_set.go new file mode 100644 index 0000000..295a5f2 --- /dev/null +++ b/impl/task_runner_set.go @@ -0,0 +1,56 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "fmt" + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewSetTaskRunner(taskName string, task *model.SetTask, taskSupport TaskSupport) (*SetTaskRunner, error) { + if task == nil || task.Set == nil { + return nil, model.NewErrValidation(fmt.Errorf("no set configuration provided for SetTask %s", taskName), taskName) + } + return &SetTaskRunner{ + Task: task, + TaskName: taskName, + TaskSupport: taskSupport, + }, nil +} + +type SetTaskRunner struct { + Task *model.SetTask + TaskName string + TaskSupport TaskSupport +} + +func (s *SetTaskRunner) GetTaskName() string { + return s.TaskName +} + +func (s *SetTaskRunner) Run(input interface{}) (output interface{}, err error) { + setObject := deepClone(s.Task.Set) + result, err := traverseAndEvaluate(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, s.TaskSupport.GetContext()) + if err != nil { + return nil, err + } + + output, ok := result.(map[string]interface{}) + if !ok { + return nil, model.NewErrRuntime(fmt.Errorf("expected output to be a map[string]interface{}, but got a different type. Got: %v", result), s.TaskName) + } + + return output, nil +} diff --git a/impl/task_set_test.go b/impl/task_set_test.go index 48ca18b..c1d5534 100644 --- a/impl/task_set_test.go +++ b/impl/task_set_test.go @@ -45,7 +45,7 @@ func TestSetTaskExecutor_Exec(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task1", setTask) + executor, err := NewSetTaskRunner("task1", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -79,7 +79,7 @@ func TestSetTaskExecutor_StaticValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_static", setTask) + executor, err := NewSetTaskRunner("task_static", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -109,7 +109,7 @@ func TestSetTaskExecutor_RuntimeExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_runtime_expr", setTask) + executor, err := NewSetTaskRunner("task_runtime_expr", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -141,7 +141,7 @@ func TestSetTaskExecutor_NestedStructures(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_nested_structures", setTask) + executor, err := NewSetTaskRunner("task_nested_structures", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -176,7 +176,7 @@ func TestSetTaskExecutor_StaticAndDynamicValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_static_dynamic", setTask) + executor, err := NewSetTaskRunner("task_static_dynamic", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -201,7 +201,7 @@ func TestSetTaskExecutor_MissingInputData(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_missing_input", setTask) + executor, err := NewSetTaskRunner("task_missing_input", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -220,7 +220,7 @@ func TestSetTaskExecutor_ExpressionsWithFunctions(t *testing.T) { }, } - executor, err 
:= NewSetTaskRunner("task_expr_functions", setTask) + executor, err := NewSetTaskRunner("task_expr_functions", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -246,7 +246,7 @@ func TestSetTaskExecutor_ConditionalExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_conditional_expr", setTask) + executor, err := NewSetTaskRunner("task_conditional_expr", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -273,7 +273,7 @@ func TestSetTaskExecutor_ArrayDynamicIndex(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_array_indexing", setTask) + executor, err := NewSetTaskRunner("task_array_indexing", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -299,7 +299,7 @@ func TestSetTaskExecutor_NestedConditionalLogic(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_nested_condition", setTask) + executor, err := NewSetTaskRunner("task_nested_condition", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -323,7 +323,7 @@ func TestSetTaskExecutor_DefaultValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_default_values", setTask) + executor, err := NewSetTaskRunner("task_default_values", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -363,7 +363,7 @@ func TestSetTaskExecutor_ComplexNestedStructures(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_complex_nested", setTask) + executor, err := NewSetTaskRunner("task_complex_nested", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) @@ -399,7 +399,7 @@ func TestSetTaskExecutor_MultipleExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_multiple_expr", setTask) + executor, err := NewSetTaskRunner("task_multiple_expr", setTask, newTaskSupport()) assert.NoError(t, err) output, err := executor.Run(input) diff --git a/impl/testdata/for_nested_loops.yaml b/impl/testdata/for_nested_loops.yaml new file mode 100644 index 0000000..3bef556 --- /dev/null +++ b/impl/testdata/for_nested_loops.yaml @@ -0,0 +1,35 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: for-tests + name: nested-loops + version: '1.0.0' +do: + - outerLoop: + for: + in: ${ .fruits } + each: fruit + at: fruitIdx + do: + - innerLoop: + for: + in: ${ $input.colors } + each: color + at: colorIdx + do: + - combinePair: + set: + matrix: ${ .matrix + [[$fruit, $color]] } diff --git a/impl/testdata/for_sum_numbers.yaml b/impl/testdata/for_sum_numbers.yaml new file mode 100644 index 0000000..afc81e9 --- /dev/null +++ b/impl/testdata/for_sum_numbers.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: for-tests + name: sum-numbers + version: '1.0.0' +do: + - sumLoop: + for: + in: ${ .numbers } + do: + - addNumber: + set: + total: ${ .total + $item } + - finalize: + set: + result: ${ .total } diff --git a/impl/testdata/raise_inline.yaml b/impl/testdata/raise_inline.yaml index c464877..940528a 100644 --- a/impl/testdata/raise_inline.yaml +++ b/impl/testdata/raise_inline.yaml @@ -24,4 +24,4 @@ do: type: https://serverlessworkflow.io/spec/1.0.0/errors/validation status: 400 title: Validation Error - detail: ${ "Invalid input provided to workflow '\( $workflow.definition.document.name )'" } + detail: ${ "Invalid input provided to workflow \($workflow.definition.document.name)" } diff --git a/impl/utils.go b/impl/utils.go index 2cdf952..20b2360 100644 --- a/impl/utils.go +++ b/impl/utils.go @@ -15,7 +15,8 @@ package impl import ( - "github.com/serverlessworkflow/sdk-go/v3/expr" + "context" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -51,31 +52,27 @@ func validateSchema(data interface{}, schema *model.Schema, taskName string) err return nil } -func traverseAndEvaluate(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string) (output interface{}, err error) { +func traverseAndEvaluate(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string, wfCtx context.Context) (output interface{}, err error) { if runtimeExpr == nil { return input, nil } - output, err = expr.TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input) + output, err = expr.TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input, wfCtx) if err != nil { return nil, model.NewErrExpression(err, taskName) } return output, nil } -func processIO(data interface{}, schema *model.Schema, transformation *model.ObjectOrRuntimeExpr, taskName string) (interface{}, error) { - if schema != nil { - if err := validateSchema(data, schema, taskName); err != nil { - return nil, err - } +func traverseAndEvaluateBool(runtimeExpr string, input interface{}, wfCtx context.Context) (bool, error) { + if len(runtimeExpr) == 0 { + return false, nil } - - if transformation != nil { - transformed, err := traverseAndEvaluate(transformation, data, taskName) - if err != nil { - return nil, err - } - return transformed, nil + output, err := expr.TraverseAndEvaluate(runtimeExpr, input, wfCtx) + if err != nil { + return false, nil } - - return data, nil + if result, ok := output.(bool); ok { + return result, nil + } + return false, nil } diff --git a/model/errors.go b/model/errors.go index eeef71c..9700f17 100644 --- a/model/errors.go +++ b/model/errors.go @@ -18,7 +18,6 @@ import ( "encoding/json" "errors" "fmt" - "reflect" "strings" ) @@ -77,10 +76,10 @@ func (e *Error) WithInstanceRef(workflow *Workflow, taskName string) *Error { } // Generate a JSON pointer reference for the task within the workflow - instance, pointerErr := GenerateJSONPointer(workflow, taskName) - if pointerErr == nil { - e.Instance = &JsonPointerOrRuntimeExpression{Value: instance} - } + //instance, pointerErr := GenerateJSONPointer(workflow, 
taskName) + //if pointerErr == nil { + // e.Instance = &JsonPointerOrRuntimeExpression{Value: instance} + //} // TODO: log the pointer error return e @@ -268,57 +267,3 @@ func ErrorFromJSON(jsonStr string) (*Error, error) { } // JsonPointer functions - -func findJsonPointer(data interface{}, target string, path string) (string, bool) { - switch node := data.(type) { - case map[string]interface{}: - for key, value := range node { - newPath := fmt.Sprintf("%s/%s", path, key) - if key == target { - return newPath, true - } - if result, found := findJsonPointer(value, target, newPath); found { - return result, true - } - } - case []interface{}: - for i, item := range node { - newPath := fmt.Sprintf("%s/%d", path, i) - if result, found := findJsonPointer(item, target, newPath); found { - return result, true - } - } - } - return "", false -} - -// GenerateJSONPointer Function to generate JSON Pointer from a Workflow reference -func GenerateJSONPointer(workflow *Workflow, targetNode interface{}) (string, error) { - // Convert struct to JSON - jsonData, err := json.Marshal(workflow) - if err != nil { - return "", fmt.Errorf("error marshalling to JSON: %w", err) - } - - // Convert JSON to a generic map for traversal - var jsonMap map[string]interface{} - if err := json.Unmarshal(jsonData, &jsonMap); err != nil { - return "", fmt.Errorf("error unmarshalling JSON: %w", err) - } - - transformedNode := "" - switch node := targetNode.(type) { - case string: - transformedNode = node - default: - transformedNode = strings.ToLower(reflect.TypeOf(targetNode).Name()) - } - - // Search for the target node - jsonPointer, found := findJsonPointer(jsonMap, transformedNode, "") - if !found { - return "", fmt.Errorf("node '%s' not found", targetNode) - } - - return jsonPointer, nil -} diff --git a/model/objects.go b/model/objects.go index d79ac55..2bb8dd9 100644 --- a/model/objects.go +++ b/model/objects.go @@ -73,6 +73,12 @@ type ObjectOrRuntimeExpr struct { Value interface{} `json:"-" validate:"object_or_runtime_expr"` // Custom validation tag. } +func NewObjectOrRuntimeExpr(value interface{}) *ObjectOrRuntimeExpr { + return &ObjectOrRuntimeExpr{ + Value: value, + } +} + func (o *ObjectOrRuntimeExpr) String() string { return fmt.Sprintf("%v", o.Value) } diff --git a/model/runtime_expression.go b/model/runtime_expression.go index 6a056cb..ae04e46 100644 --- a/model/runtime_expression.go +++ b/model/runtime_expression.go @@ -17,8 +17,8 @@ package model import ( "encoding/json" "fmt" - - "github.com/serverlessworkflow/sdk-go/v3/expr" + "github.com/itchyny/gojq" + "strings" ) // RuntimeExpression represents a runtime expression. 
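// A short sketch of the expression helpers added to model/runtime_expression.go
// above; the sample expressions are assumptions chosen only for illustration.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	// IsStrictExpr only checks for the ${ ... } enclosure.
	fmt.Println(model.IsStrictExpr("${ .user.name }")) // true
	fmt.Println(model.IsStrictExpr(".user.name"))      // false

	// SanitizeExpr strips the enclosure and swaps single quotes for double quotes.
	fmt.Println(model.SanitizeExpr("${ 'hello' }")) // "hello"

	// IsValidExpr sanitizes and then asks gojq to parse the result.
	fmt.Println(model.IsValidExpr("${ .foo | length }")) // true
	fmt.Println(model.IsValidExpr("${ .foo( }"))         // false
}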
@@ -34,9 +34,34 @@ func NewExpr(runtimeExpression string) *RuntimeExpression { return &RuntimeExpression{Value: runtimeExpression} } +// IsStrictExpr returns true if the string is enclosed in `${ }` +func IsStrictExpr(expression string) bool { + return strings.HasPrefix(expression, "${") && strings.HasSuffix(expression, "}") +} + +// SanitizeExpr processes the expression to ensure it's ready for evaluation +// It removes `${}` if present and replaces single quotes with double quotes +func SanitizeExpr(expression string) string { + // Remove `${}` enclosure if present + if IsStrictExpr(expression) { + expression = strings.TrimSpace(expression[2 : len(expression)-1]) + } + + // Replace single quotes with double quotes + expression = strings.ReplaceAll(expression, "'", "\"") + + return expression +} + +func IsValidExpr(expression string) bool { + expression = SanitizeExpr(expression) + _, err := gojq.Parse(expression) + return err == nil +} + // IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. func (r *RuntimeExpression) IsValid() bool { - return expr.IsValid(r.Value) + return IsValidExpr(r.Value) } // UnmarshalJSON implements custom unmarshalling for RuntimeExpression. diff --git a/model/runtime_expression_test.go b/model/runtime_expression_test.go index 296e1de..770af70 100644 --- a/model/runtime_expression_test.go +++ b/model/runtime_expression_test.go @@ -68,3 +68,152 @@ func TestRuntimeExpressionUnmarshalJSON(t *testing.T) { type RuntimeExpressionAcme struct { Expression RuntimeExpression `json:"expression"` } + +func TestIsStrictExpr(t *testing.T) { + tests := []struct { + name string + expression string + want bool + }{ + { + name: "StrictExpr with braces", + expression: "${.some.path}", + want: true, + }, + { + name: "Missing closing brace", + expression: "${.some.path", + want: false, + }, + { + name: "Missing opening brace", + expression: ".some.path}", + want: false, + }, + { + name: "Empty string", + expression: "", + want: false, + }, + { + name: "No braces at all", + expression: ".some.path", + want: false, + }, + { + name: "With spaces but still correct", + expression: "${ .some.path }", + want: true, + }, + { + name: "Only braces", + expression: "${}", + want: true, // Technically matches prefix+suffix + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := IsStrictExpr(tc.expression) + if got != tc.want { + t.Errorf("IsStrictExpr(%q) = %v, want %v", tc.expression, got, tc.want) + } + }) + } +} + +func TestSanitize(t *testing.T) { + tests := []struct { + name string + expression string + want string + }{ + { + name: "Remove braces and replace single quotes", + expression: "${ 'some.path' }", + want: `"some.path"`, + }, + { + name: "Already sanitized string, no braces", + expression: ".some.path", + want: ".some.path", + }, + { + name: "Multiple single quotes", + expression: "${ 'foo' + 'bar' }", + want: `"foo" + "bar"`, + }, + { + name: "Only braces with spaces", + expression: "${ }", + want: "", + }, + { + name: "No braces, just single quotes to be replaced", + expression: "'some.path'", + want: `"some.path"`, + }, + { + name: "Nothing to sanitize", + expression: "", + want: "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := SanitizeExpr(tc.expression) + if got != tc.want { + t.Errorf("Sanitize(%q) = %q, want %q", tc.expression, got, tc.want) + } + }) + } +} + +func TestIsValid(t *testing.T) { + tests := []struct { + name string + expression string + want bool + 
}{ + { + name: "Valid expression - simple path", + expression: "${ .foo }", + want: true, + }, + { + name: "Valid expression - array slice", + expression: "${ .arr[0] }", + want: true, + }, + { + name: "Invalid syntax", + expression: "${ .foo( }", + want: false, + }, + { + name: "No braces but valid JQ (directly provided)", + expression: ".bar", + want: true, + }, + { + name: "Empty expression", + expression: "", + want: true, // empty is parseable but yields an empty query + }, + { + name: "Invalid bracket usage", + expression: "${ .arr[ }", + want: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := IsValidExpr(tc.expression) + if got != tc.want { + t.Errorf("IsValid(%q) = %v, want %v", tc.expression, got, tc.want) + } + }) + } +} diff --git a/model/workflow.go b/model/workflow.go index 313a9e5..15dba7e 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -31,6 +31,20 @@ type Workflow struct { Schedule *Schedule `json:"schedule,omitempty" yaml:"schedule,omitempty"` } +// AsMap converts the Workflow struct into a JSON Map object. +func (w *Workflow) AsMap() (map[string]interface{}, error) { + jsonBytes, err := json.Marshal(w) + if err != nil { + return nil, err + } + + var m map[string]interface{} + if err = json.Unmarshal(jsonBytes, &m); err != nil { + return nil, err + } + return m, nil +} + func (w *Workflow) MarshalYAML() (interface{}, error) { // Create a map to hold fields data := map[string]interface{}{ From 45bb41e088f64f765a25ee6935339fe72b66bf28 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Mon, 31 Mar 2025 13:37:26 -0300 Subject: [PATCH 103/110] Fix #233 - Add support to 'switch' task (#234) * Fix #233 - Add support to 'switch' task Signed-off-by: Ricardo Zanini * Fix headers and linters Signed-off-by: Ricardo Zanini --------- Signed-off-by: Ricardo Zanini --- README.md | 4 +- impl/ctx/context.go | 5 ++- impl/expr/expr.go | 1 + impl/json_pointer.go | 3 +- impl/json_pointer_test.go | 11 +++--- impl/runner.go | 3 +- impl/runner_test.go | 55 ++++++++++++++++++++++++-- impl/task_runner.go | 3 +- impl/task_runner_do.go | 47 +++++++++++++++++++++- impl/task_runner_for.go | 5 ++- impl/task_runner_raise.go | 1 + impl/task_runner_raise_test.go | 3 +- impl/task_runner_set.go | 1 + impl/testdata/switch_match.yaml | 43 ++++++++++++++++++++ impl/testdata/switch_with_default.yaml | 43 ++++++++++++++++++++ impl/utils.go | 1 + model/runtime_expression.go | 11 +++++- 17 files changed, 220 insertions(+), 20 deletions(-) create mode 100644 impl/testdata/switch_match.yaml create mode 100644 impl/testdata/switch_with_default.yaml diff --git a/README.md b/README.md index f05e54c..1a6654e 100644 --- a/README.md +++ b/README.md @@ -132,7 +132,7 @@ The table below lists the current state of this implementation. This table is a | Task Raise | ✅ | | Task Run | ❌ | | Task Set | ✅ | -| Task Switch | ❌ | +| Task Switch | ✅ | | Task Try | ❌ | | Task Wait | ❌ | | Lifecycle Events | 🟡 | @@ -157,7 +157,7 @@ The table below lists the current state of this implementation. This table is a | AsyncAPI Server | ❌ | | AsyncAPI Outbound Message | ❌ | | AsyncAPI Subscription | ❌ | -| Workflow Definition Reference | ❌ | +| Workflow Definition Reference | ✅ | | Subscription Iterator | ❌ | We love contributions! Our aim is to have a complete implementation to serve as a reference or to become a project on its own to favor the CNCF Ecosystem.
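With this patch the README marks the Switch task as supported. A hypothetical end-to-end sketch of exercising it is below; it assumes the v3 parser still exposes a `FromFile` helper and that the `impl` package exposes a `NewDefaultRunner` constructor — neither name appears in this diff, so treat both as assumptions rather than the SDK's confirmed API.

```go
// Hypothetical usage sketch, not part of the patch.
package main

import (
	"fmt"
	"log"

	"github.com/serverlessworkflow/sdk-go/v3/impl"   // assumed import path for the runner
	"github.com/serverlessworkflow/sdk-go/v3/parser" // assumed to expose FromFile
)

func main() {
	// Load the switch example added by this patch.
	workflow, err := parser.FromFile("impl/testdata/switch_match.yaml") // assumed helper
	if err != nil {
		log.Fatal(err)
	}

	runner := impl.NewDefaultRunner(workflow) // assumed constructor name
	output, err := runner.Run(map[string]interface{}{"color": "red"})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(output) // per runner_test.go, the expected result is map[colors:[red]]
}
```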
diff --git a/impl/ctx/context.go b/impl/ctx/context.go index 1f0d716..f013507 100644 --- a/impl/ctx/context.go +++ b/impl/ctx/context.go @@ -19,10 +19,11 @@ import ( "encoding/json" "errors" "fmt" - "github.com/google/uuid" - "github.com/serverlessworkflow/sdk-go/v3/model" "sync" "time" + + "github.com/google/uuid" + "github.com/serverlessworkflow/sdk-go/v3/model" ) var ErrWorkflowContextNotFound = errors.New("workflow context not found") diff --git a/impl/expr/expr.go b/impl/expr/expr.go index 03d558e..60e2765 100644 --- a/impl/expr/expr.go +++ b/impl/expr/expr.go @@ -18,6 +18,7 @@ import ( "context" "errors" "fmt" + "github.com/itchyny/gojq" "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" diff --git a/impl/json_pointer.go b/impl/json_pointer.go index 4d276ff..dedaaf3 100644 --- a/impl/json_pointer.go +++ b/impl/json_pointer.go @@ -17,9 +17,10 @@ package impl import ( "encoding/json" "fmt" - "github.com/serverlessworkflow/sdk-go/v3/model" "reflect" "strings" + + "github.com/serverlessworkflow/sdk-go/v3/model" ) func findJsonPointer(data interface{}, target string, path string) (string, bool) { diff --git a/impl/json_pointer_test.go b/impl/json_pointer_test.go index 76077bc..aeec1e4 100644 --- a/impl/json_pointer_test.go +++ b/impl/json_pointer_test.go @@ -15,9 +15,10 @@ package impl import ( + "testing" + "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/stretchr/testify/assert" - "testing" ) // TestGenerateJSONPointer_SimpleTask tests a simple workflow task. @@ -60,8 +61,8 @@ func TestGenerateJSONPointer_ForkTask(t *testing.T) { Fork: model.ForkTaskConfiguration{ Compete: true, Branches: &model.TaskList{ - {Key: "callNurse", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/nurses")}}}, - {Key: "callDoctor", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/doctor")}}}, + &model.TaskItem{Key: "callNurse", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/nurses")}}}, + &model.TaskItem{Key: "callDoctor", Task: &model.CallHTTP{Call: "http", With: model.HTTPArguments{Method: "put", Endpoint: model.NewEndpoint("https://hospital.com/api/alert/doctor")}}}, }, }, }, @@ -85,12 +86,12 @@ func TestGenerateJSONPointer_DeepNestedTask(t *testing.T) { Fork: model.ForkTaskConfiguration{ Compete: false, Branches: &model.TaskList{ - { + &model.TaskItem{ Key: "branchA", Task: &model.ForkTask{ Fork: model.ForkTaskConfiguration{ Branches: &model.TaskList{ - { + &model.TaskItem{ Key: "deepTask", Task: &model.SetTask{Set: map[string]interface{}{"result": "done"}}, }, diff --git a/impl/runner.go b/impl/runner.go index 1c9ad8b..5328ee3 100644 --- a/impl/runner.go +++ b/impl/runner.go @@ -17,9 +17,10 @@ package impl import ( "context" "fmt" + "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" - "time" ) var _ WorkflowRunner = &workflowRunnerImpl{} diff --git a/impl/runner_test.go b/impl/runner_test.go index 32c9c86..9bb599c 100644 --- a/impl/runner_test.go +++ b/impl/runner_test.go @@ -17,13 +17,14 @@ package impl import ( "context" "fmt" + "os" + "path/filepath" + "testing" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/serverlessworkflow/sdk-go/v3/parser" 
"github.com/stretchr/testify/assert" - "os" - "path/filepath" - "testing" ) type taskSupportOpts func(*workflowRunnerImpl) @@ -407,3 +408,51 @@ func TestForTaskRunner_Run(t *testing.T) { }) } + +func TestSwitchTaskRunner_Run(t *testing.T) { + t.Run("Color is red", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "red", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"red"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Color is green", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "green", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"green"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) + + t.Run("Color is blue", func(t *testing.T) { + workflowPath := "./testdata/switch_match.yaml" + input := map[string]interface{}{ + "color": "blue", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} + +func TestSwitchTaskRunner_DefaultCase(t *testing.T) { + t.Run("Color is unknown, should match default", func(t *testing.T) { + workflowPath := "./testdata/switch_with_default.yaml" + input := map[string]interface{}{ + "color": "yellow", + } + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"default"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} diff --git a/impl/task_runner.go b/impl/task_runner.go index a302bca..6d9069d 100644 --- a/impl/task_runner.go +++ b/impl/task_runner.go @@ -16,9 +16,10 @@ package impl import ( "context" + "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" - "time" ) var _ TaskRunner = &SetTaskRunner{} diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go index 75249b1..81ef374 100644 --- a/impl/task_runner_do.go +++ b/impl/task_runner_do.go @@ -16,9 +16,10 @@ package impl import ( "fmt" + "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" - "time" ) // NewTaskRunner creates a TaskRunner instance based on the task type. 
@@ -86,6 +87,24 @@ func (d *DoTaskRunner) runTasks(input interface{}, tasks *model.TaskList) (outpu } d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.PendingStatus) + + // Check if this task is a SwitchTask and handle it + if switchTask, ok := currentTask.Task.(*model.SwitchTask); ok { + flowDirective, err := d.evaluateSwitchTask(input, currentTask.Key, switchTask) + if err != nil { + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) + return output, err + } + d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) + + // Process FlowDirective: update idx/currentTask accordingly + idx, currentTask = tasks.KeyAndIndex(flowDirective.Value) + if currentTask == nil { + return nil, fmt.Errorf("flow directive target '%s' not found", flowDirective.Value) + } + continue + } + runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, d.TaskSupport) if err != nil { return output, err @@ -116,6 +135,32 @@ func (d *DoTaskRunner) shouldRunTask(input interface{}, task *model.TaskItem) (b return true, nil } +func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskKey string, switchTask *model.SwitchTask) (*model.FlowDirective, error) { + var defaultThen *model.FlowDirective + for _, switchItem := range switchTask.Switch { + for _, switchCase := range switchItem { + if switchCase.When == nil { + defaultThen = switchCase.Then + continue + } + result, err := traverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, d.TaskSupport.GetContext()) + if err != nil { + return nil, model.NewErrExpression(err, taskKey) + } + if result { + if switchCase.Then == nil { + return nil, model.NewErrExpression(fmt.Errorf("missing 'then' directive in matched switch case"), taskKey) + } + return switchCase.Then, nil + } + } + } + if defaultThen != nil { + return defaultThen, nil + } + return nil, model.NewErrExpression(fmt.Errorf("no matching switch case"), taskKey) +} + // runTask executes an individual task. 
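The jump performed for a matched case relies on `tasks.KeyAndIndex` to resolve the `then` directive back to a task position. Below is a small sketch (not taken from the patch) of that lookup, using only model types that appear elsewhere in this series; the exact semantics of the returned index are assumed from the call site above.

```go
// Sketch only: KeyAndIndex is the lookup the do-runner uses to follow a switch case's `then` directive.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	tasks := model.TaskList{
		&model.TaskItem{Key: "setRed", Task: &model.SetTask{Set: map[string]interface{}{"color": "red"}}},
		&model.TaskItem{Key: "setGreen", Task: &model.SetTask{Set: map[string]interface{}{"color": "green"}}},
	}

	// Resolve the directive target back to its position in the list.
	idx, item := tasks.KeyAndIndex("setGreen")
	if item == nil {
		fmt.Println("flow directive target not found")
		return
	}
	fmt.Println(idx, item.Key) // position and key of the task the runner jumps to
}
```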
func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { taskName := runner.GetTaskName() diff --git a/impl/task_runner_for.go b/impl/task_runner_for.go index 825e7f6..fb7bcff 100644 --- a/impl/task_runner_for.go +++ b/impl/task_runner_for.go @@ -16,10 +16,11 @@ package impl import ( "fmt" - "github.com/serverlessworkflow/sdk-go/v3/impl/expr" - "github.com/serverlessworkflow/sdk-go/v3/model" "reflect" "strings" + + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/model" ) const ( diff --git a/impl/task_runner_raise.go b/impl/task_runner_raise.go index 46014a5..b59f01d 100644 --- a/impl/task_runner_raise.go +++ b/impl/task_runner_raise.go @@ -16,6 +16,7 @@ package impl import ( "fmt" + "github.com/serverlessworkflow/sdk-go/v3/model" ) diff --git a/impl/task_runner_raise_test.go b/impl/task_runner_raise_test.go index e85ac28..0c55f3a 100644 --- a/impl/task_runner_raise_test.go +++ b/impl/task_runner_raise_test.go @@ -17,9 +17,10 @@ package impl import ( "encoding/json" "errors" - "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "testing" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" + "github.com/serverlessworkflow/sdk-go/v3/model" "github.com/stretchr/testify/assert" ) diff --git a/impl/task_runner_set.go b/impl/task_runner_set.go index 295a5f2..fc40e74 100644 --- a/impl/task_runner_set.go +++ b/impl/task_runner_set.go @@ -16,6 +16,7 @@ package impl import ( "fmt" + "github.com/serverlessworkflow/sdk-go/v3/model" ) diff --git a/impl/testdata/switch_match.yaml b/impl/testdata/switch_match.yaml new file mode 100644 index 0000000..4f913af --- /dev/null +++ b/impl/testdata/switch_match.yaml @@ -0,0 +1,43 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: default + name: switch-match + version: '1.0.0' +do: + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - blue: + when: '.color == "blue"' + then: setBlue + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: end + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' + then: end diff --git a/impl/testdata/switch_with_default.yaml b/impl/testdata/switch_with_default.yaml new file mode 100644 index 0000000..8a4f1b9 --- /dev/null +++ b/impl/testdata/switch_with_default.yaml @@ -0,0 +1,43 @@ +# Copyright 2025 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +document: + dsl: '1.0.0' + namespace: default + name: switch-with-default + version: '1.0.0' + +do: + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - fallback: + then: setDefault + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: end + - setDefault: + set: + colors: '${ .colors + [ "default" ] }' + then: end diff --git a/impl/utils.go b/impl/utils.go index 20b2360..a62559d 100644 --- a/impl/utils.go +++ b/impl/utils.go @@ -16,6 +16,7 @@ package impl import ( "context" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" "github.com/serverlessworkflow/sdk-go/v3/model" ) diff --git a/model/runtime_expression.go b/model/runtime_expression.go index ae04e46..adef566 100644 --- a/model/runtime_expression.go +++ b/model/runtime_expression.go @@ -17,8 +17,9 @@ package model import ( "encoding/json" "fmt" - "github.com/itchyny/gojq" "strings" + + "github.com/itchyny/gojq" ) // RuntimeExpression represents a runtime expression. @@ -59,6 +60,14 @@ func IsValidExpr(expression string) bool { return err == nil } +// NormalizeExpr adds ${} to the given string +func NormalizeExpr(expr string) string { + if strings.HasPrefix(expr, "${") { + return expr + } + return fmt.Sprintf("${%s}", expr) +} + // IsValid checks if the RuntimeExpression value is valid, handling both with and without `${}`. 
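Taken together with `SanitizeExpr` and `IsValidExpr` from the previous patch, the `NormalizeExpr` helper added here lets callers pass either bare jq filters or `${ ... }`-wrapped ones. A short sketch (not part of the patch) of how the three helpers behave:

```go
// Sketch only: round-tripping an expression through the model helpers shown in this series.
package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v3/model"
)

func main() {
	bare := `.color == "red"`

	// NormalizeExpr wraps a bare jq filter in ${...}; already-wrapped input is returned unchanged.
	normalized := model.NormalizeExpr(bare)
	fmt.Println(normalized) // ${.color == "red"}

	// SanitizeExpr strips the ${} wrapper (and swaps single quotes for double quotes).
	fmt.Println(model.SanitizeExpr(normalized)) // .color == "red"

	// IsValidExpr sanitizes and then checks that gojq can parse the filter.
	fmt.Println(model.IsValidExpr(normalized))   // true
	fmt.Println(model.IsValidExpr("${ .arr[ }")) // false: unbalanced bracket
}
```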
func (r *RuntimeExpression) IsValid() bool { return IsValidExpr(r.Value) From f72901259dd88dcc36baad5e1a9baa86ae84d783 Mon Sep 17 00:00:00 2001 From: Francisco Javier Tirado Sarti <65240126+fjtirado@users.noreply.github.com> Date: Mon, 7 Apr 2025 23:43:47 +0200 Subject: [PATCH 104/110] Refactoring run method (#236) * Refactoring run method Signed-off-by: fjtirado * Zaninis comments Signed-off-by: fjtirado --------- Signed-off-by: fjtirado --- impl/runner.go | 4 +- impl/task_runner.go | 2 +- impl/task_runner_call_http.go | 44 +++++++++++++++ impl/task_runner_do.go | 100 ++++++++++++++++----------------- impl/task_runner_for.go | 36 ++++++------ impl/task_runner_raise.go | 22 ++++---- impl/task_runner_raise_test.go | 12 ++-- impl/task_runner_set.go | 16 +++--- impl/task_set_test.go | 52 ++++++++--------- 9 files changed, 164 insertions(+), 124 deletions(-) create mode 100644 impl/task_runner_call_http.go diff --git a/impl/runner.go b/impl/runner.go index 5328ee3..362db1b 100644 --- a/impl/runner.go +++ b/impl/runner.go @@ -137,12 +137,12 @@ func (wr *workflowRunnerImpl) Run(input interface{}) (output interface{}, err er wr.RunnerCtx.SetInput(input) // Run tasks sequentially wr.RunnerCtx.SetStatus(ctx.RunningStatus) - doRunner, err := NewDoTaskRunner(wr.Workflow.Do, wr) + doRunner, err := NewDoTaskRunner(wr.Workflow.Do) if err != nil { return nil, err } wr.RunnerCtx.SetStartedAt(time.Now()) - output, err = doRunner.Run(wr.RunnerCtx.GetInput()) + output, err = doRunner.Run(wr.RunnerCtx.GetInput(), wr) if err != nil { return nil, err } diff --git a/impl/task_runner.go b/impl/task_runner.go index 6d9069d..ea7b6dd 100644 --- a/impl/task_runner.go +++ b/impl/task_runner.go @@ -28,7 +28,7 @@ var _ TaskRunner = &ForTaskRunner{} var _ TaskRunner = &DoTaskRunner{} type TaskRunner interface { - Run(input interface{}) (interface{}, error) + Run(input interface{}, taskSupport TaskSupport) (interface{}, error) GetTaskName() string } diff --git a/impl/task_runner_call_http.go b/impl/task_runner_call_http.go new file mode 100644 index 0000000..3093506 --- /dev/null +++ b/impl/task_runner_call_http.go @@ -0,0 +1,44 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +type CallHTTPTaskRunner struct { + TaskName string +} + +func NewCallHttpRunner(taskName string, task *model.CallHTTP) (taskRunner *CallHTTPTaskRunner, err error) { + if task == nil { + err = model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) + } else { + taskRunner = new(CallHTTPTaskRunner) + taskRunner.TaskName = taskName + } + return +} + +func (f *CallHTTPTaskRunner) Run(input interface{}, taskSupport TaskSupport) (interface{}, error) { + return input, nil + +} + +func (f *CallHTTPTaskRunner) GetTaskName() string { + return f.TaskName +} diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go index 81ef374..0301009 100644 --- a/impl/task_runner_do.go +++ b/impl/task_runner_do.go @@ -23,38 +23,38 @@ import ( ) // NewTaskRunner creates a TaskRunner instance based on the task type. -func NewTaskRunner(taskName string, task model.Task, taskSupport TaskSupport) (TaskRunner, error) { +func NewTaskRunner(taskName string, task model.Task, workflowDef *model.Workflow) (TaskRunner, error) { switch t := task.(type) { case *model.SetTask: - return NewSetTaskRunner(taskName, t, taskSupport) + return NewSetTaskRunner(taskName, t) case *model.RaiseTask: - return NewRaiseTaskRunner(taskName, t, taskSupport) + return NewRaiseTaskRunner(taskName, t, workflowDef) case *model.DoTask: - return NewDoTaskRunner(t.Do, taskSupport) + return NewDoTaskRunner(t.Do) case *model.ForTask: - return NewForTaskRunner(taskName, t, taskSupport) + return NewForTaskRunner(taskName, t) + case *model.CallHTTP: + return NewCallHttpRunner(taskName, t) default: return nil, fmt.Errorf("unsupported task type '%T' for task '%s'", t, taskName) } } -func NewDoTaskRunner(taskList *model.TaskList, taskSupport TaskSupport) (*DoTaskRunner, error) { +func NewDoTaskRunner(taskList *model.TaskList) (*DoTaskRunner, error) { return &DoTaskRunner{ - TaskList: taskList, - TaskSupport: taskSupport, + TaskList: taskList, }, nil } type DoTaskRunner struct { - TaskList *model.TaskList - TaskSupport TaskSupport + TaskList *model.TaskList } -func (d *DoTaskRunner) Run(input interface{}) (output interface{}, err error) { +func (d *DoTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { if d.TaskList == nil { return input, nil } - return d.runTasks(input, d.TaskList) + return d.runTasks(input, taskSupport) } func (d *DoTaskRunner) GetTaskName() string { @@ -62,71 +62,71 @@ func (d *DoTaskRunner) GetTaskName() string { } // runTasks runs all defined tasks sequentially. 
-func (d *DoTaskRunner) runTasks(input interface{}, tasks *model.TaskList) (output interface{}, err error) { +func (d *DoTaskRunner) runTasks(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { output = input - if tasks == nil { + if d.TaskList == nil { return output, nil } idx := 0 - currentTask := (*tasks)[idx] + currentTask := (*d.TaskList)[idx] for currentTask != nil { - if err = d.TaskSupport.SetTaskDef(currentTask); err != nil { + if err = taskSupport.SetTaskDef(currentTask); err != nil { return nil, err } - if err = d.TaskSupport.SetTaskReferenceFromName(currentTask.Key); err != nil { + if err = taskSupport.SetTaskReferenceFromName(currentTask.Key); err != nil { return nil, err } - if shouldRun, err := d.shouldRunTask(input, currentTask); err != nil { + if shouldRun, err := d.shouldRunTask(input, taskSupport, currentTask); err != nil { return output, err } else if !shouldRun { - idx, currentTask = tasks.Next(idx) + idx, currentTask = d.TaskList.Next(idx) continue } - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.PendingStatus) + taskSupport.SetTaskStatus(currentTask.Key, ctx.PendingStatus) // Check if this task is a SwitchTask and handle it if switchTask, ok := currentTask.Task.(*model.SwitchTask); ok { - flowDirective, err := d.evaluateSwitchTask(input, currentTask.Key, switchTask) + flowDirective, err := d.evaluateSwitchTask(input, taskSupport, currentTask.Key, switchTask) if err != nil { - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) + taskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) return output, err } - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) + taskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) // Process FlowDirective: update idx/currentTask accordingly - idx, currentTask = tasks.KeyAndIndex(flowDirective.Value) + idx, currentTask = d.TaskList.KeyAndIndex(flowDirective.Value) if currentTask == nil { return nil, fmt.Errorf("flow directive target '%s' not found", flowDirective.Value) } continue } - runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, d.TaskSupport) + runner, err := NewTaskRunner(currentTask.Key, currentTask.Task, taskSupport.GetWorkflowDef()) if err != nil { return output, err } - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.RunningStatus) - if output, err = d.runTask(input, runner, currentTask.Task.GetBase()); err != nil { - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) + taskSupport.SetTaskStatus(currentTask.Key, ctx.RunningStatus) + if output, err = d.runTask(input, taskSupport, runner, currentTask.Task.GetBase()); err != nil { + taskSupport.SetTaskStatus(currentTask.Key, ctx.FaultedStatus) return output, err } - d.TaskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) + taskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) input = deepCloneValue(output) - idx, currentTask = tasks.Next(idx) + idx, currentTask = d.TaskList.Next(idx) } return output, nil } -func (d *DoTaskRunner) shouldRunTask(input interface{}, task *model.TaskItem) (bool, error) { +func (d *DoTaskRunner) shouldRunTask(input interface{}, taskSupport TaskSupport, task *model.TaskItem) (bool, error) { if task.GetBase().If != nil { - output, err := traverseAndEvaluateBool(task.GetBase().If.String(), input, d.TaskSupport.GetContext()) + output, err := traverseAndEvaluateBool(task.GetBase().If.String(), input, taskSupport.GetContext()) if err != nil { return false, model.NewErrExpression(err, task.Key) } @@ -135,7 +135,7 @@ func (d *DoTaskRunner) 
shouldRunTask(input interface{}, task *model.TaskItem) (b return true, nil } -func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskKey string, switchTask *model.SwitchTask) (*model.FlowDirective, error) { +func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskSupport TaskSupport, taskKey string, switchTask *model.SwitchTask) (*model.FlowDirective, error) { var defaultThen *model.FlowDirective for _, switchItem := range switchTask.Switch { for _, switchCase := range switchItem { @@ -143,7 +143,7 @@ func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskKey string, swi defaultThen = switchCase.Then continue } - result, err := traverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, d.TaskSupport.GetContext()) + result, err := traverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, taskSupport.GetContext()) if err != nil { return nil, model.NewErrExpression(err, taskKey) } @@ -162,31 +162,31 @@ func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskKey string, swi } // runTask executes an individual task. -func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { +func (d *DoTaskRunner) runTask(input interface{}, taskSupport TaskSupport, runner TaskRunner, task *model.TaskBase) (output interface{}, err error) { taskName := runner.GetTaskName() - d.TaskSupport.SetTaskStartedAt(time.Now()) - d.TaskSupport.SetTaskRawInput(input) - d.TaskSupport.SetTaskName(taskName) + taskSupport.SetTaskStartedAt(time.Now()) + taskSupport.SetTaskRawInput(input) + taskSupport.SetTaskName(taskName) if task.Input != nil { - if input, err = d.processTaskInput(task, input, taskName); err != nil { + if input, err = d.processTaskInput(task, input, taskSupport, taskName); err != nil { return nil, err } } - output, err = runner.Run(input) + output, err = runner.Run(input, taskSupport) if err != nil { return nil, err } - d.TaskSupport.SetTaskRawOutput(output) + taskSupport.SetTaskRawOutput(output) - if output, err = d.processTaskOutput(task, output, taskName); err != nil { + if output, err = d.processTaskOutput(task, output, taskSupport, taskName); err != nil { return nil, err } - if err = d.processTaskExport(task, output, taskName); err != nil { + if err = d.processTaskExport(task, output, taskSupport, taskName); err != nil { return nil, err } @@ -194,7 +194,7 @@ func (d *DoTaskRunner) runTask(input interface{}, runner TaskRunner, task *model } // processTaskInput processes task input validation and transformation. -func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interface{}, taskName string) (output interface{}, err error) { +func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interface{}, taskSupport TaskSupport, taskName string) (output interface{}, err error) { if task.Input == nil { return taskInput, nil } @@ -203,7 +203,7 @@ func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interfac return nil, err } - if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName, d.TaskSupport.GetContext()); err != nil { + if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName, taskSupport.GetContext()); err != nil { return nil, err } @@ -211,12 +211,12 @@ func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interfac } // processTaskOutput processes task output validation and transformation. 
-func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interface{}, taskName string) (output interface{}, err error) { +func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interface{}, taskSupport TaskSupport, taskName string) (output interface{}, err error) { if task.Output == nil { return taskOutput, nil } - if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName, d.TaskSupport.GetContext()); err != nil { + if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName, taskSupport.GetContext()); err != nil { return nil, err } @@ -227,12 +227,12 @@ func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interf return output, nil } -func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interface{}, taskName string) (err error) { +func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interface{}, taskSupport TaskSupport, taskName string) (err error) { if task.Export == nil { return nil } - output, err := traverseAndEvaluate(task.Export.As, taskOutput, taskName, d.TaskSupport.GetContext()) + output, err := traverseAndEvaluate(task.Export.As, taskOutput, taskName, taskSupport.GetContext()) if err != nil { return err } @@ -241,7 +241,7 @@ func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interf return nil } - d.TaskSupport.SetWorkflowInstanceCtx(output) + taskSupport.SetWorkflowInstanceCtx(output) return nil } diff --git a/impl/task_runner_for.go b/impl/task_runner_for.go index fb7bcff..a53348d 100644 --- a/impl/task_runner_for.go +++ b/impl/task_runner_for.go @@ -28,38 +28,36 @@ const ( forTaskDefaultAt = "$index" ) -func NewForTaskRunner(taskName string, task *model.ForTask, taskSupport TaskSupport) (*ForTaskRunner, error) { +func NewForTaskRunner(taskName string, task *model.ForTask) (*ForTaskRunner, error) { if task == nil || task.Do == nil { return nil, model.NewErrValidation(fmt.Errorf("invalid For task %s", taskName), taskName) } - doRunner, err := NewDoTaskRunner(task.Do, taskSupport) + doRunner, err := NewDoTaskRunner(task.Do) if err != nil { return nil, err } return &ForTaskRunner{ - Task: task, - TaskName: taskName, - DoRunner: doRunner, - TaskSupport: taskSupport, + Task: task, + TaskName: taskName, + DoRunner: doRunner, }, nil } type ForTaskRunner struct { - Task *model.ForTask - TaskName string - DoRunner *DoTaskRunner - TaskSupport TaskSupport + Task *model.ForTask + TaskName string + DoRunner *DoTaskRunner } -func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { +func (f *ForTaskRunner) Run(input interface{}, taskSupport TaskSupport) (interface{}, error) { defer func() { // clear local variables - f.TaskSupport.RemoveLocalExprVars(f.Task.For.Each, f.Task.For.At) + taskSupport.RemoveLocalExprVars(f.Task.For.Each, f.Task.For.At) }() f.sanitizeFor() - in, err := expr.TraverseAndEvaluate(f.Task.For.In, input, f.TaskSupport.GetContext()) + in, err := expr.TraverseAndEvaluate(f.Task.For.In, input, taskSupport.GetContext()) if err != nil { return nil, err } @@ -71,11 +69,11 @@ func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { for i := 0; i < rv.Len(); i++ { item := rv.Index(i).Interface() - if forOutput, err = f.processForItem(i, item, forOutput); err != nil { + if forOutput, err = f.processForItem(i, item, taskSupport, forOutput); err != nil { return nil, err } if f.Task.While != "" { - whileIsTrue, err := traverseAndEvaluateBool(f.Task.While, forOutput, f.TaskSupport.GetContext()) + whileIsTrue, 
err := traverseAndEvaluateBool(f.Task.While, forOutput, taskSupport.GetContext()) if err != nil { return nil, err } @@ -87,7 +85,7 @@ func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { case reflect.Invalid: return input, nil default: - if forOutput, err = f.processForItem(0, in, forOutput); err != nil { + if forOutput, err = f.processForItem(0, in, taskSupport, forOutput); err != nil { return nil, err } } @@ -95,16 +93,16 @@ func (f *ForTaskRunner) Run(input interface{}) (interface{}, error) { return forOutput, nil } -func (f *ForTaskRunner) processForItem(idx int, item interface{}, forOutput interface{}) (interface{}, error) { +func (f *ForTaskRunner) processForItem(idx int, item interface{}, taskSupport TaskSupport, forOutput interface{}) (interface{}, error) { forVars := map[string]interface{}{ f.Task.For.At: idx, f.Task.For.Each: item, } // Instead of Set, we Add since other tasks in this very same context might be adding variables to the context - f.TaskSupport.AddLocalExprVars(forVars) + taskSupport.AddLocalExprVars(forVars) // output from previous iterations are merged together var err error - forOutput, err = f.DoRunner.Run(forOutput) + forOutput, err = f.DoRunner.Run(forOutput, taskSupport) if err != nil { return nil, err } diff --git a/impl/task_runner_raise.go b/impl/task_runner_raise.go index b59f01d..0de588f 100644 --- a/impl/task_runner_raise.go +++ b/impl/task_runner_raise.go @@ -20,8 +20,8 @@ import ( "github.com/serverlessworkflow/sdk-go/v3/model" ) -func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, taskSupport TaskSupport) (*RaiseTaskRunner, error) { - if err := resolveErrorDefinition(task, taskSupport.GetWorkflowDef()); err != nil { +func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, workflowDef *model.Workflow) (*RaiseTaskRunner, error) { + if err := resolveErrorDefinition(task, workflowDef); err != nil { return nil, err } @@ -29,9 +29,8 @@ func NewRaiseTaskRunner(taskName string, task *model.RaiseTask, taskSupport Task return nil, model.NewErrValidation(fmt.Errorf("no raise configuration provided for RaiseTask %s", taskName), taskName) } return &RaiseTaskRunner{ - Task: task, - TaskName: taskName, - TaskSupport: taskSupport, + Task: task, + TaskName: taskName, }, nil } @@ -53,9 +52,8 @@ func resolveErrorDefinition(t *model.RaiseTask, workflowDef *model.Workflow) err } type RaiseTaskRunner struct { - Task *model.RaiseTask - TaskName string - TaskSupport TaskSupport + Task *model.RaiseTask + TaskName string } var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ @@ -69,22 +67,22 @@ var raiseErrFuncMapping = map[string]func(error, string) *model.Error{ model.ErrorTypeTimeout: model.NewErrTimeout, } -func (r *RaiseTaskRunner) Run(input interface{}) (output interface{}, err error) { +func (r *RaiseTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { output = input // TODO: make this an external func so we can call it after getting the reference? 
Or we can get the reference from the workflow definition var detailResult interface{} - detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, r.TaskSupport.GetContext()) + detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } var titleResult interface{} - titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, r.TaskSupport.GetContext()) + titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } - instance := r.TaskSupport.GetTaskReference() + instance := taskSupport.GetTaskReference() var raiseErr *model.Error if raiseErrF, ok := raiseErrFuncMapping[r.Task.Raise.Error.Definition.Type.String()]; ok { diff --git a/impl/task_runner_raise_test.go b/impl/task_runner_raise_test.go index 0c55f3a..3de0aae 100644 --- a/impl/task_runner_raise_test.go +++ b/impl/task_runner_raise_test.go @@ -45,10 +45,11 @@ func TestRaiseTaskRunner_WithDefinedError(t *testing.T) { assert.NoError(t, err) wfCtx.SetTaskReference("task_raise_defined") - runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, newTaskSupport(withRunnerCtx(wfCtx))) + taskSupport := newTaskSupport(withRunnerCtx(wfCtx)) + runner, err := NewRaiseTaskRunner("task_raise_defined", raiseTask, taskSupport.GetWorkflowDef()) assert.NoError(t, err) - output, err := runner.Run(input) + output, err := runner.Run(input, taskSupport) assert.Equal(t, output, input) assert.Error(t, err) @@ -76,7 +77,7 @@ func TestRaiseTaskRunner_WithReferencedError(t *testing.T) { }, } - runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, newTaskSupport()) + runner, err := NewRaiseTaskRunner("task_raise_ref", raiseTask, &model.Workflow{}) assert.Error(t, err) assert.Nil(t, runner) } @@ -103,10 +104,11 @@ func TestRaiseTaskRunner_TimeoutErrorWithExpression(t *testing.T) { assert.NoError(t, err) wfCtx.SetTaskReference("task_raise_timeout_expr") - runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, newTaskSupport(withRunnerCtx(wfCtx))) + taskSupport := newTaskSupport(withRunnerCtx(wfCtx)) + runner, err := NewRaiseTaskRunner("task_raise_timeout_expr", raiseTask, taskSupport.GetWorkflowDef()) assert.NoError(t, err) - output, err := runner.Run(input) + output, err := runner.Run(input, taskSupport) assert.Equal(t, input, output) assert.Error(t, err) diff --git a/impl/task_runner_set.go b/impl/task_runner_set.go index fc40e74..40ff185 100644 --- a/impl/task_runner_set.go +++ b/impl/task_runner_set.go @@ -20,30 +20,28 @@ import ( "github.com/serverlessworkflow/sdk-go/v3/model" ) -func NewSetTaskRunner(taskName string, task *model.SetTask, taskSupport TaskSupport) (*SetTaskRunner, error) { +func NewSetTaskRunner(taskName string, task *model.SetTask) (*SetTaskRunner, error) { if task == nil || task.Set == nil { return nil, model.NewErrValidation(fmt.Errorf("no set configuration provided for SetTask %s", taskName), taskName) } return &SetTaskRunner{ - Task: task, - TaskName: taskName, - TaskSupport: taskSupport, + Task: task, + TaskName: taskName, }, nil } type SetTaskRunner struct { - Task *model.SetTask - TaskName string - TaskSupport TaskSupport + Task *model.SetTask + TaskName string } func (s *SetTaskRunner) GetTaskName() string { return s.TaskName } -func (s 
*SetTaskRunner) Run(input interface{}) (output interface{}, err error) { +func (s *SetTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { setObject := deepClone(s.Task.Set) - result, err := traverseAndEvaluate(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, s.TaskSupport.GetContext()) + result, err := traverseAndEvaluate(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } diff --git a/impl/task_set_test.go b/impl/task_set_test.go index c1d5534..c02d76d 100644 --- a/impl/task_set_test.go +++ b/impl/task_set_test.go @@ -45,10 +45,10 @@ func TestSetTaskExecutor_Exec(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task1", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task1", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -79,10 +79,10 @@ func TestSetTaskExecutor_StaticValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_static", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_static", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -109,10 +109,10 @@ func TestSetTaskExecutor_RuntimeExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_runtime_expr", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_runtime_expr", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -141,10 +141,10 @@ func TestSetTaskExecutor_NestedStructures(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_nested_structures", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_nested_structures", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -176,10 +176,10 @@ func TestSetTaskExecutor_StaticAndDynamicValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_static_dynamic", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_static_dynamic", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -201,10 +201,10 @@ func TestSetTaskExecutor_MissingInputData(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_missing_input", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_missing_input", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) assert.Nil(t, output.(map[string]interface{})["value"]) } @@ -220,10 +220,10 @@ func TestSetTaskExecutor_ExpressionsWithFunctions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_expr_functions", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_expr_functions", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -246,10 
+246,10 @@ func TestSetTaskExecutor_ConditionalExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_conditional_expr", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_conditional_expr", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -273,10 +273,10 @@ func TestSetTaskExecutor_ArrayDynamicIndex(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_array_indexing", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_array_indexing", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -299,10 +299,10 @@ func TestSetTaskExecutor_NestedConditionalLogic(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_nested_condition", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_nested_condition", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -323,10 +323,10 @@ func TestSetTaskExecutor_DefaultValues(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_default_values", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_default_values", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -363,10 +363,10 @@ func TestSetTaskExecutor_ComplexNestedStructures(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_complex_nested", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_complex_nested", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ @@ -399,10 +399,10 @@ func TestSetTaskExecutor_MultipleExpressions(t *testing.T) { }, } - executor, err := NewSetTaskRunner("task_multiple_expr", setTask, newTaskSupport()) + executor, err := NewSetTaskRunner("task_multiple_expr", setTask) assert.NoError(t, err) - output, err := executor.Run(input) + output, err := executor.Run(input, newTaskSupport()) assert.NoError(t, err) expectedOutput := map[string]interface{}{ From 46481f69f82207a56b6dbe910fb39179d9b4ff38 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 09:32:28 -0400 Subject: [PATCH 105/110] chore(deps): bump golang.org/x/net from 0.37.0 to 0.38.0 (#237) Bumps [golang.org/x/net](https://github.com/golang/net) from 0.37.0 to 0.38.0. - [Commits](https://github.com/golang/net/compare/v0.37.0...v0.38.0) --- updated-dependencies: - dependency-name: golang.org/x/net dependency-version: 0.38.0 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 5 ++--- go.sum | 6 ++---- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index e7947a8..646715d 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ toolchain go1.24.0 require ( github.com/go-playground/validator/v10 v10.25.0 + github.com/google/uuid v1.6.0 github.com/itchyny/gojq v0.12.17 github.com/stretchr/testify v1.10.0 github.com/tidwall/gjson v1.18.0 @@ -19,17 +20,15 @@ require ( github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/google/go-cmp v0.7.0 // indirect - github.com/google/uuid v1.6.0 // indirect github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/relvacode/iso8601 v1.6.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect golang.org/x/crypto v0.36.0 // indirect - golang.org/x/net v0.37.0 // indirect + golang.org/x/net v0.38.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/text v0.23.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index e6e3d38..489a35c 100644 --- a/go.sum +++ b/go.sum @@ -25,8 +25,6 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/relvacode/iso8601 v1.6.0 h1:eFXUhMJN3Gz8Rcq82f9DTMW0svjtAVuIEULglM7QHTU= -github.com/relvacode/iso8601 v1.6.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= @@ -47,8 +45,8 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17 github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= From 23710eeb237df59f27d870208109867589491dbb Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 30 Apr 2025 11:56:39 -0300 Subject: [PATCH 106/110] Fix #238 - Add support to fork task (#240) * Fix #238 - 
Add support to fork task Signed-off-by: Ricardo Zanini * Adding missed headers Signed-off-by: Ricardo Zanini * Fix linters, makefile, fmt Signed-off-by: Ricardo Zanini * Fix Labeler CI Signed-off-by: Ricardo Zanini * Remove labeler Signed-off-by: Ricardo Zanini --------- Signed-off-by: Ricardo Zanini --- .github/workflows/pull_request_labeler.yml | 26 ---- Makefile | 8 +- impl/ctx/context.go | 35 +++++ impl/expr/expr.go | 25 ++++ impl/runner.go | 23 +++- impl/runner_test.go | 11 ++ impl/task_runner.go | 3 + impl/task_runner_do.go | 23 ++-- impl/task_runner_for.go | 2 +- impl/task_runner_fork.go | 120 ++++++++++++++++++ impl/task_runner_fork_test.go | 101 +++++++++++++++ impl/task_runner_raise.go | 5 +- impl/task_runner_set.go | 7 +- ...sk_set_test.go => task_runner_set_test.go} | 0 .../testdata/fork_simple.yaml | 29 +++-- impl/utils.go | 79 ------------ impl/{ => utils}/json_schema.go | 15 ++- impl/utils/utils.go | 38 ++++++ 18 files changed, 413 insertions(+), 137 deletions(-) delete mode 100644 .github/workflows/pull_request_labeler.yml create mode 100644 impl/task_runner_fork.go create mode 100644 impl/task_runner_fork_test.go rename impl/{task_set_test.go => task_runner_set_test.go} (100%) rename .github/labeler.yml => impl/testdata/fork_simple.yaml (51%) delete mode 100644 impl/utils.go rename impl/{ => utils}/json_schema.go (84%) create mode 100644 impl/utils/utils.go diff --git a/.github/workflows/pull_request_labeler.yml b/.github/workflows/pull_request_labeler.yml deleted file mode 100644 index f270294..0000000 --- a/.github/workflows/pull_request_labeler.yml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: "Pull Request Labeler" -on: - - pull_request_target - -jobs: - labeler: - permissions: - contents: read - pull-requests: write - runs-on: ubuntu-latest - steps: - - uses: actions/labeler@v5 \ No newline at end of file diff --git a/Makefile b/Makefile index 767d158..34bfc91 100644 --- a/Makefile +++ b/Makefile @@ -11,12 +11,14 @@ goimports: @goimports -w . lint: - @echo "🚀 Running lint..." - @command -v golangci-lint > /dev/null || (echo "🚀 Installing golangci-lint..."; curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "${GOPATH}/bin") + @echo "🚀 Installing/updating golangci-lint…" + GO111MODULE=on go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest + + @echo "🚀 Running lint…" @make addheaders @make goimports @make fmt - @./hack/go-lint.sh ${params} + @$(GOPATH)/bin/golangci-lint run ./... ${params} @echo "✅ Linting completed!"
.PHONY: test diff --git a/impl/ctx/context.go b/impl/ctx/context.go index f013507..ff1d260 100644 --- a/impl/ctx/context.go +++ b/impl/ctx/context.go @@ -22,6 +22,8 @@ import ( "sync" "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + "github.com/google/uuid" "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -71,6 +73,7 @@ type WorkflowContext interface { SetLocalExprVars(vars map[string]interface{}) AddLocalExprVars(vars map[string]interface{}) RemoveLocalExprVars(keys ...string) + Clone() WorkflowContext } // workflowContext holds the necessary data for the workflow execution within the instance. @@ -118,6 +121,38 @@ func GetWorkflowContext(ctx context.Context) (WorkflowContext, error) { return wfCtx, nil } +func (ctx *workflowContext) Clone() WorkflowContext { + ctx.mu.Lock() + defer ctx.mu.Unlock() + + newInput := utils.DeepCloneValue(ctx.input) + newOutput := utils.DeepCloneValue(ctx.output) + + // deep clone each of the maps + newContextMap := utils.DeepClone(ctx.context) + newWorkflowDesc := utils.DeepClone(ctx.workflowDescriptor) + newTaskDesc := utils.DeepClone(ctx.taskDescriptor) + newLocalExprVars := utils.DeepClone(ctx.localExprVars) + + newStatusPhase := append([]StatusPhaseLog(nil), ctx.StatusPhase...) + + newTasksStatusPhase := make(map[string][]StatusPhaseLog, len(ctx.TasksStatusPhase)) + for taskName, logs := range ctx.TasksStatusPhase { + newTasksStatusPhase[taskName] = append([]StatusPhaseLog(nil), logs...) + } + + return &workflowContext{ + input: newInput, + output: newOutput, + context: newContextMap, + workflowDescriptor: newWorkflowDesc, + taskDescriptor: newTaskDesc, + localExprVars: newLocalExprVars, + StatusPhase: newStatusPhase, + TasksStatusPhase: newTasksStatusPhase, + } +} + func (ctx *workflowContext) SetStartedAt(t time.Time) { ctx.mu.Lock() defer ctx.mu.Unlock() diff --git a/impl/expr/expr.go b/impl/expr/expr.go index 60e2765..77faffb 100644 --- a/impl/expr/expr.go +++ b/impl/expr/expr.go @@ -132,3 +132,28 @@ func mergeContextInVars(nodeCtx context.Context, variables map[string]interface{ return nil } + +func TraverseAndEvaluateObj(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string, wfCtx context.Context) (output interface{}, err error) { + if runtimeExpr == nil { + return input, nil + } + output, err = TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input, wfCtx) + if err != nil { + return nil, model.NewErrExpression(err, taskName) + } + return output, nil +} + +func TraverseAndEvaluateBool(runtimeExpr string, input interface{}, wfCtx context.Context) (bool, error) { + if len(runtimeExpr) == 0 { + return false, nil + } + output, err := TraverseAndEvaluate(runtimeExpr, input, wfCtx) + if err != nil { + return false, nil + } + if result, ok := output.(bool); ok { + return result, nil + } + return false, nil +} diff --git a/impl/runner.go b/impl/runner.go index 362db1b..33d852a 100644 --- a/impl/runner.go +++ b/impl/runner.go @@ -19,6 +19,9 @@ import ( "fmt" "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -53,6 +56,18 @@ type workflowRunnerImpl struct { RunnerCtx ctx.WorkflowContext } +func (wr *workflowRunnerImpl) CloneWithContext(newCtx context.Context) TaskSupport { + clonedWfCtx := wr.RunnerCtx.Clone() + + ctxWithWf := ctx.WithWorkflowContext(newCtx, clonedWfCtx) + + return &workflowRunnerImpl{ + Workflow: wr.Workflow, + Context: ctxWithWf, + 
RunnerCtx: clonedWfCtx, + } +} + func (wr *workflowRunnerImpl) RemoveLocalExprVars(keys ...string) { wr.RunnerCtx.RemoveLocalExprVars(keys...) } @@ -175,13 +190,13 @@ func (wr *workflowRunnerImpl) wrapWorkflowError(err error) error { func (wr *workflowRunnerImpl) processInput(input interface{}) (output interface{}, err error) { if wr.Workflow.Input != nil { if wr.Workflow.Input.Schema != nil { - if err = validateSchema(input, wr.Workflow.Input.Schema, "/"); err != nil { + if err = utils.ValidateSchema(input, wr.Workflow.Input.Schema, "/"); err != nil { return nil, err } } if wr.Workflow.Input.From != nil { - output, err = traverseAndEvaluate(wr.Workflow.Input.From, input, "/", wr.Context) + output, err = expr.TraverseAndEvaluateObj(wr.Workflow.Input.From, input, "/", wr.Context) if err != nil { return nil, err } @@ -196,13 +211,13 @@ func (wr *workflowRunnerImpl) processOutput(output interface{}) (interface{}, er if wr.Workflow.Output != nil { if wr.Workflow.Output.As != nil { var err error - output, err = traverseAndEvaluate(wr.Workflow.Output.As, output, "/", wr.Context) + output, err = expr.TraverseAndEvaluateObj(wr.Workflow.Output.As, output, "/", wr.Context) if err != nil { return nil, err } } if wr.Workflow.Output.Schema != nil { - if err := validateSchema(output, wr.Workflow.Output.Schema, "/"); err != nil { + if err := utils.ValidateSchema(output, wr.Workflow.Output.Schema, "/"); err != nil { return nil, err } } diff --git a/impl/runner_test.go b/impl/runner_test.go index 9bb599c..5acdb6b 100644 --- a/impl/runner_test.go +++ b/impl/runner_test.go @@ -456,3 +456,14 @@ func TestSwitchTaskRunner_DefaultCase(t *testing.T) { runWorkflowTest(t, workflowPath, input, expectedOutput) }) } + +func TestForkSimple_NoCompete(t *testing.T) { + t.Run("Create a color array", func(t *testing.T) { + workflowPath := "./testdata/fork_simple.yaml" + input := map[string]interface{}{} + expectedOutput := map[string]interface{}{ + "colors": []interface{}{"red", "blue"}, + } + runWorkflowTest(t, workflowPath, input, expectedOutput) + }) +} diff --git a/impl/task_runner.go b/impl/task_runner.go index ea7b6dd..f825f79 100644 --- a/impl/task_runner.go +++ b/impl/task_runner.go @@ -53,4 +53,7 @@ type TaskSupport interface { AddLocalExprVars(vars map[string]interface{}) // RemoveLocalExprVars removes local variables added in AddLocalExprVars or SetLocalExprVars RemoveLocalExprVars(keys ...string) + // CloneWithContext returns a full clone of this TaskSupport, but using + // the provided context.Context (so deadlines/cancellations propagate). 
+ CloneWithContext(ctx context.Context) TaskSupport } diff --git a/impl/task_runner_do.go b/impl/task_runner_do.go index 0301009..8b63bfc 100644 --- a/impl/task_runner_do.go +++ b/impl/task_runner_do.go @@ -18,6 +18,9 @@ import ( "fmt" "time" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + "github.com/serverlessworkflow/sdk-go/v3/impl/ctx" "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -35,6 +38,8 @@ func NewTaskRunner(taskName string, task model.Task, workflowDef *model.Workflow return NewForTaskRunner(taskName, t) case *model.CallHTTP: return NewCallHttpRunner(taskName, t) + case *model.ForkTask: + return NewForkTaskRunner(taskName, t, workflowDef) default: return nil, fmt.Errorf("unsupported task type '%T' for task '%s'", t, taskName) } @@ -117,7 +122,7 @@ func (d *DoTaskRunner) runTasks(input interface{}, taskSupport TaskSupport) (out } taskSupport.SetTaskStatus(currentTask.Key, ctx.CompletedStatus) - input = deepCloneValue(output) + input = utils.DeepCloneValue(output) idx, currentTask = d.TaskList.Next(idx) } @@ -126,7 +131,7 @@ func (d *DoTaskRunner) runTasks(input interface{}, taskSupport TaskSupport) (out func (d *DoTaskRunner) shouldRunTask(input interface{}, taskSupport TaskSupport, task *model.TaskItem) (bool, error) { if task.GetBase().If != nil { - output, err := traverseAndEvaluateBool(task.GetBase().If.String(), input, taskSupport.GetContext()) + output, err := expr.TraverseAndEvaluateBool(task.GetBase().If.String(), input, taskSupport.GetContext()) if err != nil { return false, model.NewErrExpression(err, task.Key) } @@ -143,7 +148,7 @@ func (d *DoTaskRunner) evaluateSwitchTask(input interface{}, taskSupport TaskSup defaultThen = switchCase.Then continue } - result, err := traverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, taskSupport.GetContext()) + result, err := expr.TraverseAndEvaluateBool(model.NormalizeExpr(switchCase.When.String()), input, taskSupport.GetContext()) if err != nil { return nil, model.NewErrExpression(err, taskKey) } @@ -199,11 +204,11 @@ func (d *DoTaskRunner) processTaskInput(task *model.TaskBase, taskInput interfac return taskInput, nil } - if err = validateSchema(taskInput, task.Input.Schema, taskName); err != nil { + if err = utils.ValidateSchema(taskInput, task.Input.Schema, taskName); err != nil { return nil, err } - if output, err = traverseAndEvaluate(task.Input.From, taskInput, taskName, taskSupport.GetContext()); err != nil { + if output, err = expr.TraverseAndEvaluateObj(task.Input.From, taskInput, taskName, taskSupport.GetContext()); err != nil { return nil, err } @@ -216,11 +221,11 @@ func (d *DoTaskRunner) processTaskOutput(task *model.TaskBase, taskOutput interf return taskOutput, nil } - if output, err = traverseAndEvaluate(task.Output.As, taskOutput, taskName, taskSupport.GetContext()); err != nil { + if output, err = expr.TraverseAndEvaluateObj(task.Output.As, taskOutput, taskName, taskSupport.GetContext()); err != nil { return nil, err } - if err = validateSchema(output, task.Output.Schema, taskName); err != nil { + if err = utils.ValidateSchema(output, task.Output.Schema, taskName); err != nil { return nil, err } @@ -232,12 +237,12 @@ func (d *DoTaskRunner) processTaskExport(task *model.TaskBase, taskOutput interf return nil } - output, err := traverseAndEvaluate(task.Export.As, taskOutput, taskName, taskSupport.GetContext()) + output, err := expr.TraverseAndEvaluateObj(task.Export.As, taskOutput, taskName, taskSupport.GetContext()) if 
err != nil { return err } - if err = validateSchema(output, task.Export.Schema, taskName); err != nil { + if err = utils.ValidateSchema(output, task.Export.Schema, taskName); err != nil { return nil } diff --git a/impl/task_runner_for.go b/impl/task_runner_for.go index a53348d..90461f9 100644 --- a/impl/task_runner_for.go +++ b/impl/task_runner_for.go @@ -73,7 +73,7 @@ func (f *ForTaskRunner) Run(input interface{}, taskSupport TaskSupport) (interfa return nil, err } if f.Task.While != "" { - whileIsTrue, err := traverseAndEvaluateBool(f.Task.While, forOutput, taskSupport.GetContext()) + whileIsTrue, err := expr.TraverseAndEvaluateBool(f.Task.While, forOutput, taskSupport.GetContext()) if err != nil { return nil, err } diff --git a/impl/task_runner_fork.go b/impl/task_runner_fork.go new file mode 100644 index 0000000..9a68399 --- /dev/null +++ b/impl/task_runner_fork.go @@ -0,0 +1,120 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "fmt" + "sync" + + "github.com/serverlessworkflow/sdk-go/v3/model" +) + +func NewForkTaskRunner(taskName string, task *model.ForkTask, workflowDef *model.Workflow) (*ForkTaskRunner, error) { + if task == nil || task.Fork.Branches == nil { + return nil, model.NewErrValidation(fmt.Errorf("invalid Fork task %s", taskName), taskName) + } + + var runners []TaskRunner + for _, branchItem := range *task.Fork.Branches { + r, err := NewTaskRunner(branchItem.Key, branchItem.Task, workflowDef) + if err != nil { + return nil, err + } + runners = append(runners, r) + } + + return &ForkTaskRunner{ + Task: task, + TaskName: taskName, + BranchRunners: runners, + }, nil +} + +type ForkTaskRunner struct { + Task *model.ForkTask + TaskName string + BranchRunners []TaskRunner +} + +func (f ForkTaskRunner) GetTaskName() string { + return f.TaskName +} + +func (f ForkTaskRunner) Run(input interface{}, parentSupport TaskSupport) (interface{}, error) { + cancelCtx, cancel := context.WithCancel(parentSupport.GetContext()) + defer cancel() + + n := len(f.BranchRunners) + results := make([]interface{}, n) + errs := make(chan error, n) + done := make(chan struct{}) + resultCh := make(chan interface{}, 1) + + var ( + wg sync.WaitGroup + once sync.Once // <-- declare a Once + ) + + for i, runner := range f.BranchRunners { + wg.Add(1) + go func(i int, runner TaskRunner) { + defer wg.Done() + // **Isolate context** for each branch! 
+ branchSupport := parentSupport.CloneWithContext(cancelCtx) + + select { + case <-cancelCtx.Done(): + return + default: + } + + out, err := runner.Run(input, branchSupport) + if err != nil { + errs <- err + return + } + results[i] = out + + if f.Task.Fork.Compete { + select { + case resultCh <- out: + once.Do(func() { + cancel() // **signal cancellation** to all other branches + close(done) // signal we have a winner + }) + default: + } + } + }(i, runner) + } + + if f.Task.Fork.Compete { + select { + case <-done: + return <-resultCh, nil + case err := <-errs: + return nil, err + } + } + + wg.Wait() + select { + case err := <-errs: + return nil, err + default: + } + return results, nil +} diff --git a/impl/task_runner_fork_test.go b/impl/task_runner_fork_test.go new file mode 100644 index 0000000..f38b817 --- /dev/null +++ b/impl/task_runner_fork_test.go @@ -0,0 +1,101 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + "testing" + "time" + + "github.com/serverlessworkflow/sdk-go/v3/model" + "github.com/stretchr/testify/assert" +) + +// dummyRunner simulates a TaskRunner that returns its name after an optional delay. +type dummyRunner struct { + name string + delay time.Duration +} + +func (d *dummyRunner) GetTaskName() string { + return d.name +} + +func (d *dummyRunner) Run(input interface{}, ts TaskSupport) (interface{}, error) { + select { + case <-ts.GetContext().Done(): + // canceled + return nil, ts.GetContext().Err() + case <-time.After(d.delay): + // complete after delay + return d.name, nil + } +} + +func TestForkTaskRunner_NonCompete(t *testing.T) { + // Prepare a TaskSupport with a background context + ts := newTaskSupport(withContext(context.Background())) + + // Two branches that complete immediately + branches := []TaskRunner{ + &dummyRunner{name: "r1", delay: 0}, + &dummyRunner{name: "r2", delay: 0}, + } + fork := ForkTaskRunner{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: false, + }, + }, + TaskName: "fork", + BranchRunners: branches, + } + + output, err := fork.Run("in", ts) + assert.NoError(t, err) + + results, ok := output.([]interface{}) + assert.True(t, ok, "expected output to be []interface{}") + assert.Equal(t, []interface{}{"r1", "r2"}, results) +} + +func TestForkTaskRunner_Compete(t *testing.T) { + // Prepare a TaskSupport with a background context + ts := newTaskSupport(withContext(context.Background())) + + // One fast branch and one slow branch + branches := []TaskRunner{ + &dummyRunner{name: "fast", delay: 10 * time.Millisecond}, + &dummyRunner{name: "slow", delay: 50 * time.Millisecond}, + } + fork := ForkTaskRunner{ + Task: &model.ForkTask{ + Fork: model.ForkTaskConfiguration{ + Compete: true, + }, + }, + TaskName: "fork", + BranchRunners: branches, + } + + start := time.Now() + output, err := fork.Run("in", ts) + elapsed := time.Since(start) + + assert.NoError(t, err) + assert.Equal(t, "fast", output) + // ensure 
compete returns before the slow branch would finish + assert.Less(t, elapsed, 50*time.Millisecond, "compete should cancel the slow branch") +} diff --git a/impl/task_runner_raise.go b/impl/task_runner_raise.go index 0de588f..dddaf0c 100644 --- a/impl/task_runner_raise.go +++ b/impl/task_runner_raise.go @@ -17,6 +17,7 @@ package impl import ( "fmt" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -71,13 +72,13 @@ func (r *RaiseTaskRunner) Run(input interface{}, taskSupport TaskSupport) (outpu output = input // TODO: make this an external func so we can call it after getting the reference? Or we can get the reference from the workflow definition var detailResult interface{} - detailResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) + detailResult, err = expr.TraverseAndEvaluateObj(r.Task.Raise.Error.Definition.Detail.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } var titleResult interface{} - titleResult, err = traverseAndEvaluate(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) + titleResult, err = expr.TraverseAndEvaluateObj(r.Task.Raise.Error.Definition.Title.AsObjectOrRuntimeExpr(), input, r.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } diff --git a/impl/task_runner_set.go b/impl/task_runner_set.go index 40ff185..f2aaaa9 100644 --- a/impl/task_runner_set.go +++ b/impl/task_runner_set.go @@ -17,6 +17,9 @@ package impl import ( "fmt" + "github.com/serverlessworkflow/sdk-go/v3/impl/expr" + "github.com/serverlessworkflow/sdk-go/v3/impl/utils" + "github.com/serverlessworkflow/sdk-go/v3/model" ) @@ -40,8 +43,8 @@ func (s *SetTaskRunner) GetTaskName() string { } func (s *SetTaskRunner) Run(input interface{}, taskSupport TaskSupport) (output interface{}, err error) { - setObject := deepClone(s.Task.Set) - result, err := traverseAndEvaluate(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, taskSupport.GetContext()) + setObject := utils.DeepClone(s.Task.Set) + result, err := expr.TraverseAndEvaluateObj(model.NewObjectOrRuntimeExpr(setObject), input, s.TaskName, taskSupport.GetContext()) if err != nil { return nil, err } diff --git a/impl/task_set_test.go b/impl/task_runner_set_test.go similarity index 100% rename from impl/task_set_test.go rename to impl/task_runner_set_test.go diff --git a/.github/labeler.yml b/impl/testdata/fork_simple.yaml similarity index 51% rename from .github/labeler.yml rename to impl/testdata/fork_simple.yaml index 49abd17..044b1e2 100644 --- a/.github/labeler.yml +++ b/impl/testdata/fork_simple.yaml @@ -1,10 +1,10 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2025 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,9 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"documentation :notebook:": - - changed-files: - - any-glob-to-any-file: ['contrib/*', '**/*.md'] -kubernetes: - - changed-files: - - any-glob-to-any-file: ['kubernetes/*', 'hack/builder-gen.sh', 'hack/deepcopy-gen.sh', 'Makefile'] +document: + dsl: '1.0.0' + namespace: test + name: fork-example + version: '0.1.0' +do: + - branchColors: + fork: + compete: false + branches: + - setRed: + set: + color1: red + - setBlue: + set: + color2: blue + - joinResult: + set: + colors: "${ [.[] | .[]] }" diff --git a/impl/utils.go b/impl/utils.go deleted file mode 100644 index a62559d..0000000 --- a/impl/utils.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2025 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package impl - -import ( - "context" - - "github.com/serverlessworkflow/sdk-go/v3/impl/expr" - "github.com/serverlessworkflow/sdk-go/v3/model" -) - -// Deep clone a map to avoid modifying the original object -func deepClone(obj map[string]interface{}) map[string]interface{} { - clone := make(map[string]interface{}) - for key, value := range obj { - clone[key] = deepCloneValue(value) - } - return clone -} - -func deepCloneValue(value interface{}) interface{} { - if m, ok := value.(map[string]interface{}); ok { - return deepClone(m) - } - if s, ok := value.([]interface{}); ok { - clonedSlice := make([]interface{}, len(s)) - for i, v := range s { - clonedSlice[i] = deepCloneValue(v) - } - return clonedSlice - } - return value -} - -func validateSchema(data interface{}, schema *model.Schema, taskName string) error { - if schema != nil { - if err := ValidateJSONSchema(data, schema); err != nil { - return model.NewErrValidation(err, taskName) - } - } - return nil -} - -func traverseAndEvaluate(runtimeExpr *model.ObjectOrRuntimeExpr, input interface{}, taskName string, wfCtx context.Context) (output interface{}, err error) { - if runtimeExpr == nil { - return input, nil - } - output, err = expr.TraverseAndEvaluate(runtimeExpr.AsStringOrMap(), input, wfCtx) - if err != nil { - return nil, model.NewErrExpression(err, taskName) - } - return output, nil -} - -func traverseAndEvaluateBool(runtimeExpr string, input interface{}, wfCtx context.Context) (bool, error) { - if len(runtimeExpr) == 0 { - return false, nil - } - output, err := expr.TraverseAndEvaluate(runtimeExpr, input, wfCtx) - if err != nil { - return false, nil - } - if result, ok := output.(bool); ok { - return result, nil - } - return false, nil -} diff --git a/impl/json_schema.go b/impl/utils/json_schema.go similarity index 84% rename from impl/json_schema.go rename to impl/utils/json_schema.go index 396f9f5..9b91553 100644 --- a/impl/json_schema.go +++ b/impl/utils/json_schema.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package impl +package utils import ( "encoding/json" @@ -23,8 +23,8 @@ import ( "github.com/xeipuuv/gojsonschema" ) -// ValidateJSONSchema validates the provided data against a model.Schema. 
-func ValidateJSONSchema(data interface{}, schema *model.Schema) error { +// validateJSONSchema validates the provided data against a model.Schema. +func validateJSONSchema(data interface{}, schema *model.Schema) error { if schema == nil { return nil } @@ -68,3 +68,12 @@ func ValidateJSONSchema(data interface{}, schema *model.Schema) error { return nil } + +func ValidateSchema(data interface{}, schema *model.Schema, taskName string) error { + if schema != nil { + if err := validateJSONSchema(data, schema); err != nil { + return model.NewErrValidation(err, taskName) + } + } + return nil +} diff --git a/impl/utils/utils.go b/impl/utils/utils.go new file mode 100644 index 0000000..f444139 --- /dev/null +++ b/impl/utils/utils.go @@ -0,0 +1,38 @@ +// Copyright 2025 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +// DeepClone a map to avoid modifying the original object +func DeepClone(obj map[string]interface{}) map[string]interface{} { + clone := make(map[string]interface{}) + for key, value := range obj { + clone[key] = DeepCloneValue(value) + } + return clone +} + +func DeepCloneValue(value interface{}) interface{} { + if m, ok := value.(map[string]interface{}); ok { + return DeepClone(m) + } + if s, ok := value.([]interface{}); ok { + clonedSlice := make([]interface{}, len(s)) + for i, v := range s { + clonedSlice[i] = DeepCloneValue(v) + } + return clonedSlice + } + return value +} From edd5f5e89136c8bd756e6058cddda98e923f89d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Giovanny=20Guti=C3=A9rrez?= Date: Fri, 2 May 2025 09:57:44 -0500 Subject: [PATCH 107/110] fix: Endpoint configuration should also accept expressions (#225) Signed-off-by: Gio Gutierrez --- model/endpoint.go | 36 ++++++++++++++++++++++++++++++------ model/endpoint_test.go | 42 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 6 deletions(-) diff --git a/model/endpoint.go b/model/endpoint.go index 38e2cea..cd9ee88 100644 --- a/model/endpoint.go +++ b/model/endpoint.go @@ -95,8 +95,9 @@ func (u *LiteralUri) GetValue() interface{} { } type EndpointConfiguration struct { - URI URITemplate `json:"uri" validate:"required"` - Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` + RuntimeExpression *RuntimeExpression `json:"-"` + URI URITemplate `json:"uri" validate:"required"` + Authentication *ReferenceableAuthenticationPolicy `json:"authentication,omitempty"` } // UnmarshalJSON implements custom unmarshalling for EndpointConfiguration. 
@@ -116,12 +117,35 @@ func (e *EndpointConfiguration) UnmarshalJSON(data []byte) error { // Unmarshal the URI field into the appropriate URITemplate implementation uri, err := UnmarshalURITemplate(temp.URI) - if err != nil { - return fmt.Errorf("invalid URI in EndpointConfiguration: %w", err) + if err == nil { + e.URI = uri + return nil + } + + var runtimeExpr RuntimeExpression + if err := json.Unmarshal(temp.URI, &runtimeExpr); err == nil && runtimeExpr.IsValid() { + e.RuntimeExpression = &runtimeExpr + return nil } - e.URI = uri - return nil + return errors.New("failed to unmarshal EndpointConfiguration: data does not match any known schema") +} + +// MarshalJSON implements custom marshalling for Endpoint. +func (e *EndpointConfiguration) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}) + if e.Authentication != nil { + m["authentication"] = e.Authentication + } + + if e.RuntimeExpression != nil { + m["uri"] = e.RuntimeExpression + } else if e.URI != nil { + m["uri"] = e.URI + } + + // Return an empty JSON object when no fields are set + return json.Marshal(m) } type Endpoint struct { diff --git a/model/endpoint_test.go b/model/endpoint_test.go index 974216e..db2fce5 100644 --- a/model/endpoint_test.go +++ b/model/endpoint_test.go @@ -71,6 +71,48 @@ func TestEndpoint_UnmarshalJSON(t *testing.T) { assert.Equal(t, "admin", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.Basic.Password, "Authentication Password should match") }) + t.Run("Valid EndpointConfiguration with reference", func(t *testing.T) { + input := `{ + "uri": "http://example.com/{id}", + "authentication": { + "oauth2": { "use": "secret" } + } + }` + + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.NotNil(t, endpoint.EndpointConfig.URI, "EndpointConfig URI should be set") + assert.Nil(t, endpoint.EndpointConfig.RuntimeExpression, "EndpointConfig Expression should not be set") + assert.Equal(t, "secret", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.OAuth2.Use, "Authentication secret should match") + b, err := json.Marshal(&endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, input, string(b), "Output JSON should match") + }) + + t.Run("Valid EndpointConfiguration with reference and expression", func(t *testing.T) { + input := `{ + "uri": "${example}", + "authentication": { + "oauth2": { "use": "secret" } + } + }` + + var endpoint Endpoint + err := json.Unmarshal([]byte(input), &endpoint) + + assert.NoError(t, err, "Unmarshal should not return an error") + assert.NotNil(t, endpoint.EndpointConfig, "EndpointConfig should be set") + assert.Nil(t, endpoint.EndpointConfig.URI, "EndpointConfig URI should not be set") + assert.NotNil(t, endpoint.EndpointConfig.RuntimeExpression, "EndpointConfig Expression should be set") + assert.Equal(t, "secret", endpoint.EndpointConfig.Authentication.AuthenticationPolicy.OAuth2.Use, "Authentication secret should match") + b, err := json.Marshal(&endpoint) + assert.NoError(t, err, "Marshal should not return an error") + assert.JSONEq(t, input, string(b), "Output JSON should match") + }) + t.Run("Invalid JSON Structure", func(t *testing.T) { input := `{"invalid": "data"}` var endpoint Endpoint From 7a905eb18fa22a99729a3f3e8ce26555dd1ddf41 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 
2 May 2025 11:02:11 -0400 Subject: [PATCH 108/110] Update README releases table --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1a6654e..2aa64b5 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ This table indicates the current state of implementation of various SDK features | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | -| [v2.4.3](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.4.1) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | +| [v2.5.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.5.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | | [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0) | --- From e245973ce94a89710eda9332c2bfc2c0aae28461 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 2 May 2025 11:04:22 -0400 Subject: [PATCH 109/110] Update implementation Roadmap - Add Fork --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2aa64b5..296fcde 100644 --- a/README.md +++ b/README.md @@ -127,7 +127,7 @@ The table below lists the current state of this implementation. This table is a | Task Do | βœ… | | Task Emit | ❌ | | Task For | βœ… | -| Task Fork | ❌ | +| Task Fork | βœ… | | Task Listen | ❌ | | Task Raise | βœ… | | Task Run | ❌ | From 592f31d64f24a3afd4d10040ae5a488f9600158b Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Fri, 2 May 2025 11:06:38 -0400 Subject: [PATCH 110/110] Prepare v3.1.0 release --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 296fcde..36f11c8 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ This table indicates the current state of implementation of various SDK features | [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | | [v2.1.2](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.2) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | | [v2.5.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.5.0) | [v0.8](https://github.com/serverlessworkflow/specification/tree/0.8.x) | -| [v3.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.0.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0) | +| [v3.1.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v3.1.0) | [v1.0.0](https://github.com/serverlessworkflow/specification/releases/tag/v1.0.0) | ---