diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml index f34e453..3f3ee52 100644 --- a/.github/workflows/python-ci.yml +++ b/.github/workflows/python-ci.yml @@ -8,24 +8,24 @@ on: jobs: tests: - name: "Python ${{ matrix.python-version }}" + name: "Python 3.9" runs-on: "ubuntu-latest" - - strategy: - matrix: - python-version: ["3.9"] - steps: - - uses: "actions/checkout@v2" - - uses: "actions/setup-python@v2" + - uses: "actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683" # v4.2.2 + - uses: "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065" # v5.6.0 with: - python-version: "${{ matrix.python-version }}" + python-version: '3.9' + - name: Install graphviz + run: | + sudo apt-get update + sudo apt-get install graphviz graphviz-dev - name: Install dependencies run: | pip install pipenv pip install build - pipenv install --dev + pipenv install --dev --system + pip install setuptools==70.3.0 - name: Test run: | pipenv run pytest diff --git a/Pipfile b/Pipfile index a960d1e..122a90e 100644 --- a/Pipfile +++ b/Pipfile @@ -7,10 +7,12 @@ name = "pypi" jsonschema = "==4.4.0" pyyaml = "==6.0" requests = "*" +pygraphviz = "==1.11" +transitions = "==0.9.2" [dev-packages] pytest = "==6.2.5" pytest-runner = "==5.3.1" [requires] -python_version = "3" +python_version = "3.9" diff --git a/Pipfile.lock b/Pipfile.lock index fc4c845..441484f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "0dab8d21fd675638bd5d9dd5ba402e4aafe1d094c2bf402b90d06fd210e0077a" + "sha256": "f618c8a1f7bfa991732719216d5ac4c908d421c47ec2b6a0a13d7bca4d8f4da1" }, "pipfile-spec": 6, "requires": { - "python_version": "3" + "python_version": "3.9" }, "sources": [ { @@ -18,34 +18,125 @@ "default": { "attrs": { "hashes": [ - "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", - "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" + "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", + "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.4.0" + "markers": "python_version >= '3.8'", + "version": "==25.3.0" }, "certifi": { "hashes": [ - "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", - "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", + "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b" ], - "version": "==2021.10.8" + "markers": "python_version >= '3.7'", + "version": "==2025.6.15" }, "charset-normalizer": { "hashes": [ - "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd", - "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455" + "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", + "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45", + "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", + "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", + "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", + "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", + "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d", + "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", + 
"sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184", + "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", + "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b", + "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64", + "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", + "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", + "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", + "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344", + "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58", + "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", + "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", + "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", + "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", + "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", + "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", + "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", + "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", + "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1", + "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01", + "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", + "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58", + "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", + "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", + "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2", + "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a", + "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", + "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", + "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5", + "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb", + "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f", + "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", + "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", + "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", + "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", + "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7", + "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", + "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455", + "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", + "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4", + "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", + "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", + "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", + "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", + "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", + "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", + "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", + 
"sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", + "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", + "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", + "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa", + "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", + "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", + "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", + "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", + "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", + "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", + "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02", + "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", + "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", + "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", + "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", + "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", + "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", + "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", + "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681", + "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", + "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", + "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a", + "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", + "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", + "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", + "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", + "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027", + "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", + "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", + "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", + "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", + "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", + "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", + "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da", + "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", + "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f", + "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", + "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f" ], - "markers": "python_version >= '3'", - "version": "==2.0.10" + "markers": "python_version >= '3.7'", + "version": "==3.4.2" }, "idna": { "hashes": [ - "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", - "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" + "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", + "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" ], - "markers": "python_version >= '3'", - "version": "==3.3" + "markers": "python_version >= '3.6'", + "version": "==3.10" }, "jsonschema": { "hashes": [ @@ -53,37 +144,58 
@@ "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==4.4.0" }, + "pygraphviz": { + "hashes": [ + "sha256:a97eb5ced266f45053ebb1f2c6c6d29091690503e3a5c14be7f908b37b06f2d4" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.11" + }, "pyrsistent": { "hashes": [ - "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c", - "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc", - "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e", - "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26", - "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec", - "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286", - "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045", - "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec", - "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8", - "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c", - "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca", - "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22", - "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a", - "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96", - "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc", - "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1", - "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07", - "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6", - "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b", - "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5", - "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6" + "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f", + "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e", + "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958", + "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34", + "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca", + "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d", + "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d", + "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4", + "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714", + "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf", + "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee", + "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8", + "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224", + "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d", + "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054", + "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656", + "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7", + "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423", + "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce", + "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e", + 
"sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3", + "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0", + "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f", + "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b", + "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce", + "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a", + "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174", + "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86", + "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f", + "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b", + "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98", + "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022" ], - "markers": "python_version >= '3.7'", - "version": "==0.18.1" + "markers": "python_version >= '3.8'", + "version": "==0.20.0" }, "pyyaml": { "hashes": [ + "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf", "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293", "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b", "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57", @@ -95,80 +207,105 @@ "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287", "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513", "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0", + "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782", "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0", "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92", "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f", "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2", "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc", + "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1", "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c", "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86", "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4", "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c", "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34", "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b", + "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d", "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c", "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb", + "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7", "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737", "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3", "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d", + "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358", "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53", "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78", "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803", "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a", + 
"sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f", "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174", "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==6.0" }, "requests": { "hashes": [ - "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", - "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", + "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422" ], "index": "pypi", - "version": "==2.27.1" + "markers": "python_version >= '3.8'", + "version": "==2.32.4" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "transitions": { + "hashes": [ + "sha256:2f8490dbdbd419366cef1516032ab06d07ccb5839ef54905e842a472692d4204", + "sha256:f7b40c9b4a93869f36c4d1c33809aeb18cdeeb065fd1adba018ee39c3db216f3" + ], + "index": "pypi", + "version": "==0.9.2" }, "urllib3": { "hashes": [ - "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed", - "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c" + "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", + "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.8" + "markers": "python_version >= '3.9'", + "version": "==2.5.0" } }, "develop": { "attrs": { "hashes": [ - "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", - "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" + "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", + "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.4.0" + "markers": "python_version >= '3.8'", + "version": "==25.3.0" }, "iniconfig": { "hashes": [ - "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", - "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", + "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" ], - "version": "==1.1.1" + "markers": "python_version >= '3.8'", + "version": "==2.1.0" }, "packaging": { "hashes": [ - "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", - "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" + "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", + "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" ], - "markers": "python_version >= '3.6'", - "version": "==21.3" + "markers": "python_version >= '3.8'", + "version": "==25.0" }, "pluggy": { "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" + "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", + 
"sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" + "markers": "python_version >= '3.9'", + "version": "==1.6.0" }, "py": { "hashes": [ @@ -178,20 +315,13 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.11.0" }, - "pyparsing": { - "hashes": [ - "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4", - "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81" - ], - "markers": "python_version >= '3.6'", - "version": "==3.0.6" - }, "pytest": { "hashes": [ "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==6.2.5" }, "pytest-runner": { @@ -200,6 +330,7 @@ "sha256:85f93af814438ee322b4ea08fe3f5c2ad53b253577f3bd84b2ad451fee450ac5" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==5.3.1" }, "toml": { @@ -207,7 +338,7 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", "version": "==0.10.2" } } diff --git a/README.md b/README.md index fb48981..5916476 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,9 @@ Current sdk version conforms to the [Serverless Workflow specification v0.8](htt - pipenv required `pip install pipenv` ``` -pipenv install --dev +pipenv install --dev + +pipenv run pip install 'setuptools==70.3.0' pipenv shell @@ -132,4 +134,33 @@ WorkflowValidator(Workflow(workflow)).validate() ``` The `validate` method will raise an exception if the provided workflow does not complaint specification. -You can see a full example in the [test_workflow_validator](tests/serverlessworkflow/sdk/test_workflow_validator.py) file \ No newline at end of file +You can see a full example in the [test_workflow_validator](tests/serverlessworkflow/sdk/test_workflow_validator.py) file + +## Generate workflow state machine and graph + +To generate the workflow graph diagram: + +```python +from serverlessworkflow.sdk.workflow import Workflow +from serverlessworkflow.sdk.state_machine_helper import StateMachineHelper + +def main(): + subflows = [] + with open("tests/examples/graph.json") as f: + workflow = Workflow.from_source(f.read()) + with open("tests/examples/advertise-listing.json") as f: + subflows.append(Workflow.from_source(f.read())) + with open("tests/examples/second-subgraph.json") as f: + subflows.append(Workflow.from_source(f.read())) + machine_helper = StateMachineHelper(workflow=workflow, get_actions=True, subflows=subflows) + machine_helper.draw('diagram.svg') + + +if __name__ == "__main__": + main() +``` + +The `StateMachineHelper` can be set with `get_actions` as `False` and the produced diagram will not represent the actions inside each state (it will only create a diagram with the states and their transitions). Moreover, the developer may not give any `subflows`, and they simply will not be generated. +As for the `draw` method, the developer can also specify `graph_engine='mermaid'`. In that case, the method will not generate a figure, but rather the Mermaid code that can be executed, for instance, in the [Mermaid Live Editor](https://mermaid.live). 
+ +It is also possible to only generate the workflow state machine. An example on how to do so can be analyzed in the [state_machine_helper](serverlessworkflow/sdk/state_machine_helper.py) source code. diff --git a/code-of-conduct.md b/code-of-conduct.md index c868ede..97a8526 100644 --- a/code-of-conduct.md +++ b/code-of-conduct.md @@ -1,59 +1,11 @@ -## CNCF Community Code of Conduct v1.0 +# Code of Conduct -Other languages available: -- [Chinese/中文](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/zh.md) -- [German/Deutsch](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/de.md) -- [Spanish/Español](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/es.md) -- [French/Français](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/fr.md) -- [Italian/Italiano](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/it.md) -- [Japanese/日本語](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/jp.md) -- [Korean/한국어](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ko.md) -- [Ukrainian/Українська](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/uk.md) -- [Russian/Русский](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ru.md) -- [Portuguese/Português](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pt.md) -- [Arabic/العربية](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/ar.md) -- [Polish/Polski](https://github.com/cncf/foundation/blob/master/code-of-conduct-languages/pl.md) +We follow the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/main/code-of-conduct.md). -### Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering -an open and welcoming community, we pledge to respect all people who contribute -through reporting issues, posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for -everyone, regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, body size, race, ethnicity, age, -religion, or nationality. - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing others' private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct. - -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are not -aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers -commit themselves to fairly and consistently applying these principles to every aspect -of managing this project. Project maintainers who do not follow or enforce the Code of -Conduct may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. 
- -Instances of abusive, harassing, or otherwise unacceptable behavior in Kubernetes may be reported by contacting the [Kubernetes Code of Conduct Committee](https://git.k8s.io/community/committee-code-of-conduct) via conduct@kubernetes.io. For other projects, please contact a CNCF project maintainer or our mediator, Mishi Choudhary via mishi@linux.com. - -This Code of Conduct is adapted from the Contributor Covenant -(http://contributor-covenant.org), version 1.2.0, available at -http://contributor-covenant.org/version/1/2/0/ - - -### CNCF Events Code of Conduct - -CNCF events are governed by the Linux Foundation [Code of Conduct](https://events.linuxfoundation.org/code-of-conduct/) available on the event page. -This is designed to be compatible with the above policy and also includes more details on responding to incidents. \ No newline at end of file + +Please contact the [CNCF Code of Conduct Committee](mailto:conduct@cncf.io) +in order to report violations of the Code of Conduct. diff --git a/serverlessworkflow/sdk/callback_state.py b/serverlessworkflow/sdk/callback_state.py index a0e64af..1b73efb 100644 --- a/serverlessworkflow/sdk/callback_state.py +++ b/serverlessworkflow/sdk/callback_state.py @@ -49,7 +49,7 @@ def __init__(self, **kwargs): _default_values = {'type': 'callback', 'usedForCompensation': False, } - SwfBase.__init__(self, locals(), kwargs, SwfBase.default_hydration, _default_values) + SwfBase.__init__(self, locals(), kwargs, CallbackState.f_hydration, _default_values) @staticmethod def f_hydration(p_key, p_value): diff --git a/serverlessworkflow/sdk/on_events.py b/serverlessworkflow/sdk/on_events.py index d052464..22d5030 100644 --- a/serverlessworkflow/sdk/on_events.py +++ b/serverlessworkflow/sdk/on_events.py @@ -19,7 +19,7 @@ def __init__(self, **kwargs): _default_values = {'actionMode': 'sequential'} - SwfBase.__init__(self, locals(), kwargs, SwfBase.default_hydration, _default_values) + SwfBase.__init__(self, locals(), kwargs, OnEvents.f_hydration, _default_values) @staticmethod def f_hydration(p_key, p_value): diff --git a/serverlessworkflow/sdk/state_machine_extensions.py b/serverlessworkflow/sdk/state_machine_extensions.py new file mode 100644 index 0000000..e6a0fff --- /dev/null +++ b/serverlessworkflow/sdk/state_machine_extensions.py @@ -0,0 +1,41 @@ +from transitions.extensions.states import add_state_features, Tags, State +from transitions.extensions import ( + HierarchicalMachine, + GraphMachine, + HierarchicalGraphMachine, +) + + +class Metadata(State): + """Allows states to have metadata. + Attributes: + metadata (dict): A dictionary with the state metadata. + """ + + def __init__(self, *args, **kwargs): + """ + Args: + **kwargs: If kwargs contains `metadata`, assign them to the attribute. 
+ """ + self.metadata = kwargs.pop("metadata", None) + super(Metadata, self).__init__(*args, **kwargs) + + def __getattr__(self, key): + if value := self.metadata.get(key) is not None: + return value + return super(Metadata, self).__getattribute__(key) + + +@add_state_features(Tags, Metadata) +class CustomHierarchicalMachine(HierarchicalMachine): + pass + + +@add_state_features(Tags, Metadata) +class CustomHierarchicalGraphMachine(HierarchicalGraphMachine): + pass + + +@add_state_features(Tags, Metadata) +class CustomGraphMachine(GraphMachine): + pass diff --git a/serverlessworkflow/sdk/state_machine_generator.py b/serverlessworkflow/sdk/state_machine_generator.py new file mode 100644 index 0000000..a1e118b --- /dev/null +++ b/serverlessworkflow/sdk/state_machine_generator.py @@ -0,0 +1,533 @@ +from typing import Any, Dict, List, Optional, Union +from serverlessworkflow.sdk.action import Action +from serverlessworkflow.sdk.function_ref import FunctionRef +from serverlessworkflow.sdk.state_machine_extensions import ( + CustomGraphMachine, + CustomHierarchicalGraphMachine, + CustomHierarchicalMachine, +) +from serverlessworkflow.sdk.transition import Transition +from serverlessworkflow.sdk.workflow import ( + State, + EventState, + SleepState, + CallbackState, + DataBasedSwitchState, + InjectState, + EventBasedSwitchState, + ParallelState, + OperationState, + ForEachState, + Workflow, +) +from serverlessworkflow.sdk.transition_data_condition import TransitionDataCondition +from serverlessworkflow.sdk.end_data_condition import EndDataCondition + +from transitions.extensions import HierarchicalMachine, GraphMachine +from transitions.extensions.nesting import NestedState +import warnings + +NestedState.separator = "." + + +class StateMachineGenerator: + def __init__( + self, + workflow: Workflow, + state_machine: Union[CustomHierarchicalMachine, CustomGraphMachine], + subflows: List[Workflow] = [], + get_actions=False, + ): + self.workflow = workflow + self.state_machine = state_machine + self.get_actions = get_actions + self.subflows = subflows + + self.is_first_state = False + self.current_state: State = None + + if ( + self.get_actions + and not isinstance(self.state_machine, CustomHierarchicalMachine) + and not isinstance(self.state_machine, CustomHierarchicalGraphMachine) + ): + raise AttributeError( + "The provided state machine must be of the CustomHierarchicalMachine or CustomHierarchicalGraphMachine types." + ) + if not self.get_actions and ( + isinstance(self.state_machine, CustomHierarchicalMachine) + or isinstance(self.state_machine, CustomHierarchicalGraphMachine) + ): + raise AttributeError( + "The provided state machine can not be of the CustomHierarchicalMachine or CustomHierarchicalGraphMachine types." 
+ ) + + def generate(self): + for self.current_state in self.workflow.states: + self.is_first_state = self.workflow.start == self.current_state.name + self.definitions() + self.transitions() + + def transitions(self): + self.start_transition() + self.data_conditions_transitions() + self.event_conditions_transition() + self.error_transitions() + self.natural_transition( + self.current_state.name, + ( + self.current_state.transition + if hasattr(self.current_state, "transition") + else None + ), + ) + self.compensated_by_transition() + self.end_transition() + + def start_transition(self): + if self.is_first_state: + self.state_machine._initial = self.current_state.name + + def data_conditions_transitions(self): + if isinstance(self.current_state, DataBasedSwitchState): + data_conditions = self.current_state.dataConditions + if data_conditions: + state_name = self.current_state.name + for data_condition in data_conditions: + if isinstance(data_condition, TransitionDataCondition): + transition = data_condition.transition + condition = data_condition.condition + self.natural_transition(state_name, transition, condition) + if ( + isinstance(data_condition, EndDataCondition) + and data_condition.end + ): + condition = data_condition.condition + self.end_state(state_name, condition=condition) + self.default_condition_transition(self.current_state) + + def event_conditions_transition(self): + if isinstance(self.current_state, EventBasedSwitchState): + event_conditions = self.current_state.eventConditions + if event_conditions: + state_name = self.current_state.name + for event_condition in event_conditions: + transition = event_condition.transition + event_ref = event_condition.eventRef + self.natural_transition(state_name, transition, event_ref) + if event_condition.end: + self.end_state(state_name, condition=event_ref) + self.default_condition_transition(self.current_state) + + def default_condition_transition(self, state: State): + if hasattr(state, "defaultCondition"): + default_condition = state.defaultCondition + if default_condition: + self.natural_transition( + self.current_state.name, default_condition.transition, "default" + ) + + def end_transition(self): + if hasattr(self.current_state, "end") and self.current_state.end: + self.end_state(self.current_state.name) + + def natural_transition( + self, + source: str, + target: Union[str, Transition], + label: Optional[str] = None, + ): + if target: + if isinstance(target, Transition): + desc_transition = target.nextState + else: + desc_transition = target + if source not in self.state_machine.states.keys(): + self.state_machine.add_states(source) + if desc_transition not in self.state_machine.states.keys(): + self.state_machine.add_states(desc_transition) + self.state_machine.add_transition( + trigger=label if label else "", source=source, dest=desc_transition + ) + + def error_transitions(self): + if hasattr(self.current_state, "onErrors") and ( + on_errors := self.current_state.onErrors + ): + for error in on_errors: + self.natural_transition( + self.current_state.name, + error.transition, + error.errorRef, + ) + + def compensated_by_transition(self): + compensated_by = self.current_state.compensatedBy + if compensated_by: + self.natural_transition( + self.current_state.name, compensated_by, "compensated by" + ) + + def definitions(self): + state_type = self.current_state.type + if state_type == "sleep": + self.sleep_state_details() + elif state_type == "event": + self.event_state_details() + elif state_type == "operation": + 
self.operation_state_details() + elif state_type == "parallel": + self.parallel_state_details() + elif state_type == "switch": + if self.current_state.dataConditions: + self.data_based_switch_state_details() + elif self.current_state.eventConditions: + self.event_based_switch_state_details() + else: + raise Exception( + f"Unexpected switch type;\n state value= {self.current_state}" + ) + elif state_type == "inject": + self.inject_state_details() + elif state_type == "foreach": + self.foreach_state_details() + elif state_type == "callback": + self.callback_state_details() + else: + raise Exception( + f"Unexpected type= {state_type};\n state value= {self.current_state}" + ) + + def parallel_state_details(self): + if isinstance(self.current_state, ParallelState): + self.state_to_machine_state(["parallel_state", "state"]) + + state_name = self.current_state.name + branches = self.current_state.branches + if branches: + if self.get_actions: + self.state_machine.get_state(state_name).initial = [] + for branch in branches: + if hasattr(branch, "actions") and branch.actions: + branch_name = branch.name + self.state_machine.get_state(state_name).add_substates( + branch_state := self.state_machine.state_cls( + branch_name + ) + ) + self.state_machine.get_state(state_name).initial.append( + branch_name + ) + branch_state.tags = ["branch"] + branch_state.metadata = { + "branch": self.current_state.serialize().__dict__ + } + self.generate_actions_info( + machine_state=branch_state, + state_name=f"{state_name}.{branch_name}", + actions=branch.actions, + ) + + def event_based_switch_state_details(self): + if isinstance(self.current_state, EventBasedSwitchState): + self.state_to_machine_state( + ["event_based_switch_state", "switch_state", "state"] + ) + + def data_based_switch_state_details(self): + if isinstance(self.current_state, DataBasedSwitchState): + self.state_to_machine_state( + ["data_based_switch_state", "switch_state", "state"] + ) + + def inject_state_details(self): + if isinstance(self.current_state, InjectState): + self.state_to_machine_state(["inject_state", "state"]) + + def operation_state_details(self): + if isinstance(self.current_state, OperationState): + machine_state = self.state_to_machine_state(["operation_state", "state"]) + self.generate_actions_info( + machine_state=machine_state, + state_name=self.current_state.name, + actions=self.current_state.actions, + action_mode=self.current_state.actionMode, + ) + + def sleep_state_details(self): + if isinstance(self.current_state, SleepState): + self.state_to_machine_state(["sleep_state", "state"]) + + def event_state_details(self): + if isinstance(self.current_state, EventState): + self.state_to_machine_state(["event_state", "state"]) + + def foreach_state_details(self): + if isinstance(self.current_state, ForEachState): + self.state_to_machine_state(["foreach_state", "state"]) + self.state_machine.add_transition( + trigger=f"{self.current_state.iterationParam} IN {self.current_state.inputCollection}", + source=self.current_state.name, + dest=self.current_state.name, + ) + self.generate_actions_info( + machine_state=self.state_machine.get_state(self.current_state.name), + state_name=self.current_state.name, + actions=self.current_state.actions, + action_mode=self.current_state.mode, + ) + + def callback_state_details(self): + if isinstance(self.current_state, CallbackState): + self.state_to_machine_state(["callback_state", "state"]) + action = self.current_state.action + if action and action.functionRef: + self.generate_actions_info( 
+ machine_state=self.state_machine.get_state(self.current_state.name), + state_name=self.current_state.name, + actions=[action], + ) + + def state_to_machine_state(self, tags: List[str]) -> NestedState: + state_name = self.current_state.name + if state_name not in self.state_machine.states.keys(): + self.state_machine.add_states(state_name) + (ns := self.state_machine.get_state(state_name)).tags = tags + ns.metadata = {"state": self.current_state.serialize().__dict__} + return ns + + def get_subflow_state( + self, machine_state: NestedState, state_name: str, actions: List[Action] + ): + added_states = {} + for i, action in enumerate(actions): + if action.subFlowRef: + if isinstance(action.subFlowRef, str): + workflow_id = action.subFlowRef + workflow_version = None + else: + workflow_id = action.subFlowRef.workflowId + workflow_version = action.subFlowRef.version + none_found = True + for sf in self.subflows: + if sf.id == workflow_id and ( + (workflow_version and sf.version == workflow_version) + or not workflow_version + ): + none_found = False + new_machine = CustomHierarchicalMachine( + model=None, initial=None, auto_transitions=False + ) + + # Generate the state machine for the subflow + StateMachineGenerator( + workflow=sf, + state_machine=new_machine, + get_actions=self.get_actions, + subflows=self.subflows, + ).generate() + + # Convert the new_machine into a NestedState + added_states[i] = self.subflow_state_name( + action=action, subflow=sf + ) + nested_state = self.state_machine.state_cls(added_states[i]) + nested_state.tags = ["subflow"] + machine_state.add_substate(nested_state) + self.state_machine_to_nested_state( + state_name=state_name, + state_machine=new_machine, + nested_state=nested_state, + ) + + if none_found: + warnings.warn( + f"Specified subflow [{workflow_id} {workflow_version if workflow_version else ''}] not found.", + category=UserWarning, + ) + return added_states + + def generate_actions_info( + self, + machine_state: NestedState, + state_name: str, + actions: List[Dict[str, Action]], + action_mode: str = "sequential", + ): + if self.get_actions: + parallel_states = [] + if actions: + new_subflows_names = self.get_subflow_state( + machine_state=machine_state, state_name=state_name, actions=actions + ) + for i, action in enumerate(actions): + name = None + if action.functionRef: + name = ( + self.get_function_name(action.functionRef) + if isinstance(action.functionRef, str) + else ( + action.functionRef.refName + if isinstance(action.functionRef, FunctionRef) + else None + ) + ) + if name not in machine_state.states.keys(): + machine_state.add_substate( + ns := self.state_machine.state_cls(name) + ) + ns.tags = ["function"] + self.get_action_function(state=ns, f_name=name) + elif action.subFlowRef: + name = new_subflows_names.get(i) + elif action.eventRef: + name = f"{action.eventRef.triggerEventRef}/{action.eventRef.resultEventRef}" + if name not in machine_state.states.keys(): + machine_state.add_substate( + ns := self.state_machine.state_cls(name) + ) + ns.tags = ["event"] + self.get_action_event(state=ns, e_name=name) + if name: + if action_mode == "sequential": + if i < len(actions) - 1: + # get next name + next_name = None + if actions[i + 1].functionRef: + next_name = ( + self.get_function_name( + actions[i + 1].functionRef + ) + if isinstance(actions[i + 1].functionRef, str) + else ( + actions[i + 1].functionRef.refName + if isinstance( + actions[i + 1].functionRef, FunctionRef + ) + else None + ) + ) + if ( + next_name + not in 
self.state_machine.get_state( + state_name + ).states.keys() + ): + machine_state.add_substate( + ns := self.state_machine.state_cls( + next_name + ) + ) + ns.tags = ["function"] + self.get_action_function( + state=ns, f_name=next_name + ) + elif actions[i + 1].subFlowRef: + next_name = new_subflows_names.get(i + 1) + elif actions[i + 1].eventRef: + next_name = f"{action.eventRef.triggerEventRef}/{action.eventRef.resultEventRef}" + if ( + next_name + not in self.state_machine.get_state( + state_name + ).states.keys() + ): + machine_state.add_substate( + ns := self.state_machine.state_cls( + next_name + ) + ) + ns.tags = ["event"] + self.get_action_event( + state=ns, e_name=next_name + ) + self.state_machine.add_transition( + trigger="", + source=f"{state_name}.{name}", + dest=f"{state_name}.{next_name}", + ) + if i == 0: + machine_state.initial = name + elif action_mode == "parallel": + parallel_states.append(name) + if action_mode == "parallel": + machine_state.initial = parallel_states + + def get_action_function(self, state: NestedState, f_name: str): + if self.workflow.functions: + for function in self.workflow.functions: + current_function = function.serialize().__dict__ + if current_function["name"] == f_name: + state.metadata = {"function": current_function} + break + + def get_action_event(self, state: NestedState, e_name: str): + if self.workflow.events: + for event in self.workflow.events: + current_event = event.serialize().__dict__ + if current_event["name"] == e_name: + state.metadata = {"event": current_event} + break + + def subflow_state_name(self, action: Action, subflow: Workflow): + return ( + action.name + if action.name + else f"{subflow.id}/{subflow.version.replace(NestedState.separator, '-')}" + ) + + def add_all_sub_states( + self, + original_state: Union[NestedState, CustomHierarchicalMachine], + new_state: NestedState, + ): + if len(original_state.states) == 0: + return + for substate in original_state.states.values(): + new_state.add_substate(ns := self.state_machine.state_cls(substate.name)) + ns.tags = substate.tags + ns.metadata = substate.metadata + self.add_all_sub_states(substate, ns) + new_state.initial = original_state.initial + + def state_machine_to_nested_state( + self, + state_name: str, + state_machine: CustomHierarchicalMachine, + nested_state: NestedState, + ) -> NestedState: + self.add_all_sub_states(state_machine, nested_state) + + for trigger, event in state_machine.events.items(): + for transition_l in event.transitions.values(): + for transition in transition_l: + source = transition.source + dest = transition.dest + self.state_machine.add_transition( + trigger=trigger, + source=f"{state_name}.{nested_state.name}.{source}", + dest=f"{state_name}.{nested_state.name}.{dest}", + ) + + def get_function_name( + self, fn_ref: Union[Dict[str, Any], str, None] + ) -> Optional[str]: + if isinstance(fn_ref, dict) and "refName" in fn_ref: + return fn_ref["refName"] + elif isinstance(fn_ref, str): + return fn_ref + return None + + def end_state(self, name, condition=None): + if name not in self.state_machine.states.keys(): + self.state_machine.add_states(name) + + if not condition: + self.state_machine.get_state(name).final = True + else: + if "[*]" not in self.state_machine.states.keys(): + self.state_machine.add_states("[*]") + self.state_machine.get_state("[*]").final = True + self.state_machine.add_transition( + trigger=condition if condition else "", source=name, dest="[*]" + ) diff --git a/serverlessworkflow/sdk/state_machine_helper.py 
b/serverlessworkflow/sdk/state_machine_helper.py new file mode 100644 index 0000000..cefeb69 --- /dev/null +++ b/serverlessworkflow/sdk/state_machine_helper.py @@ -0,0 +1,114 @@ +from typing import List +from serverlessworkflow.sdk.workflow import Workflow +from serverlessworkflow.sdk.state_machine_generator import StateMachineGenerator +from transitions.extensions.diagrams import HierarchicalGraphMachine, GraphMachine +from serverlessworkflow.sdk.state_machine_extensions import ( + CustomGraphMachine, + CustomHierarchicalGraphMachine, +) +from transitions.extensions.nesting import NestedState +from transitions.extensions.diagrams_base import BaseGraph + + +class StateMachineHelper: + FINAL_NODE_STYLE = {"peripheries": "2", "color": "red"} + INITIAL_NODE_STYLE = {"peripheries": "2", "color": "green"} + TAGS = [ + "parallel_state", + "switch_state", + "inject_state", + "operation_state", + "sleep_state", + "event_state", + "foreach_state", + "callback_state", + "subflow", + "function", + "event", + "branch", + ] + COLORS = [ + "#8dd3c7", + "#ffffb3", + "#bebada", + "#fb8072", + "#80b1d3", + "#fdb462", + "#b3de69", + "#fccde5", + "#d9d9d9", + "#bc80bd", + "#ccebc5", + "#ffed6f", + ] + + def __init__( + self, + workflow: Workflow, + subflows: List[Workflow] = [], + get_actions=False, + title="", + ): + self.subflows = subflows + self.get_actions = get_actions + + machine_type = ( + CustomHierarchicalGraphMachine if self.get_actions else CustomGraphMachine + ) + + # Generate machine + self.machine = machine_type( + model=None, + initial=None, + show_conditions=True, + auto_transitions=False, + title=title, + ) + StateMachineGenerator( + workflow=workflow, + state_machine=self.machine, + get_actions=self.get_actions, + subflows=subflows, + ).generate() + + delattr(self.machine, "get_graph") + del self.machine.style_attributes["node"]["active"] + del self.machine.style_attributes["graph"]["active"] + self.machine.add_model(machine_type.self_literal) + + def draw(self, filename: str, graph_engine="pygraphviz"): + if graph_engine == "mermaid": + self.machine.graph_cls = self.machine._init_graphviz_engine( + graph_engine="mermaid" + ) + self.machine.model_graphs[id(self.machine.model)] = self.machine.graph_cls( + self.machine + ) + + # Define style + for name in ( + self.machine.get_nested_state_names() + if self.get_actions + else self.machine.states.keys() + ): + if self.machine.get_state(name).final or self.machine.initial == name: + self.machine.style_attributes["node"][name] = ( + self.FINAL_NODE_STYLE + if self.machine.get_state(name).final + else self.INITIAL_NODE_STYLE + ) + self.machine.model_graphs[id(self.machine.model)].set_node_style( + name, name + ) + + for tag in self.machine.get_state(name).tags: + if tag in self.TAGS: + self.machine.style_attributes["node"][name] = { + "fillcolor": self.COLORS[self.TAGS.index(tag)] + } + self.machine.model_graphs[id(self.machine.model)].set_node_style( + name, name + ) + break + + self.machine.get_graph().draw(filename, prog="dot") diff --git a/setup.py b/setup.py index 815a103..a96ef63 100644 --- a/setup.py +++ b/setup.py @@ -13,9 +13,8 @@ url="https://serverlessworkflow.io/", author='Serverless Workflow Contributors', license='http://www.apache.org/licenses/LICENSE-2.0.txt', - install_requires=['pyyaml==6.0', "jsonschema==4.4.0", "requests"], + install_requires=['pyyaml==6.0', "jsonschema==4.4.0", "requests", "pygraphviz==1.11", "transitions==0.9.2"], setup_requires=['pytest-runner'], tests_require=['pytest'], test_suite='tests', - ) diff --git 
a/tests/examples/advertise-listing.json b/tests/examples/advertise-listing.json new file mode 100644 index 0000000..80ac7d9 --- /dev/null +++ b/tests/examples/advertise-listing.json @@ -0,0 +1,57 @@ +{ + "id": "advertise-listing", + "version": "0.1.0", + "specVersion": "0.8", + "start": "f8-advertise-listing", + "states": [ + { + "name": "f8-advertise-listing", + "type": "operation", + "actions": [ + { + "functionRef": "f8" + } + ], + "transition": "advertise-parallel" + }, + { + "name": "advertise-parallel", + "type": "parallel", + "branches": [ + { + "name": "send-sms", + "actions": [ + { + "functionRef": "f9" + } + ] + }, + { + "name": "tweet", + "actions": [ + { + "functionRef": "f10" + } + ] + }, + { + "name": "messenger-chatbot", + "actions": [ + { + "functionRef": "f11" + } + ] + }, + { + "name": "test", + "actions": [ + { + "subFlowRef": "second-subgraph" + } + ] + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/tests/examples/graph.json b/tests/examples/graph.json new file mode 100644 index 0000000..c34652d --- /dev/null +++ b/tests/examples/graph.json @@ -0,0 +1,181 @@ +{ + "id": "graph", + "version": "0.1.0", + "specVersion": "0.8", + "description": "Real estate listing website", + "start": "entry-event", + "states": [ + { + "name": "entry-event", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "triggerEvent" + ], + "actions": [ + { + "functionRef": "process-event" + } + ] + } + ], + "stateDataFilter": { + "output": "${ .transformed }" + }, + "transition": "entry-decision" + }, + { + "name": "entry-decision", + "type": "switch", + "dataConditions": [ + { + "condition": "${ .\"postListing\" == true }", + "transition": "f1-upload-listing" + }, + { + "condition": "${ .\"submitDoc\" == true }", + "transition": "f2-upload-verification" + }, + { + "condition": "${ .\"submitClientInfo\" == true }", + "transition": "f3-upload-client" + } + ], + "defaultCondition": { + "end": true + } + }, + { + "name": "f1-upload-listing", + "type": "operation", + "actionMode": "parallel", + "actions": [ + { + "functionRef": "f1" + }, + { + "functionRef": "f2" + }, + { + "functionRef": { + "refName": "f5" + } + } + ], + "transition": "d1" + }, + { + "name": "d1", + "type": "operation", + "actions": [ + { + "eventRef": { + "triggerEventRef": "uploadPhoto", + "resultEventRef": "newPhoto" + } + } + ], + "transition": "f4-photo-verification" + }, + { + "name": "f4-photo-verification", + "type": "operation", + "actions": [ + { + "functionRef": "f4" + } + ], + "transition": "f5-image-correction" + }, + { + "name": "f5-image-correction", + "type": "operation", + "actions": [ + { + "functionRef": "f5" + } + ], + "transition": "f6-post-website" + }, + { + "name": "f6-post-website", + "type": "operation", + "actions": [ + { + "functionRef": "f6" + } + ], + "transition": "advertise-listing" + }, + { + "name": "advertise-listing", + "type": "operation", + "actions": [ + { + "subFlowRef": "advertise-listing" + } + ], + "end": true + }, + { + "name": "f2-upload-verification", + "type": "operation", + "actions": [ + { + "functionRef": "f2" + } + ], + "transition": "d2" + }, + { + "name": "f3-upload-client", + "type": "operation", + "actions": [ + { + "functionRef": "f3" + } + ], + "transition": "d2" + }, + { + "name": "d2", + "type": "operation", + "actions": [ + { + "eventRef": { + "triggerEventRef": "uploadInfo", + "resultEventRef": "newInfo" + } + } + ], + "transition": "new-info" + }, + { + "name": "new-info", + "type": "parallel", + "branches": [ + { + "name": "post-website", + 
"actions": [ + { + "functionRef": "f7" + }, + { + "functionRef": "f6" + } + ] + }, + { + "name": "advertise", + "actions": [ + { + "subFlowRef": "advertise-listing" + } + ] + } + ], + "end": true + } + ] +} \ No newline at end of file diff --git a/tests/examples/second-subgraph.json b/tests/examples/second-subgraph.json new file mode 100644 index 0000000..ef1c0aa --- /dev/null +++ b/tests/examples/second-subgraph.json @@ -0,0 +1,66 @@ +{ + "id": "second-subgraph", + "version": "0.1.0", + "specVersion": "0.8", + "start": "test-switch", + "states": [ + { + "name": "test-switch", + "type": "switch", + "dataConditions": [ + { + "condition": "condition1", + "transition": "test" + }, + { + "condition": "condition2", + "transition": "test2" + } + ], + "defaultCondition": { + "end": true + } + }, + { + "name": "test", + "type": "parallel", + "branches": [ + { + "name": "send-sms", + "actions": [ + { + "functionRef": "f9" + } + ] + }, + { + "name": "tweet", + "actions": [ + { + "functionRef": "f10" + } + ] + }, + { + "name": "messenger-chatbot", + "actions": [ + { + "functionRef": "f11" + } + ] + } + ], + "end": true + }, + { + "name": "test2", + "type": "operation", + "actions": [ + { + "functionRef": "functionref" + } + ], + "end": true + } + ] +} diff --git a/tests/serverlessworkflow/sdk/test_workflow.py b/tests/serverlessworkflow/sdk/test_workflow.py index d61d64c..335d69d 100644 --- a/tests/serverlessworkflow/sdk/test_workflow.py +++ b/tests/serverlessworkflow/sdk/test_workflow.py @@ -119,7 +119,7 @@ def test_programmatically_create_workflow(self): def test_workflow_from_source_json(self): examples_dir = os.path.join(os.path.dirname(__file__), '../../examples') examples = listdir(examples_dir) - self.assertEqual(len(examples), 10) + self.assertEqual(len(examples), 13) for example in examples: with self.subTest(f"test_{example}"): @@ -130,7 +130,7 @@ def test_workflow_from_source_json(self): def test_instance_workflow_class(self): examples_dir = os.path.join(os.path.dirname(__file__), '../../examples') examples = listdir(examples_dir) - self.assertEqual(len(examples), 10) + self.assertEqual(len(examples), 13) for example in examples: with self.subTest(f"test_{example}"): diff --git a/tests/serverlessworkflow/sdk/test_workflow_validator.py b/tests/serverlessworkflow/sdk/test_workflow_validator.py index 4a55068..f99de17 100644 --- a/tests/serverlessworkflow/sdk/test_workflow_validator.py +++ b/tests/serverlessworkflow/sdk/test_workflow_validator.py @@ -13,7 +13,7 @@ class TestWorkflowValidator(unittest.TestCase): def test_validate_examples(self): examples_dir = os.path.join(os.path.dirname(__file__), '../../examples') examples = listdir(examples_dir) - self.assertEqual(len(examples), 10) + self.assertEqual(len(examples), 13) for example in examples: with self.subTest(f"test_{example}"):