diff --git a/README.rst b/README.rst
index 1f471d3..086a375 100644
--- a/README.rst
+++ b/README.rst
@@ -38,7 +38,7 @@ The project's written in Python and uses `Poetry`_ for dependency and
 package management. We also use `pre-commit`_ to manage our pre-commit hooks,
 which rely on `black`_, `mypy`_, `pylint`_, amongst others.

-From within a VS Code `devcontainer`_] environment (recommended)::
+From within a VS Code `devcontainer`_ environment (recommended)::

     poetry install --with dev --sync
     pre-commit install -t commit-msg -t pre-commit
diff --git a/poetry.lock b/poetry.lock
index 7ca8cda..716a33c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -43,78 +43,99 @@ files = [

 [[package]]
 name = "coverage"
-version = "7.8.2"
+version = "7.10.6"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a"},
-    {file = "coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be"},
-    {file = "coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3"},
-    {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6"},
-    {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622"},
-    {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c"},
-    {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"},
-    {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404"},
-    {file = "coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7"},
-    {file = "coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347"},
-    {file = "coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9"},
-    {file = "coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879"},
-    {file = "coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a"},
-    {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5"},
-    {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11"},
-    {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a"},
-    {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb"},
-    {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54"},
-    {file = "coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a"},
-    {file = "coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975"},
-    {file = "coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53"},
-    {file = "coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c"},
-    {file = "coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1"},
-    {file = "coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279"},
-    {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99"},
-    {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20"},
-    {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2"},
-    {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57"},
-    {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f"},
-    {file = "coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8"},
-    {file = "coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223"},
-    {file = "coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f"},
-    {file = "coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca"},
-    {file = "coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d"},
-    {file = "coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85"},
-    {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257"},
-    {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108"},
-    {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0"},
-    {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050"},
-    {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48"},
-    {file = "coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7"},
-    {file = "coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3"},
-    {file = "coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7"},
-    {file = "coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008"},
-    {file = "coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36"},
-    {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46"},
-    {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be"},
-    {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740"},
-    {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625"},
-    {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b"},
-    {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199"},
-    {file = "coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8"},
-    {file = "coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d"},
-    {file = "coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b"},
-    {file = "coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a"},
-    {file = "coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d"},
-    {file = "coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca"},
-    {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d"},
-    {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787"},
-    {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7"},
-    {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3"},
-    {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7"},
-    {file = "coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a"},
-    {file = "coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e"},
-    {file = "coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837"},
-    {file = "coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32"},
-    {file = "coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27"},
+    {file = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"},
+    {file = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"},
+    {file = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"},
+    {file = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"},
+    {file = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"},
+    {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"},
+    {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"},
+    {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"},
+    {file = "coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"},
+    {file = "coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"},
+    {file = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"},
+    {file = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"},
+    {file = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"},
+    {file = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"},
+    {file = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"},
+    {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"},
+    {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"},
+    {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"},
+    {file = "coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"},
+    {file = "coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"},
+    {file = "coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"},
+    {file = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"},
+    {file = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"},
+    {file = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"},
+    {file = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"},
+    {file = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"},
+    {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"},
+    {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"},
+    {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"},
+    {file = "coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"},
+    {file = "coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"},
+    {file = "coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"},
+    {file = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"},
+    {file = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"},
+    {file = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"},
+    {file = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"},
+    {file = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"},
+    {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"},
+    {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"},
+    {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"},
+    {file = "coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"},
+    {file = "coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"},
+    {file = "coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"},
+    {file = "coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"},
+    {file = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"},
+    {file = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"},
+    {file = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"},
+    {file = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"},
+    {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"},
+    {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"},
+    {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"},
+    {file = "coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"},
+    {file = "coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"},
+    {file = "coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"},
+    {file = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"},
+    {file = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"},
+    {file = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"},
+    {file = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"},
+    {file = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"},
+    {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"},
+    {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"},
+    {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"},
+    {file = "coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"},
+    {file = "coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"},
+    {file = "coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"},
+    {file = "coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"},
+    {file = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"},
+    {file = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"},
+    {file = "coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"},
+    {file = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"},
+    {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"},
+    {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"},
+    {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"},
+    {file = "coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"},
+    {file = "coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"},
+    {file = "coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"},
+    {file = "coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352"},
+    {file = "coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612"},
+    {file = "coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b"},
+    {file = "coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144"},
+    {file = "coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b"},
+    {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862"},
+    {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2"},
+    {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78"},
+    {file = "coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c"},
+    {file = "coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf"},
+    {file = "coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"},
+    {file = "coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"},
 ]

 [package.extras]
@@ -122,40 +143,35 @@ toml = ["tomli"]

 [[package]]
 name = "distlib"
-version = "0.3.9"
+version = "0.4.0"
 description = "Distribution utilities"
 optional = false
 python-versions = "*"
 files = [
-    {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
-    {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
+    {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"},
+    {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"},
 ]

 [[package]]
 name = "filelock"
-version = "3.18.0"
+version = "3.19.1"
 description = "A platform independent file lock."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"},
-    {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"},
+    {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"},
+    {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"},
 ]

-[package.extras]
-docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
-typing = ["typing-extensions (>=4.12.2)"]
-
 [[package]]
 name = "identify"
-version = "2.6.12"
+version = "2.6.13"
 description = "File identification library for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"},
-    {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"},
+    {file = "identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b"},
+    {file = "identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32"},
 ]

 [package.extras]
@@ -163,13 +179,13 @@ license = ["ukkonen"]

 [[package]]
 name = "im-data-manager-job-decoder"
-version = "2.1.0"
+version = "2.5.0"
 description = "Job decoding logic"
 optional = false
 python-versions = ">=3.10"
 files = [
-    {file = "im_data_manager_job_decoder-2.1.0-py3-none-any.whl", hash = "sha256:b4eefdbdf3d7f5ccb9e154f1d737ca4d25f31e74a94d3a620c71a3752c49d4f8"},
-    {file = "im_data_manager_job_decoder-2.1.0.tar.gz", hash = "sha256:11ce891837c7e152be241caac137df192764c06cf2ab6ce84890825bb8c12d25"},
+    {file = "im_data_manager_job_decoder-2.5.0-py3-none-any.whl", hash = "sha256:d177a37083b73c82d71c137cd36ab3bf54de0a4ab5ab55e5aec49acb238b86f6"},
+    {file = "im_data_manager_job_decoder-2.5.0.tar.gz", hash = "sha256:1a0523ccead3ad851dcf6a450ec1792be1830d20a938d3ddfdf04ffcdf915a47"},
 ]

 [package.dependencies]
@@ -221,13 +237,13 @@ i18n = ["Babel (>=2.7)"]

 [[package]]
 name = "jsonschema"
-version = "4.24.0"
+version = "4.25.1"
 description = "An implementation of JSON Schema validation for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"},
-    {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"},
+    {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"},
+    {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"},
 ]

 [package.dependencies]
@@ -238,7 +254,7 @@ rpds-py = ">=0.7.1"

 [package.extras]
 format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]

 [[package]]
 name = "jsonschema-specifications"
@@ -348,13 +364,13 @@ files = [

 [[package]]
 name = "platformdirs"
-version = "4.3.8"
+version = "4.4.0"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"},
-    {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"},
+    {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"},
+    {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"},
 ]

 [package.extras]
@@ -397,41 +413,56 @@ virtualenv = ">=20.10.0"

 [[package]]
 name = "protobuf"
-version = "6.31.1"
+version = "6.32.0"
 description = ""
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"},
-    {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"},
-    {file = "protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402"},
-    {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"},
-    {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"},
-    {file = "protobuf-6.31.1-cp39-cp39-win32.whl", hash = "sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"},
-    {file = "protobuf-6.31.1-cp39-cp39-win_amd64.whl", hash = "sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"},
-    {file = "protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"},
-    {file = "protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a"},
+    {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"},
+    {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"},
+    {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"},
+    {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"},
+    {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"},
+    {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"},
+    {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"},
+    {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"},
+    {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"},
 ]

+[[package]]
+name = "pygments"
+version = "2.19.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
+    {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
 [[package]]
 name = "pytest"
-version = "8.3.5"
+version = "8.4.1"
 description = "pytest: simple powerful testing with Python"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
-    {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
+    {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"},
+    {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"},
 ]

 [package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
+colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
+iniconfig = ">=1"
+packaging = ">=20"
 pluggy = ">=1.5,<2"
+pygments = ">=2.7.2"

 [package.extras]
-dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]

 [[package]]
 name = "pyyaml"
@@ -513,150 +544,188 @@ typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}

 [[package]]
 name = "rpds-py"
-version = "0.25.1"
+version = "0.27.1"
 description = "Python bindings to Rust's persistent data structures (rpds)"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"},
-    {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"},
-    {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"},
-    {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"},
-    {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"},
-    {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"},
-    {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"},
-    {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"},
-    {file = "rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"},
-    {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"},
-    {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"},
-    {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"},
-    {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"},
-    {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"},
-    {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"},
-    {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"},
-    {file = "rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"},
-    {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"},
-    {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"},
-    {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"},
-    {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"},
-    {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"},
-    {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"},
-    {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"},
-    {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"},
-    {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"},
-    {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"},
-    {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"},
-    {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"},
-    {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"},
-    {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"},
-    {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"},
-    {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"},
-    {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"},
-    {file = "rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"},
-    {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"},
-    {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"},
-    {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"},
-    {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"},
-    {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"},
-    {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"},
-    {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"},
-    {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"},
-    {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"},
-    {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"},
-    {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"},
-    {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"},
-    {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"},
+    {file = "rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef"},
+    {file = "rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1"},
+    {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10"},
+    {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808"},
+    {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8"},
+    {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9"},
+    {file = "rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4"},
+    {file = "rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1"},
+    {file = "rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881"},
+    {file = "rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a"},
+    {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde"},
+    {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21"},
+    {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9"},
+    {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948"},
+    {file = "rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39"},
+    {file = "rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15"},
+    {file = "rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746"},
+    {file = "rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90"},
+    {file = "rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a"},
+    {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444"},
+    {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a"},
+    {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1"},
+    {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998"},
+    {file = "rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39"},
+    {file = "rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594"},
+    {file = "rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502"},
+    {file = "rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b"},
+    {file = "rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d"},
+    {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274"},
+    {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd"},
+    {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2"},
+    {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002"},
+    {file = "rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3"},
+    {file = "rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83"},
+    {file = "rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688"},
+    {file = "rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797"},
+    {file = "rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334"},
+    {file = "rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9"},
+    {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60"},
+    {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e"},
+    {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212"},
+    {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675"},
+    {file = "rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3"},
+    {file = "rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash =
"sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456"}, + {file = "rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3"}, + {file = "rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2"}, + {file = "rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48"}, + {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb"}, + {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734"}, + {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb"}, + {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0"}, + {file = "rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a"}, + {file = "rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772"}, + {file = "rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527"}, + {file = "rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905"}, + {file = 
"rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e"}, + {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e"}, + {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786"}, + {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec"}, + {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b"}, + {file = "rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52"}, + {file = "rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b"}, + {file = "rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6"}, + {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4"}, + {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c"}, + {file = 
"rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859"}, + {file = "rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8"}, ] [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] name = "virtualenv" -version = "20.31.2" +version = "20.34.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, - {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, + {file = "virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026"}, + {file = "virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a"}, ] [package.dependencies] @@ -671,4 +740,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "53c45992ce1109262a0db6e79aced43423e6fd83798b0b2bf45acca1bfc6d056" +content-hash = "341541770454fac78492e8f33f1aca9418582886dfe2d24af99cc06dbd1b7137" diff --git a/pyproject.toml b/pyproject.toml index f2ea162..cde7f6b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ packages = [ [tool.poetry.dependencies] python = "^3.12" im-protobuf = "^8.2.0" -im-data-manager-job-decoder = "^2.1.0" +im-data-manager-job-decoder = "^2.5.0" jsonschema = "^4.21.1" pyyaml = ">= 5.3.1, < 7.0" diff --git a/tests/instance_launcher.py b/tests/instance_launcher.py index 3256c0b..c938e9e 100644 --- a/tests/instance_launcher.py +++ b/tests/instance_launcher.py @@ -68,7 +68,7 @@ def __init__( elif os.path.isdir(file_path): shutil.rmtree(file_path) - def launch(self, launch_parameters: LaunchParameters) -> LaunchResult: + def launch(self, *, launch_parameters: LaunchParameters) -> LaunchResult: assert launch_parameters assert launch_parameters.project_id == TEST_PROJECT_ID assert launch_parameters.specification @@ -76,10 +76,24 @@ def launch(self, launch_parameters: LaunchParameters) -> LaunchResult: os.makedirs(EXECUTION_DIRECTORY, exist_ok=True) - # Create an Instance record (and dummy Task ID) - response = self._api_adapter.create_instance( - running_workflow_step_id=launch_parameters.running_workflow_step_id + # Create a running workflow step + assert launch_parameters.running_workflow_id + assert launch_parameters.step_name + response, _ = self._api_adapter.create_running_workflow_step( + running_workflow_id=launch_parameters.running_workflow_id, + 
step=launch_parameters.step_name, + replica=launch_parameters.step_replication_number, ) + assert "id" in response + rwfs_id: str = response["id"] + # And add the variables we've been provided with + if launch_parameters.variables: + _ = self._api_adapter.set_running_workflow_step_variables( + running_workflow_step_id=rwfs_id, variables=launch_parameters.variables + ) + + # Create an Instance record (and dummy Task ID) + response = self._api_adapter.create_instance(running_workflow_step_id=rwfs_id) instance_id = response["id"] task_id = "task-00000000-0000-0000-0000-000000000001" @@ -96,8 +110,8 @@ def launch(self, launch_parameters: LaunchParameters) -> LaunchResult: # The command may not need any, but we do the decoding anyway. decoded_command, status = job_decoder.decode( job["command"], - launch_parameters.specification_variables, - launch_parameters.running_workflow_step_id, + launch_parameters.variables, + rwfs_id, TextEncoding.JINJA2_3_0, ) print(f"Decoded command: {decoded_command}") @@ -129,6 +143,7 @@ def launch(self, launch_parameters: LaunchParameters) -> LaunchResult: self._msg_dispatcher.send(pod_message) return LaunchResult( + running_workflow_step_id=rwfs_id, instance_id=instance_id, task_id=task_id, command=" ".join(subprocess_cmd), diff --git a/tests/job-definitions/job-definitions.yaml b/tests/job-definitions/job-definitions.yaml index 66afcd3..0c48e84 100644 --- a/tests/job-definitions/job-definitions.yaml +++ b/tests/job-definitions/job-definitions.yaml @@ -132,3 +132,14 @@ jobs: concatenate: command: >- concatenate.py {% for ifile in inputFile %}{{ ifile }} {% endfor %} --outputFile {{ outputFile }} + + splitsmiles: + command: >- + copyf.py {{ inputFile }} + # Simulate multiple output files... + variables: + outputs: + properties: + outputBase: + creates: '{{ outputBase }}_*.smi' + type: files diff --git a/tests/jobs/copyf.py b/tests/jobs/copyf.py new file mode 100644 index 0000000..23dc38b --- /dev/null +++ b/tests/jobs/copyf.py @@ -0,0 +1,30 @@ +import shutil +import sys +from pathlib import Path + + +def main(): + print("copyf job running") + if len(sys.argv) != 2: + print("Usage: python copyf.py <input_file>") + sys.exit(1) + + original_path = Path(sys.argv[1]) + + if not original_path.exists() or not original_path.is_file(): + print(f"Error: '{original_path}' does not exist or is not a file.") + sys.exit(1) + + # Copy the input to 'chunk_1.smi' alongside the original file + new_name = original_path.absolute().parent.joinpath("chunk_1.smi") + new_path = original_path.with_name(new_name.name) + shutil.copyfile(original_path, new_path) + + new_name = original_path.absolute().parent.joinpath("chunk_2.smi") + new_path = original_path.with_name(new_name.name) + + shutil.copyfile(original_path, new_path) + + +if __name__ == "__main__": + main() diff --git a/tests/jobs/copyf.sh b/tests/jobs/copyf.sh new file mode 100755 index 0000000..8994a2b --- /dev/null +++ b/tests/jobs/copyf.sh @@ -0,0 +1,4 @@ +#! /bin/bash + +cp "$1" chunk_1.smi +cp "$1" chunk_2.smi diff --git a/tests/jobs/split-smi.sh b/tests/jobs/split-smi.sh new file mode 100755 index 0000000..48a2fb3 --- /dev/null +++ b/tests/jobs/split-smi.sh @@ -0,0 +1,72 @@ +#!/bin/bash +set -euo pipefail + +if [[ $# -lt 3 || $# -gt 4 ]]; then + echo "Usage: $0 <input_file> <lines_per_file> <base_name> [has_header: yes]" + exit 1 +fi + +input_file="$1" +lines_per_file="$2" +base_name="$3" +has_header="${4:-no}" + +# Determine how to read the file (plain text or gzipped) +if [[ "$input_file" == *.gz ]]; then + reader="zcat" +else + reader="cat" +fi + +if ! 
[[ -f "$input_file" ]]; then + echo "Error: File '$input_file' not found" + exit 1 +fi + +# Extract header if present +if [[ "$has_header" == "yes" ]]; then + header="$($reader "$input_file" | head -n1)" + data_start=2 +else + header="" + data_start=1 +fi + +# Count number of data lines (excluding header if present) +data_lines="$($reader "$input_file" | tail -n +"$data_start" | wc -l)" +if [[ "$data_lines" -eq 0 ]]; then + echo "No data lines to process." + exit 0 +fi + +# Calculate number of output files and required zero padding +num_files=$(( (data_lines + lines_per_file - 1) / lines_per_file )) +pad_width=0 +if [[ "$num_files" -gt 1 ]]; then + pad_width=${#num_files} +fi + +# Split logic +$reader "$input_file" | tail -n +"$data_start" | awk -v header="$header" -v lines="$lines_per_file" -v base="$base_name" -v pad="$pad_width" ' +function new_file() { + suffix = (pad > 0) ? sprintf("%0*d", pad, file_index) : file_index + file = base "_" suffix ".smi" + if (header != "") { + print header > file + } + file_index++ + line_count = 0 +} +{ + if (line_count == 0) { + new_file() + } + print >> file + line_count++ + if (line_count == lines) { + close(file) + print file " created" + line_count = 0 + } +} +' file_index=1 diff --git a/tests/test_decoder.py b/tests/test_decoder.py index ddac876..4958731 100644 --- a/tests/test_decoder.py +++ b/tests/test_decoder.py @@ -43,19 +43,6 @@ ) assert _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW -_DUPLICATE_WORKFLOW_VARIABLE_NAMES_WORKFLOW_FILE: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "duplicate-workflow-variable-names.yaml", -) -with open( - _DUPLICATE_WORKFLOW_VARIABLE_NAMES_WORKFLOW_FILE, "r", encoding="utf8" -) as workflow_file: - _DUPLICATE_WORKFLOW_VARIABLE_NAMES_WORKFLOW: Dict[str, Any] = yaml.safe_load( - workflow_file - ) -assert _DUPLICATE_WORKFLOW_VARIABLE_NAMES_WORKFLOW - _SIMPLE_PYTHON_PARALLEL_FILE: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", @@ -78,15 +65,6 @@ ) assert _STEP_SPECIFICATION_VARIABLE_NAMES_WORKFLOW -_WORKFLOW_OPTIONS_WORKFLOW_FILE: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "workflow-options.yaml", -) -with open(_WORKFLOW_OPTIONS_WORKFLOW_FILE, "r", encoding="utf8") as workflow_file: - _WORKFLOW_OPTIONS: Dict[str, Any] = yaml.safe_load(workflow_file) -assert _WORKFLOW_OPTIONS - def test_validate_schema_for_minimal(): # Arrange @@ -144,7 +122,7 @@ def test_validate_schema_for_shortcut_example_1(): assert error is None -def test_validate_schema_for_python_simple_molprops(): +def test_validate_schema_for_simple_python_molprops(): # Arrange # Act @@ -164,31 +142,11 @@ def test_validate_schema_for_step_specification_variable_names(): assert error is None -def test_validate_schema_for_workflow_options(): - # Arrange - - # Act - error = decoder.validate_schema(_WORKFLOW_OPTIONS) - - # Assert - assert error is None - - -def test_validate_schema_for_simple_python_parallel(): - # Arrange - - # Act - error = decoder.validate_schema(_SIMPLE_PYTHON_PARALLEL_WORKFLOW) - - # Assert - assert error is None - - def test_get_workflow_variables_for_smiple_python_molprops(): # Arrange # Act - wf_variables = decoder.get_variable_names(_SIMPLE_PYTHON_MOLPROPS_WORKFLOW) + wf_variables = decoder.get_workflow_variable_names(_SIMPLE_PYTHON_MOLPROPS_WORKFLOW) # Assert assert len(wf_variables) == 2 @@ -228,134 +186,18 @@ def test_get_workflow_steps(): assert steps[1]["name"] == "step2" -def test_get_workflow_variables_for_duplicate_variables(): - # Arrange - 
- # Act - names = decoder.get_variable_names(_DUPLICATE_WORKFLOW_VARIABLE_NAMES_WORKFLOW) - - # Assert - assert len(names) == 2 - assert names[0] == "x" - assert names[1] == "x" - - -def test_get_required_variable_names_for_simnple_python_molprops_with_options(): - # Arrange - - # Act - rqd_variables = decoder.get_required_variable_names( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW - ) - - # Assert - assert len(rqd_variables) == 2 - assert "candidateMolecules" in rqd_variables - assert "rdkitPropertyValue" in rqd_variables - - -def test_set_variables_from_options_for_step_for_simnple_python_molprops_with_options(): - # Arrange - variables = { - "rdkitPropertyName": "propertyName", - "rdkitPropertyValue": "propertyValue", - } - - # Act - new_variables = decoder.set_variables_from_options_for_step( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, - variables, - "step1", - ) - - # Assert - assert len(new_variables) == 2 - assert "name" in new_variables - assert "value" in new_variables - assert new_variables["name"] == "propertyName" - assert new_variables["value"] == "propertyValue" - - -def test_get_workflow_inputs_for_step_with_name_step1(): - # Arrange - - # Act - inputs = decoder.get_workflow_job_input_names_for_step( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "step1" - ) - - # Assert - assert len(inputs) == 1 - assert "inputFile" in inputs - - -def test_get_workflow_inputs_for_step_with_name_step2(): - # Arrange - - # Act - inputs = decoder.get_workflow_job_input_names_for_step( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "step2" - ) - - # Assert - assert not inputs - - -def test_get_workflow_inputs_for_step_with_unkown_step_name(): - # Arrange - - # Act - inputs = decoder.get_workflow_job_input_names_for_step( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "unknown" - ) - - # Assert - assert not inputs - - -def test_get_workflow_outputs_for_step_with_name_step1(): - # Arrange - - # Act - has_outputs = decoder.workflow_step_has_outputs( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "step1" - ) - - # Assert - assert not has_outputs - - -def test_get_workflow_outputs_for_step_with_name_step2(): - # Arrange - - # Act - has_outputs = decoder.workflow_step_has_outputs( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "step2" - ) - - # Assert - assert has_outputs - - -def test_get_workflow_outputs_for_step_with_unkown_step_name(): - # Arrange - - # Act - has_outputs = decoder.workflow_step_has_outputs( - _SIMPLE_PYTHON_MOLPROPS_WITH_OPTIONS_WORKFLOW, "unknown" - ) - - # Assert - assert not has_outputs - - def test_get_step_input_variable_names_when_duplicates(): # Arrange + workflow_filename: str = os.path.join( + os.path.dirname(__file__), + "workflow-definitions", + "duplicate-step-input-output-variable-names.yaml", + ) + with open(workflow_filename, "r", encoding="utf8") as wf_file: + definition: Dict[str, Any] = yaml.safe_load(wf_file) # Act - inputs = decoder.get_step_input_variable_names( - _SIMPLE_PYTHON_PARALLEL_WORKFLOW, "final-step" - ) + inputs = decoder.get_step_input_variable_names(definition, "step-1") # Assert assert len(inputs) == 2 @@ -368,13 +210,13 @@ def test_get_step_output_variable_names_when_duplicates(): workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", - "duplicate-step-output-variable-names.yaml", + "duplicate-step-input-output-variable-names.yaml", ) with open(workflow_filename, "r", encoding="utf8") as wf_file: definition: Dict[str, Any] = yaml.safe_load(wf_file) # Act - outputs = 
decoder.get_step_output_variable_names(definition, "step-1") + outputs = decoder.get_step_output_variable_names(definition, "step-2") # Assert assert len(outputs) == 2 diff --git a/tests/test_test_instance_launcher.py b/tests/test_test_instance_launcher.py index 38b8e06..33a34b1 100644 --- a/tests/test_test_instance_launcher.py +++ b/tests/test_test_instance_launcher.py @@ -33,23 +33,18 @@ def test_launch_nop(basic_launcher): project_id=TEST_PROJECT_ID, variables={}, ) - response, _ = utaa.create_running_workflow_step( - running_workflow_id=response["id"], step="step-1" - ) - rwfsid = response["id"] lp: LaunchParameters = LaunchParameters( project_id=TEST_PROJECT_ID, name="Test Instance", launching_user_name="dlister", launching_user_api_token="1234567890", running_workflow_id=rwfid, - running_workflow_step_id=rwfsid, + step_name="step-1", specification={"collection": "workflow-engine-unit-test-jobs", "job": "nop"}, - specification_variables={}, ) # Act - result = launcher.launch(lp) + result = launcher.launch(launch_parameters=lp) # Assert assert result.error_num == 0 @@ -69,26 +64,21 @@ def test_launch_nop_fail(basic_launcher): variables={}, ) rwfid = response["id"] - response, _ = utaa.create_running_workflow_step( - running_workflow_id=response["id"], step="step-1" - ) - rwfsid = response["id"] lp: LaunchParameters = LaunchParameters( project_id=TEST_PROJECT_ID, name="Test Instance", launching_user_name="dlister", launching_user_api_token="1234567890", running_workflow_id=rwfid, - running_workflow_step_id=rwfsid, + step_name="step-1", specification={ "collection": "workflow-engine-unit-test-jobs", "job": "nop-fail", }, - specification_variables={}, ) # Act - result = launcher.launch(lp) + result = launcher.launch(launch_parameters=lp) # Assert assert result.error_num == 0 @@ -108,26 +98,22 @@ def test_launch_smiles_to_file(basic_launcher): variables={}, ) rwfid = response["id"] - response, _ = utaa.create_running_workflow_step( - running_workflow_id=response["id"], step="step-1" - ) - rwfsid = response["id"] lp: LaunchParameters = LaunchParameters( project_id=TEST_PROJECT_ID, name="Test Instance", launching_user_name="dlister", launching_user_api_token="1234567890", running_workflow_id=rwfid, - running_workflow_step_id=rwfsid, + step_name="step-1", specification={ "collection": "workflow-engine-unit-test-jobs", "job": "smiles-to-file", }, - specification_variables={"smiles": "C1=CC=CC=C1", "outputFile": "output.smi"}, + variables={"smiles": "C1=CC=CC=C1", "outputFile": "output.smi"}, ) # Act - result = launcher.launch(lp) + result = launcher.launch(launch_parameters=lp) # Assert assert result.error_num == 0 diff --git a/tests/test_workflow_engine_examples.py b/tests/test_workflow_engine_examples.py index 8fa25b9..9d07f13 100644 --- a/tests/test_workflow_engine_examples.py +++ b/tests/test_workflow_engine_examples.py @@ -1,6 +1,7 @@ import os import time from datetime import datetime, timezone +from pprint import pprint from typing import Any import pytest @@ -83,6 +84,7 @@ def start_workflow( variables=variables, level=ValidationLevel.RUN, ) + print("vr_result", vr_result) assert vr_result.error_num == 0 # 3. response = da.create_running_workflow( @@ -128,6 +130,7 @@ def wait_for_workflow( # are the responsibility of the caller. 
attempts = 0 done = False + response = None while not done: response, _ = da.get_running_workflow(running_workflow_id=r_wfid) if response["done"]: @@ -139,6 +142,7 @@ time.sleep(completion_poll_period_s) # When we get here the workflow must have finished (not timed-out), # and it must have passed (or failed) according to the caller's expectation. + assert response assert response["done"] assert response["success"] == expect_success @@ -204,44 +208,17 @@ def test_workflow_engine_example_smiles_to_file(basic_engine): assert project_file_exists(output_file) -def test_workflow_engine_shortcut_example_1(basic_engine): +def test_workflow_engine_simple_python_molprops(basic_engine): # Arrange md, da = basic_engine - # Make sure files that should be generated by the test - # do not exist before we run the test. - output_file_a = "a.sdf" - assert not project_file_exists(output_file_a) - output_file_b = "b.sdf" - assert not project_file_exists(output_file_b) - - # Act - r_wfid = start_workflow(md, da, "shortcut-example-1", {}) - - # Assert - wait_for_workflow(da, r_wfid) - # Additional, detailed checks... - # Check we only have one RunningWorkflowStep, and it succeeded - response = da.get_running_workflow_steps(running_workflow_id=r_wfid) - assert response["count"] == 2 - assert response["running_workflow_steps"][0]["done"] - assert response["running_workflow_steps"][0]["success"] - assert response["running_workflow_steps"][1]["done"] - assert response["running_workflow_steps"][1]["success"] - # This test should generate a file in the simulated project directory - assert project_file_exists(output_file_a) - assert project_file_exists(output_file_b) + da.mock_get_running_workflow_step_output_values_for_output( + step_name="step2", + output_variable="outputFile", + output="step1.out.smi", + ) -def test_workflow_engine_simple_python_molprops(basic_engine): - # Arrange - md, da = basic_engine - # Make sure files that should be generated by the test - # do not exist before we run the test. - output_file_1 = "step1.out.smi" - assert not project_file_exists(output_file_1) - output_file_2 = "step2.out.smi" - assert not project_file_exists(output_file_2) - # And create the test's input file. + # Create the test's input file. input_file_1 = "input1.smi" input_file_1_content = """O=C(CSCc1ccc(Cl)s1)N1CCC(O)CC1 RDKit 3D @@ -293,9 +270,22 @@ ) as input_file: input_file.writelines(input_file_1_content) + # Make sure files that should be generated by the test + # do not exist before we run the test. 
+ output_file_1 = "results.smi" + assert not project_file_exists(output_file_1) + output_file_2 = "clustered-results.smi" + assert not project_file_exists(output_file_2) + # Act r_wfid = start_workflow( - md, da, "simple-python-molprops", {"candidateMolecules": input_file_1} + md, + da, + "simple-python-molprops", + { + "candidateMolecules": input_file_1, + "clusteredMolecules": "clustered-results.smi", + }, ) # Assert @@ -308,14 +298,20 @@ def test_workflow_engine_simple_python_molprops(basic_engine): assert response["running_workflow_steps"][0]["success"] assert response["running_workflow_steps"][1]["done"] assert response["running_workflow_steps"][1]["success"] - # This test should generate a file in the simulated project directory - assert project_file_exists(output_file_1) + # This test should generate the expected file in the simulated project directory assert project_file_exists(output_file_2) def test_workflow_engine_simple_python_molprops_with_options(basic_engine): # Arrange md, da = basic_engine + + da.mock_get_running_workflow_step_output_values_for_output( + step_name="step1", + output_variable="outputFile", + output="step1.out.smi", + ) + # Make sure files that should be generated by the test # do not exist before we run the test. output_file_1 = "step1.out.smi" @@ -381,6 +377,7 @@ def test_workflow_engine_simple_python_molprops_with_options(basic_engine): "simple-python-molprops-with-options", { "candidateMolecules": input_file_1, + "clusteredMolecules": output_file_2, "rdkitPropertyName": "prop", "rdkitPropertyValue": 1.2, }, @@ -401,22 +398,26 @@ def test_workflow_engine_simple_python_molprops_with_options(basic_engine): assert project_file_exists(output_file_2) -def test_workflow_engine_simple_python_parallel(basic_engine): +def test_workflow_engine_simple_python_fanout(basic_engine): # Arrange md, da = basic_engine + + da.mock_get_running_workflow_step_output_values_for_output( + step_name="first-step", + output_variable="outputBase", + output=["chunk_1.smi", "chunk_2.smi"], + ) + # Make sure files that should be generated by the test # do not exist before we run the test. - output_file_first = "first-step.out.smi" + output_file_first = "chunk_1.smi" + output_file_second = "chunk_2.smi" assert not project_file_exists(output_file_first) - output_file_pa = "parallel-step-a.out.smi" - assert not project_file_exists(output_file_pa) - output_file_pb = "parallel-step-b.out.smi" - assert not project_file_exists(output_file_pb) - output_file_final = "final-step.out.smi" - assert not project_file_exists(output_file_final) + assert not project_file_exists(output_file_second) # And create the test's input file. input_file_1 = "input1.smi" - input_file_1_content = "O=C(CSCc1ccc(Cl)s1)N1CCC(O)CC1" + input_file_1_content = """O=C(CSCc1ccc(Cl)s1)N1CCC(O)CC1 + COCN1C(=O)NC(C)(C)C1=O""" with open( f"{EXECUTION_DIRECTORY}/{input_file_1}", mode="wt", encoding="utf8" ) as input_file: @@ -426,7 +427,7 @@ def test_workflow_engine_simple_python_parallel(basic_engine): r_wfid = start_workflow( md, da, - "simple-python-parallel", + "simple-python-fanout", {"candidateMolecules": input_file_1}, ) @@ -435,16 +436,13 @@ def test_workflow_engine_simple_python_parallel(basic_engine): # Additional, detailed checks... 
# Check we only have one RunningWorkflowStep, and it succeeded response = da.get_running_workflow_steps(running_workflow_id=r_wfid) + print("response") + pprint(response) - assert response["count"] == 4 + assert response["count"] == 3 assert response["running_workflow_steps"][0]["done"] assert response["running_workflow_steps"][0]["success"] assert response["running_workflow_steps"][1]["done"] assert response["running_workflow_steps"][1]["success"] assert response["running_workflow_steps"][2]["done"] assert response["running_workflow_steps"][2]["success"] - assert response["running_workflow_steps"][3]["done"] - assert response["running_workflow_steps"][3]["success"] - # This test should generate a file in the simulated project directory - assert project_file_exists(output_file_first) - assert project_file_exists(output_file_final) diff --git a/tests/test_workflow_validator_for_create_level.py b/tests/test_workflow_validator_for_create_level.py index ad429c9..18f621f 100644 --- a/tests/test_workflow_validator_for_create_level.py +++ b/tests/test_workflow_validator_for_create_level.py @@ -26,10 +26,10 @@ def test_validate_minimal(): def test_validate_example_nop_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-nop-fail.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -46,10 +46,10 @@ def test_validate_example_nop_file(): def test_validate_example_smiles_to_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-smiles-to-file.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -66,10 +66,10 @@ def test_validate_example_smiles_to_file(): def test_validate_example_two_step_nop(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-two-step-nop.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -86,10 +86,10 @@ def test_validate_example_two_step_nop(): def test_validate_shortcut_example_1(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "shortcut-example-1.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -106,10 +106,10 @@ def test_validate_shortcut_example_1(): def test_validate_simple_python_molprops(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = 
yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -126,12 +126,12 @@ def test_validate_simple_python_molprops(): def test_validate_simple_python_molprops_with_options(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops-with-options.yaml", ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow diff --git a/tests/test_workflow_validator_for_run_level.py b/tests/test_workflow_validator_for_run_level.py index 2df1630..e76239d 100644 --- a/tests/test_workflow_validator_for_run_level.py +++ b/tests/test_workflow_validator_for_run_level.py @@ -11,10 +11,10 @@ def test_validate_example_nop_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-nop-fail.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -31,10 +31,10 @@ def test_validate_example_nop_file(): def test_validate_duplicate_step_names(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "duplicate-step-names.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -51,10 +51,10 @@ def test_validate_duplicate_step_names(): def test_validate_example_smiles_to_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-smiles-to-file.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -62,6 +62,7 @@ def test_validate_example_smiles_to_file(): error = WorkflowValidator.validate( level=ValidationLevel.RUN, workflow_definition=workflow, + variables={"smiles": "C", "outputFile": "blob.smi"}, ) # Assert @@ -71,10 +72,10 @@ def test_validate_example_smiles_to_file(): def test_validate_example_two_step_nop(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-two-step-nop.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -91,10 +92,10 @@ def test_validate_example_two_step_nop(): def test_validate_shortcut_example_1(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "shortcut-example-1.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -111,10 +112,10 @@ 
def test_validate_shortcut_example_1(): def test_validate_simple_python_molprops(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow variables = {"candidateMolecules": "input.sdf", "clusteredMolecules": "output.sdf"} @@ -133,17 +134,19 @@ def test_validate_simple_python_molprops(): def test_validate_simple_python_molprops_with_options_when_missing_required(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops-with-options.yaml", ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow variables = { "candidateMolecules": "input.sdf", "clusteredMolecules": "output.sdf", + "outputFile": "results.sdf", + "rdkitPropertyName": "name", } # Act @@ -154,7 +157,7 @@ def test_validate_simple_python_molprops_with_options_when_missing_required(): ) # Assert - assert error.error_num == 7 + assert error.error_num == 8 assert error.error_msg == [ "Missing workflow variable values for: rdkitPropertyValue" ] @@ -162,12 +165,12 @@ def test_validate_simple_python_molprops_with_options_when_missing_required(): def test_validate_simple_python_molprops_with_options(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops-with-options.yaml", ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow variables = { @@ -175,6 +178,7 @@ def test_validate_simple_python_molprops_with_options(): "clusteredMolecules": "output.sdf", "rdkitPropertyName": "col1", "rdkitPropertyValue": 123, + "outputFile": "results.sdf", } # Act @@ -191,10 +195,10 @@ def test_validate_simple_python_molprops_with_options(): def test_validate_simple_python_molprops_with_missing_input(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow variables = {"clusteredMolecules": "output.sdf"} @@ -207,87 +211,20 @@ def test_validate_simple_python_molprops_with_missing_input(): ) # Assert - assert error.error_num == 7 + assert error.error_num == 8 assert error.error_msg == [ "Missing workflow variable values for: candidateMolecules" ] -def test_validate_duplicate_workflow_variable_names(): - # Arrange - workflow_file: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "duplicate-workflow-variable-names.yaml", - ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: - workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) - assert workflow - - # 
Act - error = WorkflowValidator.validate( - level=ValidationLevel.TAG, - workflow_definition=workflow, - ) - - # Assert - assert error.error_num == 6 - assert error.error_msg == ["Duplicate workflow variable names found: x"] - - -def test_validate_simple_python_parallel(): - # Arrange - workflow_file: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "simple-python-parallel.yaml", - ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: - workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) - assert workflow - - # Act - error = WorkflowValidator.validate( - level=ValidationLevel.TAG, - workflow_definition=workflow, - ) - - # Assert - assert error.error_num == 0 - - -def test_validate_replicate_using_undeclared_input(): - # Arrange - workflow_file: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "replicate-using-undeclared-input.yaml", - ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: - workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) - assert workflow - - # Act - error = WorkflowValidator.validate( - level=ValidationLevel.TAG, - workflow_definition=workflow, - ) - - # Assert - assert error.error_num == 7 - assert error.error_msg == [ - "Replicate input variable is not declared: y (step=step-2)" - ] - - def test_validate_duplicate_step_output_variable_names(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", - "duplicate-step-output-variable-names.yaml", + "duplicate-step-input-output-variable-names.yaml", ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -300,5 +237,5 @@ def test_validate_duplicate_step_output_variable_names(): # Assert assert error.error_num == 3 assert error.error_msg == [ - "Duplicate step output variable: outputFile (step=step-1)" + "Duplicate step output variable: outputFile (step=step-2)" ] diff --git a/tests/test_workflow_validator_for_tag_level.py b/tests/test_workflow_validator_for_tag_level.py index 4445502..4c1719d 100644 --- a/tests/test_workflow_validator_for_tag_level.py +++ b/tests/test_workflow_validator_for_tag_level.py @@ -11,10 +11,10 @@ def test_validate_example_nop_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-nop-fail.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -31,10 +31,10 @@ def test_validate_example_nop_file(): def test_validate_duplicate_step_names(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "duplicate-step-names.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -51,10 +51,10 @@ def test_validate_duplicate_step_names(): def test_validate_example_smiles_to_file(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( 
os.path.dirname(__file__), "workflow-definitions", "example-smiles-to-file.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -71,10 +71,10 @@ def test_validate_example_smiles_to_file(): def test_validate_example_two_step_nop(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "example-two-step-nop.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -91,10 +91,10 @@ def test_validate_example_two_step_nop(): def test_validate_shortcut_example_1(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "shortcut-example-1.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -109,33 +109,12 @@ def test_validate_shortcut_example_1(): assert error.error_msg is None -def test_validate_simple_python_parallel(): - # Arrange - workflow_file: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "simple-python-parallel.yaml", - ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: - workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) - assert workflow - - # Act - error = WorkflowValidator.validate( - level=ValidationLevel.TAG, - workflow_definition=workflow, - ) - - # Assert - assert error.error_num == 0 - - def test_validate_simple_python_molprops(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops.yaml" ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -152,12 +131,12 @@ def test_validate_simple_python_molprops(): def test_validate_simple_python_molprops_with_options(): # Arrange - workflow_file: str = os.path.join( + workflow_filename: str = os.path.join( os.path.dirname(__file__), "workflow-definitions", "simple-python-molprops-with-options.yaml", ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: + with open(workflow_filename, "r", encoding="utf8") as workflow_file: workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) assert workflow @@ -172,60 +151,14 @@ def test_validate_simple_python_molprops_with_options(): assert error.error_msg is None -def test_validate_duplicate_workflow_variable_names(): - # Arrange - workflow_file: str = os.path.join( - os.path.dirname(__file__), - "workflow-definitions", - "duplicate-workflow-variable-names.yaml", - ) - with open(workflow_file, "r", encoding="utf8") as workflow_file: - workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader) - assert workflow - - # Act - error = WorkflowValidator.validate( - level=ValidationLevel.TAG, - workflow_definition=workflow, - ) - - # Assert - assert error.error_num == 6 - assert 
-
-
-def test_validate_replicate_using_undeclared_input():
-    # Arrange
-    workflow_file: str = os.path.join(
-        os.path.dirname(__file__),
-        "workflow-definitions",
-        "replicate-using-undeclared-input.yaml",
-    )
-    with open(workflow_file, "r", encoding="utf8") as workflow_file:
-        workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader)
-    assert workflow
-
-    # Act
-    error = WorkflowValidator.validate(
-        level=ValidationLevel.TAG,
-        workflow_definition=workflow,
-    )
-
-    # Assert
-    assert error.error_num == 7
-    assert error.error_msg == [
-        "Replicate input variable is not declared: y (step=step-2)"
-    ]
-
-
 def test_validate_duplicate_step_output_variable_names():
     # Arrange
-    workflow_file: str = os.path.join(
+    workflow_filename: str = os.path.join(
         os.path.dirname(__file__),
         "workflow-definitions",
-        "duplicate-step-output-variable-names.yaml",
+        "duplicate-step-input-output-variable-names.yaml",
     )
-    with open(workflow_file, "r", encoding="utf8") as workflow_file:
+    with open(workflow_filename, "r", encoding="utf8") as workflow_file:
         workflow: dict[str, Any] = yaml.load(workflow_file, Loader=yaml.FullLoader)
     assert workflow
@@ -238,5 +171,5 @@ def test_validate_duplicate_step_output_variable_names():
     # Assert
     assert error.error_num == 3
     assert error.error_msg == [
-        "Duplicate step output variable: outputFile (step=step-1)"
+        "Duplicate step output variable: outputFile (step=step-2)"
     ]
diff --git a/tests/wapi_adapter.py b/tests/wapi_adapter.py
index 322507c..c283ee1 100644
--- a/tests/wapi_adapter.py
+++ b/tests/wapi_adapter.py
@@ -442,7 +442,7 @@ def realise_outputs(
     # Methods not declared in the ABC

     def mock_get_running_workflow_step_output_values_for_output(
-        self, *, step_name: str, output_variable: str, output: list[str]
+        self, *, step_name: str, output_variable: str, output: list[str] | str
     ) -> None:
         """Sets the output response for a step.

         Limitation is that there can only be one record for each step name
@@ -450,7 +450,6 @@ def mock_get_running_workflow_step_output_values_for_output(
         to check the output variable name matches."""
         assert isinstance(step_name, str)
         assert isinstance(output_variable, str)
-        assert isinstance(output, list)

         UnitTestWorkflowAPIAdapter.lock.acquire()
         with open(_MOCK_STEP_OUTPUT_FILE, "rb") as pickle_file:
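The relaxed ``output: list[str] | str`` signature (and the dropped ``isinstance`` assert) lets a test register either a single value or a list of values for a step output. A minimal sketch of how a test might use the mock, assuming the ``UnitTestWorkflowAPIAdapter`` class defined in ``tests/wapi_adapter.py``; the step and variable names are illustrative::

    adapter = UnitTestWorkflowAPIAdapter()

    # A fan-out producer can register a list of chunk files...
    adapter.mock_get_running_workflow_step_output_values_for_output(
        step_name="first-step",
        output_variable="outputBase",
        output=["chunk_1.smi", "chunk_2.smi"],
    )
    # ...while a simple step can now register a single filename.
    adapter.mock_get_running_workflow_step_output_values_for_output(
        step_name="step-1",
        output_variable="outputFile",
        output="step1.out.smi",
    )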
diff --git a/tests/workflow-definitions/duplicate-step-output-variable-names.yaml b/tests/workflow-definitions/duplicate-step-input-output-variable-names.yaml
similarity index 52%
rename from tests/workflow-definitions/duplicate-step-output-variable-names.yaml
rename to tests/workflow-definitions/duplicate-step-input-output-variable-names.yaml
index 5a371a2..3ba3926 100644
--- a/tests/workflow-definitions/duplicate-step-output-variable-names.yaml
+++ b/tests/workflow-definitions/duplicate-step-input-output-variable-names.yaml
@@ -3,17 +3,8 @@
 kind: DataManagerWorkflow
 kind-version: "2025.2"
 name: duplicate-step-output-variable-names
 description: A workflow where step-1 has duplicate output variable names
-variable-mapping:
-  inputs:
-  - name: x
-  outputs:
-  - name: y
-    from:
-      step: step-2
-      output: outputFile

 steps:
-
 - name: step-1
   description: Add column 1
   specification:
@@ -23,18 +14,16 @@ steps:
     variables:
       name: "col1"
       value: 123
-  inputs:
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  outputs:
-  - output: outputFile
-    as: __step1__out.smi
-  - output: outputFile
-    as: __step1__out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules
+  in:
+  - inputFile
+  - inputFile

 - name: step-2
   description: Add column 2
@@ -45,11 +34,11 @@
     variables:
       name: "col2"
       value: "999"
-  inputs:
-  - input: inputFile
-    from:
-      step: step1
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: __step2__out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: step1
+      variable: outputFile
+  out:
+  - outputFile
+  - outputFile
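Read alongside the updated tests, the renamed definition now exercises duplicates in both the ``in:`` and ``out:`` lists, and at TAG level it is the duplicated ``out:`` entry on step-2 that is reported. A condensed sketch of that expectation (import paths are assumed from the test modules in this patch)::

    import yaml
    from workflow.workflow_validator import ValidationLevel, WorkflowValidator

    with open(
        "tests/workflow-definitions/duplicate-step-input-output-variable-names.yaml",
        encoding="utf8",
    ) as f:
        definition = yaml.load(f, Loader=yaml.FullLoader)

    error = WorkflowValidator.validate(
        level=ValidationLevel.TAG, workflow_definition=definition
    )
    assert error.error_num == 3
    assert error.error_msg == [
        "Duplicate step output variable: outputFile (step=step-2)"
    ]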
diff --git a/tests/workflow-definitions/example-smiles-to-file.yaml b/tests/workflow-definitions/example-smiles-to-file.yaml
index b7dc70c..018d90c 100644
--- a/tests/workflow-definitions/example-smiles-to-file.yaml
+++ b/tests/workflow-definitions/example-smiles-to-file.yaml
@@ -13,3 +13,12 @@ steps:
     collection: workflow-engine-unit-test-jobs
     job: smiles-to-file
     version: "1.0.0"
+  variable-mapping:
+  - variable: outputFile
+    from-workflow:
+      variable: outputFile
+  - variable: smiles
+    from-workflow:
+      variable: smiles
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/replicate-using-undeclared-input.yaml b/tests/workflow-definitions/replicate-using-undeclared-input.yaml
index 883ec62..447521b 100644
--- a/tests/workflow-definitions/replicate-using-undeclared-input.yaml
+++ b/tests/workflow-definitions/replicate-using-undeclared-input.yaml
@@ -3,17 +3,8 @@
 kind: DataManagerWorkflow
 kind-version: "2025.2"
 name: replicate-using-undeclared-input
 description: A workflow that replicates from a variable that's not declared
-variable-mapping:
-  inputs:
-  - name: x
-  outputs:
-  - name: y
-    from:
-      step: step2
-      output: outputFile

 steps:
-
 - name: step-1
   description: Add column 1
   specification:
@@ -23,13 +14,12 @@ steps:
     variables:
       name: "col1"
       value: 123
-  inputs:
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  outputs:
-  - output: outputFile
-    as: __step-1__out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules
+  in:
+  - outputFile

 - name: step-2
   description: Add column 2
@@ -42,12 +32,11 @@
     variables:
       name: "col2"
       value: "999"
   replicate:
     using:
-      input: y
-  inputs:
-  - input: inputFile
-    from:
-      step: step-1
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: __step-2__out.smi
+      variable: y
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: step-1
+      variable: outputFile
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/shortcut-example-1.yaml b/tests/workflow-definitions/shortcut-example-1.yaml
index e5b719d..0b6c2c3 100644
--- a/tests/workflow-definitions/shortcut-example-1.yaml
+++ b/tests/workflow-definitions/shortcut-example-1.yaml
@@ -12,9 +12,6 @@ steps:
     collection: workflow-engine-unit-test-jobs
     job: shortcut-example-1-process-a
     version: "1.0.0"
-  outputs:
-  - output: 'outputFile'
-    as: 'a.sdf'

 - name: example-1-step-2
   description: The first step
@@ -22,11 +19,10 @@
     collection: workflow-engine-unit-test-jobs
     job: shortcut-example-1-process-b
     version: "1.0.0"
-  inputs:
-  - input: 'inputFile'
-    from:
-      step: example-1-step-1
-      output: 'outputFile'
-  outputs:
-  - output: 'outputFile'
-    as: 'b.sdf'
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: example-1-step-1
+      variable: outputFile
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/simple-python-fanout.yaml b/tests/workflow-definitions/simple-python-fanout.yaml
new file mode 100644
index 0000000..0a50216
--- /dev/null
+++ b/tests/workflow-definitions/simple-python-fanout.yaml
@@ -0,0 +1,40 @@
+---
+kind: DataManagerWorkflow
+kind-version: "2025.2"
+name: python-workflow
+description: >-
+  A simple parallel workflow.
+  Input is split into N chunks and N processes of the same job are started
+
+steps:
+- name: first-step
+  description: Split an input file
+  specification:
+    collection: workflow-engine-unit-test-jobs
+    job: splitsmiles
+    version: "1.0.0"
+    variables:
+      name: count
+      value: "1"
+      outputBase: chunk
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules
+
+- name: parallel-step
+  description: Add some params
+  specification:
+    collection: workflow-engine-unit-test-jobs
+    job: append-col
+    version: "1.0.0"
+    variables:
+      name: desc1
+      value: "777"
+      outputFile: results.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: first-step
+      variable: outputBase
+  out:
+  - outputFile
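The fan-out in this new definition is driven by the engine logic that appears later in this patch: parallel-step's ``inputFile`` is mapped ``from-step`` first-step's ``outputBase``, and when the engine finds that the corresponding Job output is of type 'files' it launches one replica per generated file. A small illustration of the mapping half of that decision, using the ``get_step_prior_step_variable_mapping()`` decoder function added below (the step dict is abbreviated)::

    from workflow.decoder import get_step_prior_step_variable_mapping

    parallel_step = {
        "name": "parallel-step",
        "variable-mapping": [
            {
                "variable": "inputFile",
                "from-step": {"name": "first-step", "variable": "outputBase"},
            },
        ],
        "out": ["outputFile"],
    }
    mapping = get_step_prior_step_variable_mapping(step=parallel_step)
    # {'first-step': [Translation(in_='outputBase', out='inputFile')]}
    for prior_step, translations in mapping.items():
        for tr in translations:
            print(prior_step, tr.in_, "->", tr.out)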
diff --git a/tests/workflow-definitions/simple-python-molprops-with-options.yaml b/tests/workflow-definitions/simple-python-molprops-with-options.yaml
index 2fc1155..9ef80e5 100644
--- a/tests/workflow-definitions/simple-python-molprops-with-options.yaml
+++ b/tests/workflow-definitions/simple-python-molprops-with-options.yaml
@@ -4,89 +4,25 @@
 kind-version: "2025.2"
 name: python-workflow
 description: A simple python experimental workflow

-# Some meaningless variables.
-# Just to make sure the decoder accepts this.
-# The Workflow engin eis not (yet) interested in this block.
-variables:
-  inputs:
-    type: object
-    required:
-    - inputFile
-    properties:
-      inputFile:
-        title: Molecules to pick from
-        mime-types:
-        - squonk/x-smiles
-        type: file
-      seeds:
-        title: Molecules that are already picked
-        mime-types:
-        - squonk/x-smiles
-        type: file
-        multiple: true
-  outputs:
-    type: object
-    properties:
-      outputFile:
-        title: Output file
-        mime-types:
-        - chemical/x-csv
-        creates: '{{ outputFile }}'
-        type: file
-  options:
-    type: object
-    required:
-    - count
-    properties:
-      outputFile:
-        title: Output file name
-        type: string
-        pattern: "^[A-Za-z0-9_/\\.\\-]+$"
-        default: diverse.smi
-      count:
-        title: Number of molecules to pick
-        type: integer
-        minimum: 1
-      threshold:
-        title: Similarity threshold
-        type: number
-        minimum: 0
-        maximum: 1
-
-variable-mapping:
-  inputs:
-  - name: candidateMolecules
-  outputs:
-  - name: clusteredMolecules
-    from:
-      step: step2
-      output: outputFile
-  options:
-  - name: rdkitPropertyName
-    default: name
-    as:
-    - option: name
-      step: step1
-  - name: rdkitPropertyValue
-    as:
-    - option: value
-      step: step1
-
 steps:
-
 - name: step1
   description: Add column 1
   specification:
     collection: workflow-engine-unit-test-jobs
     job: rdkit-molprops
     version: "1.0.0"
-  inputs:
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  outputs:
-  - output: outputFile
-    as: step1.out.smi
+    variables:
+      outputFile: step1.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules
+  - variable: name
+    from-workflow:
+      variable: rdkitPropertyName
+  - variable: value
+    from-workflow:
+      variable: rdkitPropertyValue

 - name: step2
   description: Add column 2
@@ -97,11 +33,13 @@
     variables:
       name: "col2"
       value: "999"
-  inputs:
-  - input: inputFile
-    from:
-      step: step1
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: step2.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: step1
+      variable: outputFile
+  - variable: outputFile
+    from-workflow:
+      variable: clusteredMolecules
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/simple-python-molprops.yaml b/tests/workflow-definitions/simple-python-molprops.yaml
index dddb080..ba0d1d0 100644
--- a/tests/workflow-definitions/simple-python-molprops.yaml
+++ b/tests/workflow-definitions/simple-python-molprops.yaml
@@ -3,17 +3,8 @@
 kind: DataManagerWorkflow
 kind-version: "2025.2"
 name: python-workflow
 description: A simple python experimental workflow
-variable-mapping:
-  inputs:
-  - name: candidateMolecules
-  outputs:
-  - name: clusteredMolecules
-    from:
-      step: step2
-      output: outputFile

 steps:
-
 - name: step1
   description: Add column 1
   specification:
@@ -23,13 +14,11 @@ steps:
     variables:
       name: "col1"
       value: 123
-  inputs:
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  outputs:
-  - output: outputFile
-    as: step1.out.smi
+      outputFile: "results.smi"
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules

 - name: step2
   description: Add column 2
@@ -40,11 +29,13 @@
     variables:
       name: "col2"
       value: "999"
-  inputs:
-  - input: inputFile
-    from:
-      step: step1
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: step2.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: step1
+      variable: outputFile
+  - variable: outputFile
+    from-workflow:
+      variable: clusteredMolecules
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/simple-python-parallel.yaml b/tests/workflow-definitions/simple-python-parallel.yaml
index e620cda..c1f5c8f 100644
--- a/tests/workflow-definitions/simple-python-parallel.yaml
+++ b/tests/workflow-definitions/simple-python-parallel.yaml
@@ -3,18 +3,8 @@
 kind: DataManagerWorkflow
 kind-version: "2025.2"
 name: python-workflow
 description: A simple branching workflow
-variable-mapping:
-  inputs:
-  - name: candidateMolecules
-  outputs:
-  - name: clusteredMolecules
-    from:
-      step: final-step
-      output: outputFile
-
 steps:
-
 - name: first-step
   description: Create inputs
   specification:
@@ -24,13 +14,10 @@ steps:
     variables:
       name: "unnecessary"
      value: "0"
-  inputs:
-  - input: inputFile
-    from:
-      workflow-input: candidateMolecules
-  outputs:
-  - output: outputFile
-    as: first-step.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-workflow:
+      variable: candidateMolecules

 - name: parallel-step-a
   description: Add some params
@@ -41,14 +28,11 @@
     variables:
       name: "desc1"
       value: "777"
-  inputs:
-  - input: inputFile
-    from:
-      step: first-step
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: parallel-step-a.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: first-step
+      variable: outputFile

 - name: parallel-step-b
   description: Add some other params
@@ -59,14 +43,11 @@
     variables:
       name: "desc2"
       value: "999"
-  inputs:
-  - input: inputFile
-    from:
-      step: first-step
-      output: outputFile
-  outputs:
-  - output: outputFile
-    as: parallel-step-b.out.smi
+  variable-mapping:
+  - variable: inputFile
+    from-step:
+      name: first-step
+      variable: outputFile

 - name: final-step
   description: Collate results
@@ -83,6 +64,5 @@
       from:
         step: parallel-step-b
         output: outputFile
-  outputs:
-  - output: outputFile
-    as: final-step.out.smi
+  out:
+  - outputFile
diff --git a/tests/workflow-definitions/step-specification-variable-names.yaml b/tests/workflow-definitions/step-specification-variable-names.yaml
index e899b7f..99ae052 100644
--- a/tests/workflow-definitions/step-specification-variable-names.yaml
+++ b/tests/workflow-definitions/step-specification-variable-names.yaml
@@ -5,7 +5,6 @@
 name: step-variables
 description: Test a lot of variables whose format is supported

 steps:
-
 - name: step-1
   specification:
     collection: a
diff --git a/tests/workflow-definitions/workflow-options.yaml b/tests/workflow-definitions/workflow-options.yaml
deleted file mode 100644
index 9e742fe..0000000
--- a/tests/workflow-definitions/workflow-options.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-kind: DataManagerWorkflow
-kind-version: "2025.2"
-name: workflow-options
-description: Illustrate the use of workflow options
-variable-mapping:
-  options:
-  - name: variableWithoutDefault
-    as:
-    - option: variable1
-      step: step-1
-    - option: variable2
-      step: step-2
-  - name: variableWithIntegerDefault
-    default: 7
-    as:
-    - option: variable3
-      step: step-1
-  - name: variableWithIntegerDefaultAndRange
-    default: 7
-    minimum: 1
-    maximum: 8
-    as:
-    - option: variable4
-      step: step-1
-  - name: variableWithFloatDefault
-    default: 1.0
-    as:
-    - option: variable5
-      step: step-1
-  - name: variableWithBooleanDefault
-    default: true
-    as:
-    - option: variable6
-      step: step-1
-  - name: variableWithStringDefault
-    default: Hello, World!
-    as:
-    - option: variable7
-      step: step-1
-
-steps:
-
-- name: step-1
-  specification:
-    collection: a
-    job: b
-    version: '1.0.0'
-
-- name: step-2
-  specification:
-    collection: a
-    job: b
-    version: '1.0.0'
diff --git a/workflow/decoder.py b/workflow/decoder.py
index 55512e6..daf6a3d 100644
--- a/workflow/decoder.py
+++ b/workflow/decoder.py
@@ -4,6 +4,7 @@
 """

 import os
+from dataclasses import dataclass
 from typing import Any

 import jsonschema
@@ -23,6 +24,14 @@
 assert _WORKFLOW_SCHEMA


+@dataclass
+class Translation:
+    """A source ("in_") to destination ("out") variable map."""
+
+    in_: str
+    out: str
+
+
 def validate_schema(workflow: dict[str, Any]) -> str | None:
     """Checks the Workflow Definition against the built-in schema.
     If there's an error the error text is returned, otherwise None.
@@ -52,6 +61,16 @@ def get_steps(definition: dict[str, Any]) -> list[dict[str, Any]]:
     return response


+def get_step(definition: dict[str, Any], name: str) -> dict[str, Any]:
+    """Given a Workflow definition this function returns a named step
+    (if it exists)."""
+    steps: list[dict[str, Any]] = get_steps(definition)
+    for step in steps:
+        if step["name"] == name:
+            return step
+    return {}
+
+
 def get_name(definition: dict[str, Any]) -> str:
     """Given a Workflow definition this function returns its name."""
     return str(definition.get("name", ""))
@@ -62,23 +81,18 @@
 def get_description(definition: dict[str, Any]) -> str | None:
     """Given a Workflow definition this function returns its description."""
     return definition.get("description")


-def get_variable_names(definition: dict[str, Any]) -> list[str]:
+def get_workflow_variable_names(definition: dict[str, Any]) -> set[str]:
     """Given a Workflow definition this function returns all the names of the
-    variables defined at the workflow level. These are the 'names' for inputs,
-    outputs and options. This function DOES NOT de-duplicate names,
-    that is the role of the validator."""
-    wf_variable_names: list[str] = []
-    variables: dict[str, Any] | None = definition.get("variable-mapping")
-    if variables:
-        wf_variable_names.extend(
-            input_variable["name"] for input_variable in variables.get("inputs", [])
-        )
-        wf_variable_names.extend(
-            output_variable["name"] for output_variable in variables.get("outputs", [])
-        )
-        wf_variable_names.extend(
-            option_variable["name"] for option_variable in variables.get("options", [])
-        )
+    variables that need to be defined at the workflow level. These are the 'variables'
+    used in every step's variable-mapping block.
+    """
+    wf_variable_names: set[str] = set()
+    steps: list[dict[str, Any]] = get_steps(definition)
+    for step in steps:
+        if v_map := step.get("variable-mapping"):
+            for v in v_map:
+                if "from-workflow" in v:
+                    wf_variable_names.add(v["from-workflow"]["variable"])
     return wf_variable_names


@@ -86,178 +100,66 @@
 def get_step_output_variable_names(
     definition: dict[str, Any], step_name: str
 ) -> list[str]:
     """Given a Workflow definition and a Step name this function returns all the names
-    of the output variables defined at the Step level. This function DOES NOT
-    de-duplicate names, that is the role of the validator."""
+    of the output variables defined at the Step level. These are the names
+    of variables that have files associated with them that need copying to
+    the Project directory (from the Instance)."""
     variable_names: list[str] = []
     steps: list[dict[str, Any]] = get_steps(definition)
     for step in steps:
         if step["name"] == step_name:
-            variable_names.extend(
-                output["output"] for output in step.get("outputs", [])
-            )
+            variable_names.extend(step.get("out", []))
     return variable_names


 def get_step_input_variable_names(
     definition: dict[str, Any], step_name: str
 ) -> list[str]:
-    """Given a Workflow definition and a Step name (expected to exist)
-    this function returns all the names of the input
-    variables defined at the step level."""
+    """Given a Workflow definition and a Step name this function returns all the names
+    of the input variables defined at the Step level. These are the names
+    of variables that have files associated with them that need copying to
+    the Instance directory (from the Project)."""
     variable_names: list[str] = []
     steps: list[dict[str, Any]] = get_steps(definition)
     for step in steps:
         if step["name"] == step_name:
-            variable_names.extend(input["input"] for input in step.get("inputs", []))
     return variable_names
+            variable_names.extend(step.get("in", []))


-def get_workflow_job_input_names_for_step(
-    definition: dict[str, Any], name: str
-) -> list[str]:
-    """Given a Workflow definition and a step name we return a list of step Job input
-    variable names the step expects. To do this we iterate through the step's
-    inputs to find those that are declared 'from->workflow-input'."""
-    inputs: list[str] = []
-    for step in definition.get("steps", {}):
-        if step["name"] == name and "inputs" in step:
-            # Find all the workflow inputs.
-            # This gives us the name of the workflow input variable
-            # and the name of the step input (Job) variable.
-            inputs.extend(
-                step_input["input"]
-                for step_input in step["inputs"]
-                if "from" in step_input and "workflow-input" in step_input["from"]
-            )
-    return inputs
-
-
-def workflow_step_has_outputs(definition: dict[str, Any], name: str) -> bool:
-    """Given a Workflow definition and a step name we return a boolean
-    that is true if the step produces outputs."""
-    wf_outputs = definition.get("variable-mapping", {}).get("outputs", {})
-    return any(
-        "from" in output and "step" in output["from"] and output["from"]["step"] == name
-        for output in wf_outputs
-    )
-
-
-def set_variables_from_options_for_step(
-    definition: dict[str, Any], variables: dict[str, Any], step_name: str
-) -> dict[str, Any]:
-    """Given a Workflow definition, an existing map of variables and values,
-    and a step name this function returns a new set of variables by adding
-    variables and values that are required for the step that have been defined in the
-    workflow's variables->options block.
-
-    As an example, the following option, which is used if the step name is 'step1',
-    expects 'rdkitPropertyName' to exist in the current set of variables,
-    and should be copied into the new set of variables using the key 'propertyName'
-    and value that is the same as the one provided in the original 'rdkitPropertyName':
-
-    - name: rdkitPropertyName
-      default: propertyName
-      as:
-      - option: propertyName
-        step: step1
-
-    And ... in the above example ... if the input variables map
-    is {"rdkitPropertyName": "rings"} then the output map would be
-    {"rdkitPropertyName": "rings", "propertyName": "rings"}
-
-    The function returns a new variable map, with and an optional error string on error.
-    """
-
-    assert isinstance(definition, dict)
-    assert step_name
-
-    result = {}
-    options = definition.get("variable-mapping", {}).get("options", [])
-
-    for opt in options:
-        for step_alias in opt["as"]:
-            if step_alias["step"] == step_name:
-                result[step_alias["option"]] = variables[opt["name"]]
-                # can break the loop because a variable can be a step
-                # variable only once
-                break
-
-    # Success...
-    return result
-
-
-def get_required_variable_names(definition: dict[str, Any]) -> list[str]:
-    """Given a Workflow definition this function returns all the names of the
-    variables that are required to be defined when it is RUN - i.e.
-    all those the user needs to provide."""
-    required_variables: list[str] = []
-    variables: dict[str, Any] | None = definition.get("variable-mapping")
-    if variables:
-        # All inputs are required (no defaults atm)...
-        required_variables.extend(
-            input_variable["name"] for input_variable in variables.get("inputs", [])
-        )
-        # Options without defaults are required...
-        # It is the role of the engine to provide the actual default for those
-        # that have defaults but no user-defined value.
-        required_variables.extend(
-            option_variable["name"]
-            for option_variable in variables.get("options", [])
-            if "default" not in option_variable
-        )
-    return required_variables
-
-
-def set_step_variables(
-    *,
-    workflow: dict[str, Any],
-    inputs: list[dict[str, Any]],
-    outputs: list[dict[str, Any]],
-    previous_step_outputs: list[dict[str, Any]],
-    workflow_variables: dict[str, Any],
-    step_name: str,
-) -> dict[str, Any]:
-    """Prepare input- and output variables for the following step.
-
-    Inputs are defined in step definition but their values may
-    come from previous step outputs.
-    """
-    result = {}
-
-    for item in inputs:
-        p_key = item["input"]
-        p_val = ""
-        val = item["from"]
-        if "workflow-input" in val.keys():
-            p_val = workflow_variables[val["workflow-input"]]
-            result[p_key] = p_val
-        elif "step" in val.keys():
-            for out in previous_step_outputs:
-                if out["output"] == val["output"]:
-                    p_val = out["as"]
-
-            # this bit handles multiple inputs: if a step
-            # requires input from multiple steps, add them to
-            # the list in result dict. this is the reason for
-            # mypy ignore statements, mypy doesn't understand
-            # redefinition
-            if p_key in result:
-                if not isinstance(result[p_key], set):
-                    result[p_key] = {result[p_key]}  # type: ignore [assignment]
-                result[p_key].add(p_val)  # type: ignore [attr-defined]
-            else:
-                result[p_key] = p_val
-
-    for item in outputs:
-        p_key = item["output"]
-        p_val = item["as"]
-        result[p_key] = p_val
-
-    options = set_variables_from_options_for_step(
-        definition=workflow,
-        variables=workflow_variables,
-        step_name=step_name,
-    )
-
-    result |= options
-    return result
+def get_step_workflow_variable_mapping(*, step: dict[str, Any]) -> list[Translation]:
+    """Returns a list of workflow variable name to step variable name
+    Translation objects for the given step."""
+    variable_mapping: list[Translation] = []
+    if "variable-mapping" in step:
+        for v_map in step["variable-mapping"]:
+            if "from-workflow" in v_map:
+                variable_mapping.append(
+                    Translation(
+                        in_=v_map["from-workflow"]["variable"], out=v_map["variable"]
+                    )
+                )
+    return variable_mapping
+
+
+def get_step_prior_step_variable_mapping(
+    *, step: dict[str, Any]
+) -> dict[str, list[Translation]]:
+    """Returns a list of Translation objects, indexed by prior step name,
+    that identify source step (output) variable name to this step's (input)
+    variable name."""
+    variable_mapping: dict[str, list[Translation]] = {}
+    if "variable-mapping" in step:
+        for v_map in step["variable-mapping"]:
+            if "from-step" in v_map:
+                step_name = v_map["from-step"]["name"]
+                step_variable = v_map["from-step"]["variable"]
+                # Tuple is "from" -> "to"
+                if step_name in variable_mapping:
+                    variable_mapping[step_name].append(
+                        Translation(in_=step_variable, out=v_map["variable"])
+                    )
+                else:
+                    variable_mapping[step_name] = [
+                        Translation(in_=step_variable, out=v_map["variable"])
+                    ]
+    return variable_mapping
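Taken together, the reworked decoder queries are all the engine needs to resolve a step's variables. A small illustration against an in-memory definition (names taken from the test files above)::

    from workflow.decoder import (
        get_step_workflow_variable_mapping,
        get_workflow_variable_names,
    )

    definition = {
        "steps": [
            {
                "name": "step1",
                "variable-mapping": [
                    {
                        "variable": "inputFile",
                        "from-workflow": {"variable": "candidateMolecules"},
                    },
                ],
                "out": ["outputFile"],
            },
        ],
    }
    # The workflow-level names are now derived from the steps...
    assert get_workflow_variable_names(definition) == {"candidateMolecules"}
    # ...and each step yields Translation objects (in_ -> out).
    for tr in get_step_workflow_variable_mapping(step=definition["steps"][0]):
        print(tr.in_, "->", tr.out)  # candidateMolecules -> inputFile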
diff --git a/workflow/workflow-schema.yaml b/workflow/workflow-schema.yaml
index 651308e..90acb29 100644
--- a/workflow/workflow-schema.yaml
+++ b/workflow/workflow-schema.yaml
@@ -1,5 +1,5 @@
 ---
-# The JSONSchema for 'Workflow' YAML files.
+# The schema for 'Workflow' YAML files.
 #
 # See https://json-schema.org/understanding-json-schema/index.html
@@ -33,24 +33,6 @@ properties:
     # and, like Jobs, has no current schema so we permit anything here.
     type: object
     additionalProperties: true
-  variable-mapping:
-    # Workflow-specific variable declarations,
-    # used (at the moment) to map workflow variables to steps.
-    type: object
-    additionalProperties: false
-    properties:
-      inputs:
-        type: array
-        items:
-          $ref: "#/definitions/workflow-input-parameter"
-      outputs:
-        type: array
-        items:
-          $ref: "#/definitions/workflow-output-parameter"
-      options:
-        type: array
-        items:
-          $ref: "#/definitions/workflow-option-parameter"
 required:
 - kind
 - kind-version
@@ -73,151 +55,52 @@ definitions:

   # What does a Job specification template variable look like?
   # The values found in Jinja variables like '{{ x }}'.
   # Stuff like 'candidateMolecules' or 'clustered_molecules'
-  template-variable-name:
+  variable-name:
     type: string
     pattern: ^[a-zA-Z_][a-zA-Z0-9_]*$

-  # What does a filename look like?
-  # We do not (at the moment) permit spaces!
-  file-name:
-    type: string
-    pattern: ^[a-zA-Z0-9._-]+$
-
-  # An input parameter has a name and a (MIME) type.
-  workflow-input-parameter:
-    type: object
-    additionalProperties: false
-    properties:
-      name:
-        $ref: '#/definitions/template-variable-name'
-    required:
-    - name
-
-  # A workflow output parameter is essentially a file
-  # taken from the output of a step with a default (as) value.
-  workflow-output-parameter:
-    type: object
-    additionalProperties: false
-    properties:
-      name:
-        $ref: '#/definitions/template-variable-name'
-      from:
-        $ref: '#/definitions/from-step-output'
-    required:
-    - name
-
-  # Declaration of a step option value from a workflow option (variable)
-  as-step-option:
-    type: object
-    additionalProperties: false
-    properties:
-      option:
-        $ref: '#/definitions/template-variable-name'
-      step:
-        $ref: '#/definitions/rfc1035-label-name'
-    required:
-    - option
-    - step
-
-
-  # Declaration of a value from a workflow input (variable)
-  from-workflow-input:
-    type: object
-    additionalProperties: false
-    properties:
-      workflow-input:
-        $ref: '#/definitions/template-variable-name'
-    required:
-    - workflow-input
-
-  # Declaration of a value from another step
-  from-step-output:
-    type: object
-    additionalProperties: false
-    properties:
-      step:
-        $ref: '#/definitions/rfc1035-label-name'
-      output:
-        $ref: '#/definitions/template-variable-name'
-    required:
-    - step
-    - output
-
-  # A workflow option used as a step option
-  workflow-option-parameter:
-    type: object
-    additionalProperties: false
-    properties:
-      name:
-        $ref: '#/definitions/template-variable-name'
-      description:
-        type: string
-      default:
-        oneOf:
-        - type: string
-        - type: number
-        - type: boolean
-      minimum:
-        type: number
-      maximum:
-        type: number
-      as:
-        type: array
-        items:
-          $ref: '#/definitions/as-step-option'
-    required:
-    - name
-    - as
-
-  # A step replication control variable
-  # that is based on a step input variable
-  replicate-using-input:
-    type: object
-    additionalProperties: false
-    properties:
-      input:
-        $ref: '#/definitions/template-variable-name'
-    required:
-    - input
-
-  # A Step input (from an output of a prior step)
-  step-input-from-step:
-    type: object
-    additionalProperties: false
-    properties:
-      input:
-        $ref: '#/definitions/template-variable-name'
-      from:
-        $ref: '#/definitions/from-step-output'
-    required:
-    - input
-
-  # A Step input (from a workflow input)
-  step-input-from-workflow:
+  # A Step variable
+  # (whose value is derived from a variable used in a prior step)
+  step-variable-from-step:
     type: object
     additionalProperties: false
     properties:
-      input:
-        $ref: '#/definitions/template-variable-name'
-      from:
-        $ref: '#/definitions/from-workflow-input'
+      variable:
+        $ref: '#/definitions/variable-name'
+      from-step:
+        type: object
+        additionalProperties: false
+        properties:
+          name:
+            $ref: '#/definitions/rfc1035-label-name'
+          variable:
+            $ref: '#/definitions/variable-name'
+        required:
+        - name
+        - variable
     required:
-    - input
-    - from
+    - variable
+    - from-step

-  # A Step output (with an 'as' - a declared value)
-  step-output-as:
+  # A Step variable
+  # (whose value is derived from a workflow variable)
+  step-variable-from-workflow:
     type: object
     additionalProperties: false
     properties:
-      output:
-        $ref: '#/definitions/template-variable-name'
-      as:
-        $ref: '#/definitions/file-name'
+      variable:
+        $ref: '#/definitions/variable-name'
+      from-workflow:
+        type: object
+        additionalProperties: false
+        properties:
+          variable:
+            $ref: '#/definitions/variable-name'
+        required:
+        - variable
     required:
-    - output
-    - as
-
+    - variable
+    - from-workflow

   # A step specification variable
   # (there must be at least one if a variables block is defined).
@@ -260,31 +143,48 @@
     additionalProperties: false
     properties:
       name:
+        # A unique name for the step
         $ref: '#/definitions/rfc1035-label-name'
       description:
+        # An optional description
         type: string
         description: A description of the step
       specification:
+        # The step Job specification.
+        # This MUST define a 'collection', a 'job', and a 'version'.
+        # 'variables' (a map of name and value) can also be provided.
+        # The format of this is essentially identical to the specification
+        # used when a Job is launched via the DM API.
         $ref: '#/definitions/step-specification'
-      replicate:
-        # Used to indicate one input variable that is used to replicate/spawn
-        # step instances based on the number of values generated for the variable.
-        type: object
-        additionalProperties: false
-        properties:
-          using:
-            $ref: '#/definitions/replicate-using-input'
-      inputs:
+      variable-mapping:
+        # The map of the source of the step's variables.
+        # All variables the step needs (that aren't already in the specification)
+        # need to be declared here. They either come "from" a prior step
+        # or are expected in the running workflow variables. Here we simply
+        # associate every required variable with a source.
         type: array
         items:
           anyOf:
-          - $ref: "#/definitions/step-input-from-step"
-          - $ref: "#/definitions/step-input-from-workflow"
-      outputs:
+          - $ref: "#/definitions/step-variable-from-step"
+          - $ref: "#/definitions/step-variable-from-workflow"
+        minItems: 1
+      in:
+        # An optional list of the step variables that are inputs.
+        # These are typically files, expected to be present in the Project directory,
+        # that need to be copied (by the DM) into the step's instance directory.
         type: array
         items:
-          anyOf:
-          - $ref: "#/definitions/step-output-as"
+          $ref: '#/definitions/variable-name'
+        minItems: 1
+      out:
+        # An optional list of the step variables that are outputs.
+        # These are typically files, expected to be present in the Step Instance directory
+        # when it has finished (successfully), that need to be copied (by the DM)
+        # into the Project directory via "realise_outputs()"
+        type: array
+        items:
+          $ref: '#/definitions/variable-name'
+        minItems: 1
     required:
     - name
     - specification
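Under the revised schema a step carries its own ``variable-mapping``, ``in`` and ``out`` blocks. A quick way to sanity-check a definition is ``validate_schema()`` from ``workflow.decoder`` (shown earlier in this patch); this is a minimal sketch and the exact set of required top-level keys is as per the schema::

    from workflow.decoder import validate_schema

    definition = {
        "kind": "DataManagerWorkflow",
        "kind-version": "2025.2",
        "name": "minimal-example",
        "steps": [
            {
                "name": "step-1",
                "specification": {"collection": "a", "job": "b", "version": "1.0.0"},
                "variable-mapping": [
                    {
                        "variable": "inputFile",
                        "from-workflow": {"variable": "candidateMolecules"},
                    },
                ],
                "in": ["inputFile"],
                "out": ["outputFile"],
            },
        ],
    }
    assert validate_schema(definition) is None  # None means "no error"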
diff --git a/workflow/workflow_abc.py b/workflow/workflow_abc.py
index 2024fba..0a0acc0 100644
--- a/workflow/workflow_abc.py
+++ b/workflow/workflow_abc.py
@@ -27,7 +27,7 @@ class LaunchParameters:
     specification: dict[str, Any]
     # An alternative way to pass variables to the specification.
     # If used it will replace any 'variables' already present in the specification.
-    specification_variables: dict[str, Any] | None = None
+    variables: dict[str, Any] | None = None
     # A string. In DM v4 converted to a boolean and set in the
     # instance Pod as a label. Setting this means the Instances
     # that are created will not be automatically removed by the Job operator.
@@ -35,28 +35,15 @@
     # The RunningWorkflow UUID.
     # Required if the Instance is part of a Workflow step.
     running_workflow_id: str | None = None
-    # The RunningWorkflowStep UUID.
+    # The RunningWorkflow's step name.
     # Required if the Instance is part of a Workflow step.
-    running_workflow_step_id: str | None = None
-    # A list of prior workflow steps that this step depends upon.
-    #
-    # This list gives the InstanceLauncher an opportunity to take the outputs
-    # of a prior instance and link them to the instance directory for the
-    # instance to be launched. We need to do this for Workflows because Instances
-    # run as apart of a Workflow do not automatically have their outputs copied (linked)
-    # to the Project directory when they complete. As an example, a step that relies
-    # on the output files from two prior steps will provide the following list: -
-    #
-    #   ["r-workflow-step-a04d", "r-workflow-step-d904"]
-    running_workflow_step_prior_steps: list[str] | None = None
-    # Workflow step Job inputs (for this step Instance). These Workflow Inputs (files)
-    # are a list of Job input variable names for file variables where the
-    # file is expected to be present in the Project directory. It is simply a list of
-    # Job variable names. The launcher is expected to find the 'value' of these
-    # variables and then move the file to the instance directory.
-    #
-    #   ["inputFile"]
-    running_workflow_step_inputs: list[str] | None = None
+    step_name: str | None = None
+    # The step replication number.
+    # If only one instance of the step is expected to run
+    # this value can be left at 0 (zero). If this step's launch
+    # is expected to be executed more than once the value should be
+    # non-zero (and unique for this workflow run).
+    step_replication_number: int = 0
     # The application ID (a custom resource name)
     # used to identify the 'type' of Instance to create.
     # For DM Jobs this will be 'datamanagerjobs.squonk.it'
@@ -75,6 +62,9 @@ class LaunchResult:
     # The following optional properties
     # may not be present if there's a launch error.
     #
+    # A running workflow step UUID
+    # (if the step is part of a running workflow)
+    running_workflow_step_id: str | None = None
     # The Instance UUID that was created for you.
     instance_id: str | None = None
     # The Task UUID that is handling the Instance launch
@@ -94,7 +84,6 @@
     def launch(
         self,
         *,
         launch_parameters: LaunchParameters,
-        **kwargs: str,
     ) -> LaunchResult:
         """Launch a (Job) Instance"""
@@ -199,25 +188,6 @@ def set_running_workflow_done(
         """Set the success value for a RunningWorkflow Record.
         If not successful an error code and message should be provided."""

-    @abstractmethod
-    def create_running_workflow_step(
-        self,
-        *,
-        running_workflow_id: str,
-        step: str,
-        replica: int = 0,
-        prior_running_workflow_step_id: str | None = None,
-    ) -> tuple[dict[str, Any], int]:
-        """Create a RunningWorkflowStep Record (from a RunningWorkflow).
-        If this is a replica (concurrent execution) of a step the replica
-        value must be set to a value greater than 0. The replica is unique
-        for a given step and is used to distinguish between running workflow steps
-        generated from the same step name."""
-        # Should return:
-        # {
-        #   "id": "r-workflow-step-00000000-0000-0000-0000-000000000001",
-        # }
-
     @abstractmethod
     def get_running_workflow_step(
         self, *, running_workflow_step_id: str
@@ -292,17 +262,6 @@ def get_running_workflow_step_by_name(
         #   "id": "r-workflow-step-00000000-0000-0000-0000-000000000001",
         # },

-    @abstractmethod
-    def set_running_workflow_step_variables(
-        self,
-        *,
-        running_workflow_step_id: str,
-        variables: dict[str, Any],
-    ) -> None:
-        """Set the variables used prior to decoding the step command for each step.
-        This can be used to understand step failures but will also be vital
-        when adding variables values to subsequent steps from prior step values."""
-
     @abstractmethod
     def set_running_workflow_step_done(
         self,
@@ -373,16 +332,6 @@ def get_running_workflow_step_output_values_for_output(
         #   "output": ["dir/file1.sdf", "dir/file2.sdf"]
         # }

-    @abstractmethod
-    def realise_outputs(
-        self, *, running_workflow_step_id: str
-    ) -> tuple[dict[str, Any], int]:
-        """Copy (link) the step's files as outputs into the Project directory."""
-        # Should return an empty map or:
-        # {
-        #   "error": "",
-        # }
-

 class MessageDispatcher(ABC):
     """The class handling the sending of messages (on the Data Manager message bus)."""
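With ``running_workflow_step_id`` gone from the launch request (it is now returned in ``LaunchResult``), a workflow-step launch is described by the step name plus a replication number. A sketch of the new call shape, mirroring how the engine builds it later in this patch; all values here are placeholders::

    from workflow.workflow_abc import LaunchParameters

    lp = LaunchParameters(
        project_id="project-00000000-0000-0000-0000-000000000001",
        name="step-1",
        launching_user_name="alice",
        launching_user_api_token="token",
        specification={"collection": "a", "job": "b", "version": "1.0.0"},
        variables={"inputFile": "chunk_1.smi"},
        running_workflow_id="r-workflow-00000000-0000-0000-0000-000000000001",
        step_name="step-1",
        step_replication_number=1,  # non-zero only when the step is replicated
    )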
diff --git a/workflow/workflow_engine.py b/workflow/workflow_engine.py
index e11d17a..606b45d 100644
--- a/workflow/workflow_engine.py
+++ b/workflow/workflow_engine.py
@@ -24,10 +24,10 @@
 import logging
 import sys
-from http import HTTPStatus
-from typing import Any, Dict, Optional
+from typing import Any, Optional

-from decoder.decoder import TextEncoding, decode
+import decoder.decoder as job_defintion_decoder
+from decoder.decoder import TextEncoding
 from google.protobuf.message import Message
 from informaticsmatters.protobuf.datamanager.pod_message_pb2 import PodMessage
 from informaticsmatters.protobuf.datamanager.workflow_message_pb2 import WorkflowMessage
@@ -40,9 +40,10 @@
 )

 from .decoder import (
-    get_workflow_job_input_names_for_step,
-    set_step_variables,
-    workflow_step_has_outputs,
+    Translation,
+    get_step,
+    get_step_prior_step_variable_mapping,
+    get_step_workflow_variable_mapping,
 )

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -122,28 +123,13 @@ def _handle_workflow_start_message(self, r_wfid: str) -> None:
         wf_response, _ = self._wapi_adapter.get_workflow(workflow_id=wfid)
         _LOGGER.debug("API.get_workflow(%s) returned: -\n%s", wfid, str(wf_response))

-        # Now find the first step,
-        # and create a corresponding RunningWorkflowStep record...
-        first_step: Dict[str, Any] = wf_response["steps"][0]
-        first_step_name: str = first_step["name"]
-        # We need this even if the following goes wrong.
-        response, _ = self._wapi_adapter.create_running_workflow_step(
-            running_workflow_id=r_wfid,
-            step=first_step_name,
-        )
-        _LOGGER.debug(
-            "API.create_running_workflow_step(%s, %s) returned: -\n%s",
-            r_wfid,
-            first_step_name,
-            str(response),
-        )
-        assert "id" in response
-        r_wfsid: str = response["id"]
+        # Now find the first step (index 0)...
+        first_step: dict[str, Any] = wf_response["steps"][0]

-        # Launch the first step.
+        # Launch it.
         # If there's a launch problem the step (and running workflow) will have
         # and error, stopping it. There will be no Pod event as the launch has failed.
-        self._launch(wf=wf_response, rwf=rwf_response, rwfs_id=r_wfsid, step=first_step)
+        self._launch(wf=wf_response, rwf=rwf_response, step=first_step)

     def _handle_workflow_stop_message(self, r_wfid: str) -> None:
         """Logic to handle a STOP message."""
@@ -248,44 +234,17 @@ def _handle_pod_message(self, msg: PodMessage) -> None:
             self._set_step_error(step_name, r_wfid, r_wfsid, exit_code, "Job failed")
             return

-        # If we get here the prior step completed successfully and we can decide
-        # whether the step has outputs (files) that need to be written to the
-        # Project directory, while also marking the Step as DONE (successfully).
-        # We pass the outputs to the DM via a call to the API adapter's realise_outputs().
-        # In return it copies (links) these files to the Project directory.
+        # If we get here the prior step completed successfully so we
+        # mark the Step as DONE (successfully).
         wfid = rwf_response["workflow"]["id"]
         assert wfid
         wf_response, _ = self._wapi_adapter.get_workflow(workflow_id=wfid)
         _LOGGER.debug("API.get_workflow(%s) returned: -\n%s", wfid, str(wf_response))

-        error_num: int | None = None
-        error_msg: str | None = None
-        if workflow_step_has_outputs(wf_response, step_name):
-            # The step produces at least one output.
-            # Inform the DM so it can link them to the Project directory
-            response, status_code = self._wapi_adapter.realise_outputs(
-                running_workflow_step_id=r_wfsid,
-            )
-            if status_code != HTTPStatus.OK:
-                error_num = status_code
-                error_msg = (
-                    response["error"]
-                    if "error" in response
-                    else "Undisclosed error when realising outputs"
-                )
-
-        if error_num is not None:
-            # The job was successful but linking outputs (back to the Project directory)
-            # appears to have failed.
-            self._set_step_error(step_name, r_wfid, r_wfsid, error_num, error_msg)
-            return
-
         # We then inspect the Workflow to determine the next step.
         self._wapi_adapter.set_running_workflow_step_done(
             running_workflow_step_id=r_wfsid,
-            success=error_num is None,
-            error_num=error_num,
-            error_msg=error_msg,
+            success=True,
         )

         # We have the step from the Instance that's just finished,
@@ -299,33 +258,14 @@
         launch_attempted: bool = False
         for step in wf_response["steps"]:
             if step["name"] == step_name:
+                step_index = wf_response["steps"].index(step)
                 if step_index + 1 < len(wf_response["steps"]):
-                    # There's another step - for this simple logic it is the next step.
-
+                    # There's another step!
+                    # For this simple logic it is the next step.
                     next_step = wf_response["steps"][step_index + 1]
-                    next_step_name = next_step["name"]
-                    rwfs_response, _ = self._wapi_adapter.create_running_workflow_step(
-                        running_workflow_id=r_wfid,
-                        step=next_step_name,
-                    )
-                    assert "id" in rwfs_response
-                    r_wfsid = rwfs_response["id"]
-                    assert r_wfsid
-                    _LOGGER.debug(
-                        "API.create_running_workflow_step(%s, %s) returned: -\n%s",
-                        r_wfid,
-                        next_step_name,
-                        str(response),
-                    )
-
-                    self._launch(
-                        wf=wf_response,
-                        rwf=rwf_response,
-                        rwfs_id=r_wfsid,
-                        step=next_step,
-                    )
+                    self._launch(wf=wf_response, rwf=rwf_response, step=next_step)

                     # Something was started (or there was a launch error and the step
                     # and running workflow error will have been set).
@@ -340,27 +280,13 @@
             success=True,
         )

-    def _validate_step_command(
-        self,
-        *,
-        running_workflow_step_id: str,
-        step: dict[str, Any],
-        workflow_steps: list[dict[str, Any]],
-        our_step_index: int,
-        running_workflow_variables: dict[str, Any] | None = None,
-    ) -> str | dict[str, Any]:
-        """Returns an error message if the command isn't valid.
-        Without a message we return all the variables that were (successfully)
-        applied to the command.
-
-        We are also given a list of steps in workflow_steps and out position in
-        the list with our_step_index."""
-        assert our_step_index >= 0
-
+    def _get_step_job(self, *, step: dict[str, Any]) -> dict[str, Any]:
+        """Gets the Job definition for a given Step."""
         # We get the Job from the step specification, which must contain
         # the keys "collection", "job", and "version". Here we assume that
         # the workflow definition has passed the RUN-level validation
         # which means we can get these values.
+        assert "specification" in step
         step_spec: dict[str, Any] = step["specification"]
         job_collection: str = step_spec["collection"]
         job_job: str = step_spec["job"]
@@ -368,249 +294,209 @@
         job, _ = self._wapi_adapter.get_job(
             collection=job_collection, job=job_job, version=job_version
         )
+
         _LOGGER.debug(
-            "API.get_job(%s, %s, %s) for %s returned: -\n%s",
+            "API.get_job(%s, %s, %s) returned: -\n%s",
             job_collection,
             job_job,
             job_version,
-            running_workflow_step_id,
             str(job),
         )

-        # The step's 'specification' is a string - pass it directly to the
-        # launcher along with any (optional) 'workflow variables'. The launcher
-        # will apply the variables to the step's Job command but we need to handle
-        # any launch problems. The validator should have checked to ensure that
-        # variable expansion will work, but we must prepare for the unexpected.
-        #
-        # What the engine has to do here is make sure that the Job
-        # that's about to be launched has all its configuration requirements
-        # satisfied (inputs, outputs and options). Basically we must ensure
-        # that the Job definition's 'command' can be compiled by applying
-        # the available variables.
-        #
-        # To prevent launcher errors relating to decoding we get the command ourselves
-        # and then apply the current set of variables. And we use the JobDecoder's
-        # 'decode()' method to do this. It returns a tuple (str and boolean).
-        # If the boolean is True then the command can be compiled
-        # (i.e. it has no missing variables) and the launcher should not complain
-        # about the command (as we'll pass the same variables to it.
-        # If the returned boolean is False then we can expect the returned str
-        # to contain an error message.
-        #
-        # The full set of step variables can be obtained
-        # (in ascending order of priority) from...
-        #
-        # 1. The Job Step Specification
-        # 2. The RunningWorkflow
-        #
-        # If variable 'x' is defined in all three then the RunningWorkflow's
-        # value must be used.

-        # 1. Get any variables from the step specification.
-        all_variables = step_spec.pop("variables") if "variables" in step_spec else {}
-        # 2. Merge running workflow variables on top of these
-        if running_workflow_variables:
-            all_variables |= running_workflow_variables
-
-        # We must always process the current step's variables
-        _LOGGER.debug("Validating step %s (%s)", step, running_workflow_step_id)
-        inputs = step.get("inputs", [])
-        outputs = step.get("outputs", [])
-        previous_step_outputs = []
-        _LOGGER.debug(
-            "We are at workflow step index %d (%s)",
-            our_step_index,
-            running_workflow_step_id,
-        )
-
-        if our_step_index > 0:
-            # resolve all previous steps
-            previous_step_names = set()
-            for inp in inputs:
-                if step_name := inp["from"].get("step", None):
-                    previous_step_names.add(step_name)
-
-            for step in workflow_steps:
-                if step["name"] in previous_step_names:
-                    previous_step_outputs.extend(step.get("outputs", []))
-
-        _LOGGER.debug(
-            "Index %s (%s) workflow_variables=%s",
-            our_step_index,
-            running_workflow_step_id,
-            all_variables,
-        )
-        _LOGGER.debug(
-            "Index %s (%s) inputs=%s", our_step_index, running_workflow_step_id, inputs
-        )
-        _LOGGER.debug(
-            "Index %s (%s) outputs=%s",
-            our_step_index,
-            running_workflow_step_id,
-            outputs,
-        )
-        _LOGGER.debug(
-            "Index %s (%s) previous_step_outputs=%s",
-            our_step_index,
-            running_workflow_step_id,
-            previous_step_outputs,
-        )
-
-        # there should probably be an easier way to access this
-        running_wf_step, _ = self._wapi_adapter.get_running_workflow_step(
-            running_workflow_step_id=running_workflow_step_id
-        )
-        running_wf_id = running_wf_step["running_workflow"]["id"]
-        running_wf, _ = self._wapi_adapter.get_running_workflow(
-            running_workflow_id=running_wf_id
-        )
-        workflow_id = running_wf["workflow"]["id"]
-        workflow, _ = self._wapi_adapter.get_workflow(workflow_id=workflow_id)
-
-        step_vars = set_step_variables(
-            workflow=workflow,
-            workflow_variables=all_variables,
-            inputs=inputs,
-            outputs=outputs,
-            previous_step_outputs=previous_step_outputs,
-            step_name=running_wf_step["name"],
-        )
-        all_variables |= step_vars
-        _LOGGER.debug(
-            "Index %s (%s) all_variables=%s",
-            our_step_index,
-            running_workflow_step_id,
-            all_variables,
-        )
+        return job

-        # Set the variables for this step (so they can be inspected on error)
-        self._wapi_adapter.set_running_workflow_step_variables(
-            running_workflow_step_id=running_workflow_step_id,
-            variables=all_variables,
-        )
+    def _validate_step_command(
+        self,
+        *,
+        running_workflow_id: str,
+        step: dict[str, Any],
+        running_workflow_variables: dict[str, Any],
+    ) -> str | dict[str, Any]:
+        """Returns an error message if the command isn't valid.
+        Without a message we return all the variables that were (successfully)
+        applied to the command."""
+
+        # Start with any variables provided in the step's specification.
+        # This will be our "all variables" map for this step,
+        # which we will add to (and maybe even over-write)...
+        all_variables: dict[str, Any] = step["specification"].get("variables", {})
+
+        # Next, we iterate through the step's "variable mapping" block.
+        # This tells us all the variables that are set from either the
+        # 'workflow' or 'a prior step'.
+
+        # Start with any workflow variables in the step.
+        # This will be a list of Translations of "in" and "out" variable names.
+        # "in" variables are workflow variables, and "out" variables
+        # are expected Job variables. We use this to add variables
+        # to the "all variables" map.
+        for tr in get_step_workflow_variable_mapping(step=step):
+            assert tr.in_ in running_workflow_variables
+            all_variables[tr.out] = running_workflow_variables[tr.in_]
+
+        # Now we apply variables from the "variable mapping" block
+        # related to values used in prior steps. The decoder gives
+        # us a map indexed by prior step name that's a list of "in" "out"
+        # tuples as above.
+        step_prior_v_map: dict[str, list[Translation]] = (
+            get_step_prior_step_variable_mapping(step=step)
+        )
+        for prior_step_name, v_map in step_prior_v_map.items():
+            # Retrieve the prior "running" step
+            # in order to get the variables that were set there...
+            prior_step, _ = self._wapi_adapter.get_running_workflow_step_by_name(
+                name=prior_step_name, running_workflow_id=running_workflow_id
+            )
+            # Copy "in" value to "out"...
+            for tr in v_map:
+                assert tr.in_ in prior_step["variables"]
+                all_variables[tr.out] = prior_step["variables"][tr.in_]

         # Now ... can the command be compiled!?
-        message, success = decode(
+        job: dict[str, Any] = self._get_step_job(step=step)
+        message, success = job_defintion_decoder.decode(
             job["command"], all_variables, "command", TextEncoding.JINJA2_3_0
         )
         return all_variables if success else message

     def _launch(
-        self,
-        *,
-        wf: dict[str, Any],
-        rwf: dict[str, Any],
-        rwfs_id: str,
-        step: dict[str, Any],
+        self, *, wf: dict[str, Any], rwf: dict[str, Any], step: dict[str, Any]
     ) -> None:
         step_name: str = step["name"]
         rwf_id: str = rwf["id"]
+        project_id = rwf["project"]["id"]

-        _LOGGER.info("Validating step command: %s (step=%s)...", rwf_id, step_name)
-
-        # Get step data - importantly, giving us the sequence of steps in the response.
-        # Steps will be in wf_step_data["steps"] and our position in the list
-        # is wf_step_data["caller_step_index"]
-        wf_step_data, _ = self._wapi_adapter.get_workflow_steps_driving_this_step(
-            running_workflow_step_id=rwfs_id,
-        )
-        assert wf_step_data["caller_step_index"] >= 0
-        our_step_index: int = wf_step_data["caller_step_index"]
+        # A major piece of work to accomplish is to get ourselves into a position
+        # that allows us to check the step command can be executed.
+        # We do this by compiling a map of variables we believe the step needs.

-        # Now check the step command can be executed
-        # (by trying to decoding the Job command).
-        #
-        # We pass in the workflow variables (these are provided by the user
-        # when the workflow is run. All workflow variables will be present in the
-        # running workflow record)
-        running_workflow_variables: dict[str, Any] | None = rwf.get("variables")
+        # We start with all the workflow variables that were provided
+        # by the user when they "ran" the workflow. We're given a full set of
+        # variables in response (on success) or an error string (on failure)
+        rwf_variables: dict[str, Any] = rwf.get("variables", {})
         error_or_variables: str | dict[str, Any] = self._validate_step_command(
-            running_workflow_step_id=rwfs_id,
+            running_workflow_id=rwf_id,
             step=step,
-            workflow_steps=wf_step_data["steps"],
-            our_step_index=our_step_index,
-            running_workflow_variables=running_workflow_variables,
+            running_workflow_variables=rwf_variables,
         )
         if isinstance(error_or_variables, str):
             error_msg = error_or_variables
             msg = f"Failed command validation error_msg={error_msg}"
             _LOGGER.warning(msg)
-            self._set_step_error(step_name, rwf_id, rwfs_id, 1, msg)
+            self._set_step_error(step_name, rwf_id, None, 1, msg)
             return

-        project_id = rwf["project"]["id"]
         variables: dict[str, Any] = error_or_variables

-        _LOGGER.info(
-            "Launching step: RunningWorkflow=%s RunningWorkflowStep=%s step=%s"
-            " (name=%s project=%s, variables=%s)",
-            rwf_id,
-            rwfs_id,
-            step_name,
-            rwf["name"],
-            project_id,
-            variables,
-        )
-
-        # When we launch a step we need to identify all the prior steps in the workflow,
-        # those we depend on. The DataManager will then link their outputs to
-        # out instance directory. For simple workflows there is only one prior step,
-        # and it's the one immediately prior to this one.
+        # A step replication number,
+        # used only for steps expected to run in parallel (even if just once)
+        step_replication_number: int = 0
+        # Do we replicate this step (run it more than once)?
+        # We do if a variable in this step's mapping block
+        # refers to an output of a prior step whose type is 'files'.
+        # If the prior step is a 'splitter' we populate the 'replication_values' array
+        # with the list of files the prior step generated for its output.
         #
-        # We put all the prior step IDs in: -
-        #   'running_workflow_step_prior_steps'
-        #   A list of step UUID strings.
-        #
-        # In this 'simple' linear implementation that is simply the immediately
-        # preceding step.
-        prior_steps: list[str] = []
-        if our_step_index > 0:
-            # We need the step ID of the prior step.
-            prior_step_name: str = wf_step_data["steps"][our_step_index - 1]["name"]
-            step_response, _ = self._wapi_adapter.get_running_workflow_step_by_name(
-                name=prior_step_name,
+        # In this engine we only act on the _first_ match, i.e. there CANNOT
+        # be more than one prior step variable that is 'files'!
+        replication_values: list[str] = []
+        iter_variable: str | None = None
+        tr_map: dict[str, list[Translation]] = get_step_prior_step_variable_mapping(
+            step=step
+        )
+        for p_step_name, tr_list in tr_map.items():
+            # We need to get the Job definition for each step
+            # and then check whether the (output) variable is of type 'files'...
+            wf_step: dict[str, Any] = get_step(wf, p_step_name)
+            assert wf_step
+            job_definition: dict[str, Any] = self._get_step_job(step=wf_step)
+            jd_outputs: dict[str, Any] = job_defintion_decoder.get_outputs(
+                job_definition
+            )
+            for tr in tr_list:
+                if jd_outputs.get(tr.in_, {}).get("type") == "files":
+                    iter_variable = tr.out
+                    # Get the prior running step's output values
+                    response, _ = self._wapi_adapter.get_running_workflow_step_by_name(
+                        name=p_step_name,
+                        running_workflow_id=rwf_id,
+                    )
+                    rwfs_id = response["id"]
+                    assert rwfs_id
+                    result, _ = (
+                        self._wapi_adapter.get_running_workflow_step_output_values_for_output(
+                            running_workflow_step_id=rwfs_id,
+                            output_variable=tr.in_,
+                        )
+                    )
+                    replication_values = result["output"].copy()
+                    break
+            # Stop if we've got an iteration variable
+            if iter_variable:
+                break
+
+        num_step_instances: int = max(1, len(replication_values))
+        for iteration in range(num_step_instances):
+
+            # If we are replicating this step then we must replace the step's variable
+            # with a value expected for this iteration.
+            if iter_variable:
+                iter_value: str = replication_values[iteration]
+                _LOGGER.info(
+                    "Replicating step: %s iteration=%s variable=%s value=%s",
+                    step_name,
+                    iteration,
+                    iter_variable,
+                    iter_value,
+                )
+                # Over-write the replicating variable
+                # and set the replication number to a unique +ve non-zero value...
+                variables[iter_variable] = iter_value
+                step_replication_number = iteration + 1
+
+            _LOGGER.info(
+                "Launching step: %s RunningWorkflow=%s (name=%s)"
+                " variables=%s project=%s",
+                step_name,
+                rwf_id,
+                rwf["name"],
+                variables,
+                project_id,
+            )
+
+            lp: LaunchParameters = LaunchParameters(
+                project_id=project_id,
+                name=step_name,
+                debug=rwf.get("debug"),
+                launching_user_name=rwf["running_user"],
+                launching_user_api_token=rwf["running_user_api_token"],
+                specification=step["specification"],
+                variables=variables,
+                running_workflow_id=rwf_id,
+                step_name=step_name,
+                step_replication_number=step_replication_number,
             )
-            assert "id" in step_response
-            prior_steps.append(step_response["id"])
-
-        # We must also identify workflow inputs that are required by the step we are
-        # about to launch and pass those using a launch parameter. The launcher
-        # will ensure these are copied into out instance directory before we are run.
-        # We cannot provide the variable values (even though we have them) because
-        # the DM passes input through 'InputHandlers', which may translate the value.
-        # So we have to pass the name and let the DM move the files after
-        #
-        # 'running_workflow_step_inputs'
-        # A list of Job input variable names
-        inputs: list[str] = []
-        inputs.extend(iter(get_workflow_job_input_names_for_step(wf, step_name)))
-        lp: LaunchParameters = LaunchParameters(
-            project_id=project_id,
-            name=step_name,
-            debug=rwf.get("debug"),
-            launching_user_name=rwf["running_user"],
-            launching_user_api_token=rwf["running_user_api_token"],
-            specification=step["specification"],
-            specification_variables=variables,
-            running_workflow_id=rwf_id,
-            running_workflow_step_id=rwfs_id,
-            running_workflow_step_prior_steps=prior_steps,
-            running_workflow_step_inputs=inputs,
-        )
-        lr: LaunchResult = self._instance_launcher.launch(launch_parameters=lp)
-        if lr.error_num:
-            self._set_step_error(step_name, rwf_id, rwfs_id, lr.error_num, lr.error_msg)
-        else:
-            _LOGGER.info("Launched step '%s' (command=%s)", step_name, lr.command)
+            lr: LaunchResult = self._instance_launcher.launch(launch_parameters=lp)
+            rwfs_id = lr.running_workflow_step_id
+            assert rwfs_id
+
+            if lr.error_num:
+                self._set_step_error(
+                    step_name, rwf_id, rwfs_id, lr.error_num, lr.error_msg
+                )
+            else:
+                _LOGGER.info(
+                    "Launched step '%s' step_id=%s (command=%s)",
+                    step_name,
+                    rwfs_id,
+                    lr.command,
+                )

     def _set_step_error(
         self,
         step_name: str,
         r_wfid: str,
-        r_wfsid: str,
+        r_wfsid: str | None,
         error_num: Optional[int],
         error_msg: Optional[str],
     ) -> None:
@@ -623,12 +509,14 @@ def _set_step_error(
             error_msg,
         )
         r_wf_error: str = f"Step '{step_name}' ERROR({error_num}): {error_msg}"
-        self._wapi_adapter.set_running_workflow_step_done(
-            running_workflow_step_id=r_wfsid,
-            success=False,
-            error_num=error_num,
-            error_msg=r_wf_error,
-        )
+        # There may be a pre-step error (so assume the ID can also be None)
+        if r_wfsid:
+            self._wapi_adapter.set_running_workflow_step_done(
+                running_workflow_step_id=r_wfsid,
+                success=False,
+                error_num=error_num,
+                error_msg=r_wf_error,
+            )
         # We must also set the running workflow as done (failed)
         self._wapi_adapter.set_running_workflow_done(
             running_workflow_id=r_wfid,
diff --git a/workflow/workflow_validator.py b/workflow/workflow_validator.py
index 6324bd5..4a646d3 100644
--- a/workflow/workflow_validator.py
+++ b/workflow/workflow_validator.py
@@ -5,11 +5,9 @@
 from typing import Any

 from .decoder import (
-    get_required_variable_names,
-    get_step_input_variable_names,
     get_step_output_variable_names,
     get_steps,
-    get_variable_names,
+    get_workflow_variable_names,
     validate_schema,
 )

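
The import changes consolidate the two variable-name accessors into a single ``get_workflow_variable_names``. Its implementation isn't part of this diff, but the ``set[str]`` annotation used at the run level below suggests something like the following sketch (the ``variables`` key is an assumption about the workflow schema)::

    from typing import Any

    def get_workflow_variable_names(workflow_definition: dict[str, Any]) -> set[str]:
        # Collect every declared workflow variable name.
        # Returning a set collapses duplicates at source, which is
        # consistent with the duplicate-name check removed in the next hunk.
        return {
            variable["name"]
            for variable in workflow_definition.get("variables", [])
        }
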
@@ -113,43 +111,6 @@ def _validate_tag_level(
             error_num=2,
             error_msg=[f"Duplicate step names found: {', '.join(duplicate_names)}"],
         )
-    # Workflow variables must be unique.
-    duplicate_names.clear()
-    variable_names.clear()
-    wf_variable_names: list[str] = get_variable_names(workflow_definition)
-    for wf_variable_name in wf_variable_names:
-        if (
-            wf_variable_name not in duplicate_names
-            and wf_variable_name in variable_names
-        ):
-            duplicate_names.add(wf_variable_name)
-        variable_names.add(wf_variable_name)
-    if duplicate_names:
-        return ValidationResult(
-            error_num=6,
-            error_msg=[
-                f"Duplicate workflow variable names found: {', '.join(duplicate_names)}"
-            ],
-        )
-    # For each 'replicating' step the replicating variable
-    # must be declared in the step.
-    for step in get_steps(workflow_definition):
-        if (
-            replicate_using_input := step.get("replicate", {})
-            .get("using", {})
-            .get("input")
-        ):
-            step_name = step["name"]
-            if replicate_using_input not in get_step_input_variable_names(
-                workflow_definition, step_name
-            ):
-                return ValidationResult(
-                    error_num=7,
-                    error_msg=[
-                        "Replicate input variable is not declared:"
-                        f" {replicate_using_input} (step={step_name})"
-                    ],
-                )
     return _VALIDATION_SUCCESS


@@ -163,7 +124,7 @@ def _validate_run_level(
     assert workflow_definition

     # We must have values for all the variables defined in the workflow.
-    wf_variables: list[str] = get_required_variable_names(workflow_definition)
+    wf_variables: set[str] = get_workflow_variable_names(workflow_definition)
     missing_values: list[str] = []
     missing_values.extend(
         wf_variable
@@ -172,7 +133,7 @@ def _validate_run_level(
     )
     if missing_values:
         return ValidationResult(
-            error_num=7,
+            error_num=8,
             error_msg=[
                 f"Missing workflow variable values for: {', '.join(missing_values)}"
             ],
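
At the run level the validator now requires a value for every name reported by ``get_workflow_variable_names`` (judging by the old helper's name, previously only the 'required' subset), and the resulting error is renumbered from 7 to 8. The check reduces to a simple membership filter; a sketch with hypothetical inputs, assuming the elided generator filters against the supplied values mapping::

    wf_variables: set[str] = {"candidates", "clusters"}
    workflow_variables: dict[str, str] = {"candidates": "input.sdf"}

    # A variable is 'missing' if the caller supplied no value for it.
    missing_values: list[str] = [
        name for name in sorted(wf_variables) if name not in workflow_variables
    ]
    assert missing_values == ["clusters"]
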