diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d5ba3ad..bdb19f0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -8,9 +8,10 @@ Changelog for Stingray Controller Added: ^^^^^^ - Handling for new firmware error reporting +- Stim waiting state to add spinner to stim start/stop button -0.3.0 (unreleased) +0.3.0 (2023-07-11) ------------------ Added: @@ -26,8 +27,8 @@ Fixed: ^^^^^^ - Error message for when: - - The instrument's firmware is incompatible with the software - - An error occurs during software install + - The instrument's firmware is incompatible with the software + - An error occurs during software install 0.2.0 (2023-06-05) diff --git a/controller/build.py b/controller/build.py index 81eb288..9f14a13 100644 --- a/controller/build.py +++ b/controller/build.py @@ -32,5 +32,7 @@ for output in cmd.get_outputs(): relative_extension = os.path.relpath(output, cmd.build_lib) - # TODO delete the .so file it if already exists - shutil.copyfile(output, os.path.join("src", relative_extension)) + dest_path = os.path.join("src", relative_extension) + if os.path.exists(dest_path): + os.remove(dest_path) + shutil.copyfile(output, dest_path) diff --git a/controller/poetry.lock b/controller/poetry.lock index 1ae368b..6863ba2 100644 --- a/controller/poetry.lock +++ b/controller/poetry.lock @@ -40,14 +40,14 @@ files = [ [[package]] name = "anyio" -version = "3.6.2" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", 
hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] [package.dependencies] @@ -55,9 +55,9 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16,<0.22)"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] [[package]] name = "attrs" @@ -102,91 +102,6 @@ files = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -[[package]] -name = "charset-normalizer" -version = "3.1.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, -] - [[package]] name = "colorama" version = "0.4.6" @@ -201,63 +116,72 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = 
"coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = 
"sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = 
"coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = 
"coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = 
"coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + 
{file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.extras] @@ -354,19 +278,19 @@ files = [ [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.2" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock 
(>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "freezegun" @@ -409,52 +333,48 @@ files = [ [[package]] name = "h5py" -version = "3.8.0" +version = "3.9.0" description = "Read and write HDF5 files from Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "h5py-3.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:533d7dad466ddb7e3b30af274b630eb7c1a6e4ddf01d1c373a0334dc2152110a"}, - {file = "h5py-3.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c873ba9fd4fa875ad62ce0e4891725e257a8fe7f5abdbc17e51a5d54819be55c"}, - {file = "h5py-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98a240cd4c1bfd568aaa52ec42d263131a2582dab82d74d3d42a0d954cac12be"}, - {file = "h5py-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3389b63222b1c7a158bb7fe69d11ca00066740ec5574596d47a2fe5317f563a"}, - {file = "h5py-3.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:7f3350fc0a8407d668b13247861c2acd23f7f5fe7d060a3ad9b0820f5fcbcae0"}, - {file = "h5py-3.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db03e3f2c716205fbdabb34d0848459840585225eb97b4f08998c743821ca323"}, - {file = "h5py-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36761693efbe53df179627a775476dcbc37727d6e920958277a7efbc18f1fb73"}, - {file = "h5py-3.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a506fc223def428f4329e7e1f9fe1c8c593eab226e7c0942c8d75308ad49950"}, - {file = "h5py-3.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33b15aae79e9147aebe1d0e54099cbcde8d65e3e227cd5b59e49b1272aa0e09d"}, - {file = "h5py-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f6f6ffadd6bfa9b2c5b334805eb4b19ca0a5620433659d8f7fb86692c40a359"}, - {file = "h5py-3.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8f55d9c6c84d7d09c79fb85979e97b81ec6071cc776a97eb6b96f8f6ec767323"}, - {file = 
"h5py-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b685453e538b2b5934c58a644ac3f3b3d0cec1a01b6fb26d57388e9f9b674ad0"}, - {file = "h5py-3.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:377865821fe80ad984d003723d6f8890bd54ceeb5981b43c0313b9df95411b30"}, - {file = "h5py-3.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0fef76e10b9216657fa37e7edff6d8be0709b25bd5066474c229b56cf0098df9"}, - {file = "h5py-3.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ffc344ec9984d2cd3ca0265007299a8bac8d85c1ad48f4639d8d3aed2af171"}, - {file = "h5py-3.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bacaa1c16810dd2b3e4417f8e730971b7c4d53d234de61fe4a918db78e80e1e4"}, - {file = "h5py-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bae730580ae928de409d63cbe4fdca4c82c3ad2bed30511d19d34e995d63c77e"}, - {file = "h5py-3.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47f757d1b76f0ecb8aa0508ec8d1b390df67a8b67ee2515dc1b046f3a1596ea"}, - {file = "h5py-3.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f891b17e3a3e974e93f9e34e7cca9f530806543571ce078998676a555837d91d"}, - {file = "h5py-3.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:290e00fa2de74a10688d1bac98d5a9cdd43f14f58e562c580b5b3dfbd358ecae"}, - {file = "h5py-3.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:03890b1c123d024fb0239a3279737d5432498c1901c354f8b10d8221d1d16235"}, - {file = "h5py-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7865de06779b14d98068da387333ad9bf2756b5b579cc887fac169bc08f87c3"}, - {file = "h5py-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49bc857635f935fa30e92e61ac1e87496df8f260a6945a3235e43a9890426866"}, - {file = "h5py-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:5fd2252d1fc364ba0e93dd0b7089f4906b66805cb4e6aca7fa8874ac08649647"}, - {file = "h5py-3.8.0.tar.gz", hash = 
"sha256:6fead82f0c4000cf38d53f9c030780d81bfa0220218aee13b90b7701c937d95f"}, + {file = "h5py-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb7bdd5e601dd1739698af383be03f3dad0465fe67184ebd5afca770f50df9d6"}, + {file = "h5py-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78e44686334cbbf2dd21d9df15823bc38663f27a3061f6a032c68a3e30c47bf7"}, + {file = "h5py-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f68b41efd110ce9af1cbe6fa8af9f4dcbadace6db972d30828b911949e28fadd"}, + {file = "h5py-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12aa556d540f11a2cae53ea7cfb94017353bd271fb3962e1296b342f6550d1b8"}, + {file = "h5py-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d97409e17915798029e297a84124705c8080da901307ea58f29234e09b073ddc"}, + {file = "h5py-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:551e358db05a874a0f827b22e95b30092f2303edc4b91bb62ad2f10e0236e1a0"}, + {file = "h5py-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6822a814b9d8b8363ff102f76ea8d026f0ca25850bb579d85376029ee3e73b93"}, + {file = "h5py-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54f01202cdea754ab4227dd27014bdbd561a4bbe4b631424fd812f7c2ce9c6ac"}, + {file = "h5py-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64acceaf6aff92af091a4b83f6dee3cf8d3061f924a6bb3a33eb6c4658a8348b"}, + {file = "h5py-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:804c7fb42a34c8ab3a3001901c977a5c24d2e9c586a0f3e7c0a389130b4276fc"}, + {file = "h5py-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8d9492391ff5c3c80ec30ae2fe82a3f0efd1e750833739c25b0d090e3be1b095"}, + {file = "h5py-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da9e7e63376c32704e37ad4cea2dceae6964cee0d8515185b3ab9cbd6b947bc"}, + {file = "h5py-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a4e20897c88759cbcbd38fb45b507adc91af3e0f67722aa302d71f02dd44d286"}, + {file = "h5py-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf5225543ca35ce9f61c950b73899a82be7ba60d58340e76d0bd42bf659235a"}, + {file = "h5py-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:36408f8c62f50007d14e000f9f3acf77e103b9e932c114cbe52a3089e50ebf94"}, + {file = "h5py-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:23e74b878bbe1653ab34ca49b83cac85529cd0b36b9d625516c5830cc5ca2eac"}, + {file = "h5py-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f457089c5d524b7998e3649bc63240679b8fb0a3859ea53bbb06841f3d755f1"}, + {file = "h5py-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6284061f3214335e1eec883a6ee497dbe7a79f19e6a57fed2dd1f03acd5a8cb"}, + {file = "h5py-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7a745efd0d56076999b52e8da5fad5d30823bac98b59c68ae75588d09991a"}, + {file = "h5py-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:79bbca34696c6f9eeeb36a91776070c49a060b2879828e2c8fa6c58b8ed10dd1"}, + {file = "h5py-3.9.0.tar.gz", hash = "sha256:e604db6521c1e367c6bd7fad239c847f53cc46646f2d2651372d05ae5e95f817"}, ] [package.dependencies] -numpy = ">=1.14.5" +numpy = ">=1.17.3" [[package]] name = "httpcore" -version = "0.17.2" +version = "0.17.3" description = "A minimal low-level HTTP client." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.17.2-py3-none-any.whl", hash = "sha256:5581b9c12379c4288fe70f43c710d16060c10080617001e6b22a3b6dbcbefd36"}, - {file = "httpcore-0.17.2.tar.gz", hash = "sha256:125f8375ab60036db632f34f4b627a9ad085048eef7cb7d2616fea0f739f98af"}, + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, ] [package.dependencies] @@ -594,36 +514,36 @@ immutable-data-validation = ">=0.2.1" [[package]] name = "llvmlite" -version = "0.40.0" +version = "0.40.1" description = "lightweight wrapper around basic LLVM functionality" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "llvmlite-0.40.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a46db1ed219d93ef05245ec17cf243074ec2b2687209cb310a803a2c2510dc"}, - {file = "llvmlite-0.40.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b9d742b66023298532d0e7beddd3d9f04334c046df7a02a1ec2ba8b4046a978c"}, - {file = "llvmlite-0.40.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ff38c309dc758b996d556e599e00647e6b8dbd21125c06b2d0584a9984a2288"}, - {file = "llvmlite-0.40.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66ecb8cdee35bbbdad9b331f446641977645de1973f6270bf4194307a1753666"}, - {file = "llvmlite-0.40.0-cp310-cp310-win32.whl", hash = "sha256:83dd5148f6ddd4d35585b69ebaa50605fdf8011a5b7259a0463afd4aefc62414"}, - {file = "llvmlite-0.40.0-cp310-cp310-win_amd64.whl", hash = "sha256:f72d6ccbfd9cc7da43098fcef23ffbe173ce2d986215072dbb2e7929412f9ff8"}, - {file = "llvmlite-0.40.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbf19077144e159406ef222348d5330d5061177fb79d3f7f82abf2cf29b77c0b"}, - {file = "llvmlite-0.40.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a4732d6c981f658f014dd2ab2b682ac631cd12a6695e77c2d460cc68dc767868"}, - {file = "llvmlite-0.40.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2754c4d2b6f027ab45425abd94dee4cbd228b598531b1e9e1fc15f3298265d88"}, - {file = "llvmlite-0.40.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb79b992bdc2e62c5f5f86263d5546b5298d498e7c1a9d64b3a6f0d31f46ba5b"}, - {file = "llvmlite-0.40.0-cp311-cp311-win_amd64.whl", hash = "sha256:be0ff5b68a86e47a7ec6cd5389bb17b4b8f020b981628c9e714dc2cfdbe89c86"}, - {file = "llvmlite-0.40.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f5d4445eccd9c9c5639b35cb6279231f97cbd77a1c49fb41c05081ff96e041db"}, - {file = "llvmlite-0.40.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:060f00611d8e65d6db80cabba17fbefde9ebefbfb6937fe5677f06cd3e7bbe3c"}, - {file = "llvmlite-0.40.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58f5ba5febb2372418a3d37bd76d51bb987276a6fd979c2f2772b60b9061e575"}, - {file = "llvmlite-0.40.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d1622237e6ce543ac185751f782c7e10cabe45abf2de802cd5dca8023805a5c"}, - {file = "llvmlite-0.40.0-cp38-cp38-win32.whl", hash = "sha256:06803a1a38f911576bbe63a4082334d6661c59f2080e4681de1c66ec4924b0ac"}, - {file = "llvmlite-0.40.0-cp38-cp38-win_amd64.whl", hash = "sha256:87c2114567f95c715ae35b03d82caa0df66a978c93a1ff752964949e9ce596d5"}, - {file = "llvmlite-0.40.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a3382d81fcda57f5502f45a9ca62e0c9103fabd5f817c9820c7e61b9375f3d7"}, - {file = "llvmlite-0.40.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:260b0241c17a1ec585020e1df58ed30b9975c3573c619fa1724ceb4cd53cbe42"}, - {file = "llvmlite-0.40.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f980992b6c9dfee20a1608c5a4d875f8a52d76353ca02470550a85be6e5d3680"}, - {file = 
"llvmlite-0.40.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52eee9e245ef6eb911d6c2a3a1a66378745a40c637284386031b0915754f457e"}, - {file = "llvmlite-0.40.0-cp39-cp39-win32.whl", hash = "sha256:d27c2ec699b820296659dfd36ead1c527eb190c6d5cb3de24bfbee1024bdc20a"}, - {file = "llvmlite-0.40.0-cp39-cp39-win_amd64.whl", hash = "sha256:6cf84141d1793c69285b88acf4216370cb831eab99778546a2a9002fadac932d"}, - {file = "llvmlite-0.40.0.tar.gz", hash = "sha256:c910b8fbfd67b8e9d0b10ebc012b23cd67cbecef1b96f00d391ddd298d71671c"}, + {file = "llvmlite-0.40.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:84ce9b1c7a59936382ffde7871978cddcda14098e5a76d961e204523e5c372fb"}, + {file = "llvmlite-0.40.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3673c53cb21c65d2ff3704962b5958e967c6fc0bd0cff772998face199e8d87b"}, + {file = "llvmlite-0.40.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba2747cf5b4954e945c287fe310b3fcc484e2a9d1b0c273e99eb17d103bb0e6"}, + {file = "llvmlite-0.40.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd5e82cc990e5a3e343a3bf855c26fdfe3bfae55225f00efd01c05bbda79918"}, + {file = "llvmlite-0.40.1-cp310-cp310-win32.whl", hash = "sha256:09f83ea7a54509c285f905d968184bba00fc31ebf12f2b6b1494d677bb7dde9b"}, + {file = "llvmlite-0.40.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b37297f3cbd68d14a97223a30620589d98ad1890e5040c9e5fc181063f4ed49"}, + {file = "llvmlite-0.40.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a66a5bd580951751b4268f4c3bddcef92682814d6bc72f3cd3bb67f335dd7097"}, + {file = "llvmlite-0.40.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:467b43836b388eaedc5a106d76761e388dbc4674b2f2237bc477c6895b15a634"}, + {file = "llvmlite-0.40.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c23edd196bd797dc3a7860799054ea3488d2824ecabc03f9135110c2e39fcbc"}, + {file = 
"llvmlite-0.40.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36d9f244b6680cb90bbca66b146dabb2972f4180c64415c96f7c8a2d8b60a36"}, + {file = "llvmlite-0.40.1-cp311-cp311-win_amd64.whl", hash = "sha256:5b3076dc4e9c107d16dc15ecb7f2faf94f7736cd2d5e9f4dc06287fd672452c1"}, + {file = "llvmlite-0.40.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a7525db121f2e699809b539b5308228854ccab6693ecb01b52c44a2f5647e20"}, + {file = "llvmlite-0.40.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:84747289775d0874e506f907a4513db889471607db19b04de97d144047fec885"}, + {file = "llvmlite-0.40.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e35766e42acef0fe7d1c43169a8ffc327a47808fae6a067b049fe0e9bbf84dd5"}, + {file = "llvmlite-0.40.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cda71de10a1f48416309e408ea83dab5bf36058f83e13b86a2961defed265568"}, + {file = "llvmlite-0.40.1-cp38-cp38-win32.whl", hash = "sha256:96707ebad8b051bbb4fc40c65ef93b7eeee16643bd4d579a14d11578e4b7a647"}, + {file = "llvmlite-0.40.1-cp38-cp38-win_amd64.whl", hash = "sha256:e44f854dc11559795bcdeaf12303759e56213d42dabbf91a5897aa2d8b033810"}, + {file = "llvmlite-0.40.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f643d15aacd0b0b0dc8b74b693822ba3f9a53fa63bc6a178c2dba7cc88f42144"}, + {file = "llvmlite-0.40.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39a0b4d0088c01a469a5860d2e2d7a9b4e6a93c0f07eb26e71a9a872a8cadf8d"}, + {file = "llvmlite-0.40.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9329b930d699699846623054121ed105fd0823ed2180906d3b3235d361645490"}, + {file = "llvmlite-0.40.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2dbbb8424037ca287983b115a29adf37d806baf7e1bf4a67bd2cffb74e085ed"}, + {file = "llvmlite-0.40.1-cp39-cp39-win32.whl", hash = "sha256:e74e7bec3235a1e1c9ad97d897a620c5007d0ed80c32c84c1d787e7daa17e4ec"}, + {file = 
"llvmlite-0.40.1-cp39-cp39-win_amd64.whl", hash = "sha256:ff8f31111bb99d135ff296757dc81ab36c2dee54ed4bd429158a96da9807c316"}, + {file = "llvmlite-0.40.1.tar.gz", hash = "sha256:5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4"}, ] [[package]] @@ -839,30 +759,30 @@ files = [ [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] @@ -890,22 +810,22 @@ virtualenv = ">=20.10.0" [[package]] name = "pulse3d" -version = "0.33.5" +version = "0.33.10" description = "Pulse3D Analysis Platform" category = "main" optional = false python-versions = "*" files = [ - {file = "Pulse3D-0.33.5-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:fc14bb780a13033e2283179ee3d09c21ab5f68ded872a4b4b1d40c4f3b17a17a"}, - {file = "Pulse3D-0.33.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:baf8fdb9bb3cbd49f5c0636cd7ad9ef9fc005555d0d70f1c7b9192f7f87cd435"}, - {file = "Pulse3D-0.33.5-cp310-cp310-win_amd64.whl", hash = "sha256:d23e2433660535929659aa998cc406cfdb8876aa5343606071ad47c02dabc7be"}, - {file = "Pulse3D-0.33.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26c5177d5e00e32b5b94778f0902400edccddc5f905759bb8b3fa2fde542a9ca"}, - {file = "Pulse3D-0.33.5-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:697d6269263d615ca18a34578a78414e1f5506e5940177124319e2fdd6df10ed"}, - {file = "Pulse3D-0.33.5-cp311-cp311-win_amd64.whl", hash = "sha256:af003b089eb92f707cf377ae4b0188bf5467cf4b2bc4e2b88e85e96a074f06a9"}, - {file = "Pulse3D-0.33.5-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a0938297ba173044754ae775d73f2156c0b172765a7057361414ba3ee98267c4"}, - {file = "Pulse3D-0.33.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4ce44bc1264f8f8ef9d9f89ee5db3ff8fa1600060c4b6963d552a024f5ca8b77"}, - {file = "Pulse3D-0.33.5-cp39-cp39-win_amd64.whl", hash = "sha256:1619bad73a51f9532e255786a58dfb43ebf5564d11193ee88384e109b5cb7867"}, - {file = "Pulse3D-0.33.5.tar.gz", hash = "sha256:f99987413e528a8ae5bbe79d25439e80cd8dba88af44aa2e045c121d3559da94"}, + {file = 
"Pulse3D-0.33.10-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:55b3ccf7fcc83aa0cd0e915cd6d3a528a4e45710e81a60f536cf7d4a59e3b5b7"}, + {file = "Pulse3D-0.33.10-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:97a692f3b05598480b735d507efe283fc2a532b0e54e8229a66d33ba49e21fd9"}, + {file = "Pulse3D-0.33.10-cp310-cp310-win_amd64.whl", hash = "sha256:c115653521b39b16a5182106185c865c53ad178db6faa4b7b9262d4fd4f071c9"}, + {file = "Pulse3D-0.33.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:48f23d2afb5fc44796d2a8cc3a7bb086c4fc6d5dc6a597d85715d1747997d657"}, + {file = "Pulse3D-0.33.10-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:47c92de5c2869f304a7e2aa7c34c6b00f20803db86b5ee64939bddaca1d651f1"}, + {file = "Pulse3D-0.33.10-cp311-cp311-win_amd64.whl", hash = "sha256:364a1b6b0b590744a3291bd0c306c291ec1c58e03781c7834035e56acc40ba6b"}, + {file = "Pulse3D-0.33.10-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:cc44a6af50b0f997727e9f74319f29ca3e70afa2a187b7a90c78d4a90a8228ae"}, + {file = "Pulse3D-0.33.10-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:803fcdeee12a2d8f218a93b3c07810117e9894085bd57044d642647364ef858c"}, + {file = "Pulse3D-0.33.10-cp39-cp39-win_amd64.whl", hash = "sha256:21e97d5e63fb72b760bc5989a6e370b3f64e79631676920729653886e4eef418"}, + {file = "Pulse3D-0.33.10.tar.gz", hash = "sha256:3438e945bc7b5358d624fd898b74b21771008dc41209466b9d29a2a4cafe28b3"}, ] [package.dependencies] @@ -918,7 +838,6 @@ numpy = ">=1.23.4" openpyxl = ">=3.0.7" pandas = "1.5.3" pyarrow = "12.0.0" -requests = ">=2.25.1" scipy = "1.9.3" semver = ">=2.13.0" stdlib-utils = ">=0.4.4" @@ -964,24 +883,24 @@ numpy = ">=1.16.6" [[package]] name = "pyinstaller" -version = "5.8.0" +version = "5.13.0" description = "PyInstaller bundles a Python application and all its dependencies into a single package." 
category = "dev" optional = false -python-versions = "<3.12,>=3.7" +python-versions = "<3.13,>=3.7" files = [ - {file = "pyinstaller-5.8.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:502a2166165a8e8c3d99c19272e923d2548bac2132424d78910ef9dd8bb11705"}, - {file = "pyinstaller-5.8.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:bf1f7b7e88b467d7aefcdb2bc9cbd2e856ca88c5ab232c0efe0848f146d3bd5f"}, - {file = "pyinstaller-5.8.0-py3-none-manylinux2014_i686.whl", hash = "sha256:a62ee598b137202ef2e99d8dbaee6bc7379a6565c3ddf0331decb41b98eff1a2"}, - {file = "pyinstaller-5.8.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e68bcadf32edc1171ccb06117699a6a4f8e924b7c2c8812cfa00fd0186ade4ee"}, - {file = "pyinstaller-5.8.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ded780f0d3642d7bfc21d97b98d4ec4b41d2fe70c3f5c5d243868612f536e011"}, - {file = "pyinstaller-5.8.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9361eff44c7108c2312f39d85ed768c4ada7e0aa729046bbcef3ef3c1577d18"}, - {file = "pyinstaller-5.8.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5c9632a20faecd6d79f0124afb31e6557414d19be271e572765b474f860f8d76"}, - {file = "pyinstaller-5.8.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:8d004699c5d71c704c14a5f81eec233faa4f87a3bf0ae68e222b87d63f5dd17e"}, - {file = "pyinstaller-5.8.0-py3-none-win32.whl", hash = "sha256:3b74f50a57b1413047042e47033480b7324b091f23dff790a4494af32b377d94"}, - {file = "pyinstaller-5.8.0-py3-none-win_amd64.whl", hash = "sha256:4f4d818588e2d8de4bf24ed018056c3de0c95898ad25719e12d68626161b4933"}, - {file = "pyinstaller-5.8.0-py3-none-win_arm64.whl", hash = "sha256:bacf236b5c2f8f674723a39daca399646dceb470881f842f52e393b9a67ff2f8"}, - {file = "pyinstaller-5.8.0.tar.gz", hash = "sha256:314fb883caf3cbf06adbea2b77671bb73c3481568e994af0467ea7e47eb64755"}, + {file = "pyinstaller-5.13.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:7fdd319828de679f9c5e381eff998ee9b4164bf4457e7fca56946701cf002c3f"}, + {file = 
"pyinstaller-5.13.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0df43697c4914285ecd333be968d2cd042ab9b2670124879ee87931d2344eaf5"}, + {file = "pyinstaller-5.13.0-py3-none-manylinux2014_i686.whl", hash = "sha256:28d9742c37e9fb518444b12f8c8ab3cb4ba212d752693c34475c08009aa21ccf"}, + {file = "pyinstaller-5.13.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e5fb17de6c325d3b2b4ceaeb55130ad7100a79096490e4c5b890224406fa42f4"}, + {file = "pyinstaller-5.13.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:78975043edeb628e23a73fb3ef0a273cda50e765f1716f75212ea3e91b09dede"}, + {file = "pyinstaller-5.13.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:cd7d5c06f2847195a23d72ede17c60857d6f495d6f0727dc6c9bc1235f2eb79c"}, + {file = "pyinstaller-5.13.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:24009eba63cfdbcde6d2634e9c87f545eb67249ddf3b514e0cd3b2cdaa595828"}, + {file = "pyinstaller-5.13.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1fde4381155f21d6354dc450dcaa338cd8a40aaacf6bd22b987b0f3e1f96f3ee"}, + {file = "pyinstaller-5.13.0-py3-none-win32.whl", hash = "sha256:2d03419904d1c25c8968b0ad21da0e0f33d8d65716e29481b5bd83f7f342b0c5"}, + {file = "pyinstaller-5.13.0-py3-none-win_amd64.whl", hash = "sha256:9fc27c5a853b14a90d39c252707673c7a0efec921cd817169aff3af0fca8c127"}, + {file = "pyinstaller-5.13.0-py3-none-win_arm64.whl", hash = "sha256:3a331951f9744bc2379ea5d65d36f3c828eaefe2785f15039592cdc08560b262"}, + {file = "pyinstaller-5.13.0.tar.gz", hash = "sha256:5e446df41255e815017d96318e39f65a3eb807e74a796c7e7ff7f13b6366a2e9"}, ] [package.dependencies] @@ -989,7 +908,7 @@ altgraph = "*" macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""} pyinstaller-hooks-contrib = ">=2021.4" -pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""} setuptools = ">=42.0.0" 
[package.extras] @@ -998,14 +917,14 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] [[package]] name = "pyinstaller-hooks-contrib" -version = "2023.3" +version = "2023.5" description = "Community maintained hooks for PyInstaller" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pyinstaller-hooks-contrib-2023.3.tar.gz", hash = "sha256:bb39e1038e3e0972420455e0b39cd9dce73f3d80acaf4bf2b3615fea766ff370"}, - {file = "pyinstaller_hooks_contrib-2023.3-py2.py3-none-any.whl", hash = "sha256:062ad7a1746e1cfc24d3a8c4be4e606fced3b123bda7d419f14fcf7507804b07"}, + {file = "pyinstaller-hooks-contrib-2023.5.tar.gz", hash = "sha256:cca6cdc31e739954b5bbbf05ef3f71fe448e9cdacad3a2197243bcf99bea2c00"}, + {file = "pyinstaller_hooks_contrib-2023.5-py2.py3-none-any.whl", hash = "sha256:e60185332a6b56691f471d364e9e9405b03091ca27c96e0dbebdedb7624457fd"}, ] [[package]] @@ -1217,14 +1136,14 @@ files = [ [[package]] name = "pywin32-ctypes" -version = "0.2.0" -description = "" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, - {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] [[package]] @@ -1277,28 +1196,6 @@ files = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -[[package]] -name = "requests" -version = "2.28.2" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=3.7, <4" -files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - [[package]] name = "scipy" version = "1.9.3" @@ -1352,14 +1249,14 @@ files = [ [[package]] name = "setuptools" -version = "67.8.0" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] @@ -1417,23 +1314,6 @@ files = [ [package.extras] test = ["codecov", "coverage", "mypy", "nptyping (>=1.3.0)", "numpy", "pycodestyle", "pylint", "pytest"] -[[package]] -name = "urllib3" -version = "1.26.16" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - [[package]] name = "validator-collection" version = "1.5.0" @@ -1455,24 +1335,24 @@ test = ["codecov", "coverage", "pyfakefs", "pytest", "pytest-benchmark", "pytest [[package]] name = "virtualenv" -version = "20.23.0" +version = "20.23.1" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, - {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, + {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, + {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.11,<4" -platformdirs = ">=3.2,<4" +filelock = ">=3.12,<4" +platformdirs = ">=3.5.1,<4" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", 
"pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] [[package]] name = "websockets" @@ -1555,17 +1435,17 @@ files = [ [[package]] name = "xlsxwriter" -version = "3.1.1" +version = "3.1.2" description = "A Python module for creating Excel XLSX files." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "XlsxWriter-3.1.1-py3-none-any.whl", hash = "sha256:b50e3bd905d7dafa6ea45210e2cc5600b4ccd104a0d3a4d4d7cf813b78426440"}, - {file = "XlsxWriter-3.1.1.tar.gz", hash = "sha256:03459ee76f664470c4c63a8977cab624fb259d0fc1faac64dc9cc6f3cc08f945"}, + {file = "XlsxWriter-3.1.2-py3-none-any.whl", hash = "sha256:331508ff39d610ecdaf979e458840bc1eab6e6a02cfd5d08f044f0f73636236f"}, + {file = "XlsxWriter-3.1.2.tar.gz", hash = "sha256:78751099a770273f1c98b8d6643351f68f98ae8e6acf9d09d37dc6798f8cd3de"}, ] [metadata] lock-version = "2.0" python-versions = "~3.11.3" -content-hash = "c8f6eb870a595c68f47a4c69f0a019c608415422068b5eb2a467667d116c588f" +content-hash = "6dc243408801fc7410e2bdcae3e5f7d83187171004d79913f50e148325123e49" diff --git a/controller/pyproject.toml b/controller/pyproject.toml index beceb60..a513285 100644 --- a/controller/pyproject.toml +++ b/controller/pyproject.toml @@ -15,10 +15,11 @@ httpx = "0.24.1" aioserial = "1.3.1" Cython = "0.29.34" immutabledict = "2.2.3" +jsonschema = "4.17.3" # Tanner (7/7/23): pinning this to avoid issues 
with pyinstaller. Can probably remove this dependency entirely once labware-domain-models is removed labware-domain-models = "0.3.1" numpy = "1.23.5" # pinned for pulse3d # psutil = "5.9.4" -pulse3d = "0.33.5" +pulse3d = "0.33.10" pyserial = "3.5" semver = "2.13.0" stdlib-utils = "0.5.2" @@ -33,7 +34,7 @@ aioconsole = "0.6.0" freezegun = "1.2.2" # pefile = "2023.2.7" # Tanner (2/24/23): this must be explicitly specified so that it will be included in a Windows build environment pre-commit = "3.1.1" -pyinstaller = "5.8.0" +pyinstaller = "5.13.0" pytest = "7.2.1" pytest-asyncio = "0.20.3" pytest-cov = "4.0.0" @@ -42,7 +43,6 @@ pytest-profiling = "1.7.0" pytest-randomly = "3.12.0" pytest-timeout = "2.1.0" # pywin32-ctypes = "0.2.0" # Tanner (2/24/23): this must be explicitly specified so that it will be included in a Windows build environment -requests = "2.28.2" [build-system] requires = ["poetry-core", "setuptools", "Cython", "numpy"] @@ -68,7 +68,7 @@ ignore_missing_imports = true [tool.pytest.ini_options] -addopts = "--cov=controller --cov-report html --cov-branch --cov-report term-missing:skip-covered --cov-fail-under=46" +addopts = "--cov=controller --cov-report html --cov-branch --cov-report term-missing:skip-covered --cov-fail-under=53" markers = [ "only_run_in_ci", # marks tests that only need to be run during full Continuous Integration testing environment (select to run with '--full-ci' if conftest.py configured) "slow", # marks tests that take a bit longer to run, but can be run during local development (select to run with '--include-slow-tests' if conftest.py configured) diff --git a/controller/src/controller/constants.py b/controller/src/controller/constants.py index fe7c65b..e6f8b78 100644 --- a/controller/src/controller/constants.py +++ b/controller/src/controller/constants.py @@ -11,6 +11,8 @@ from labware_domain_models import LabwareDefinition +# TODO organize this file + # General CURRENT_SOFTWARE_VERSION = "REPLACETHISWITHVERSIONDURINGBUILD" 
COMPILED_EXE_BUILD_TIMESTAMP = "REPLACETHISWITHTIMESTAMPDURINGBUILD" @@ -18,9 +20,12 @@ DEFAULT_SERVER_PORT_NUMBER = 4565 +CURRENT_RECORDING_FILE_VERSION = "2.0.0" + NUM_WELLS = 24 GENERIC_24_WELL_DEFINITION = LabwareDefinition(row_count=4, column_count=6) +RECORDINGS_SUBDIR = "recordings" FW_UPDATE_SUBDIR = "firmware_updates" AuthTokens = namedtuple("AuthTokens", ["access", "refresh"]) @@ -30,6 +35,7 @@ VALID_CREDENTIAL_TYPES = frozenset(AuthCreds._fields) VALID_CONFIG_SETTINGS = frozenset(ConfigSettings._fields) +BARCODE_LEN = 12 # TODO try replacing all immutabledicts with enums BARCODE_HEADERS: immutabledict[str, str] = immutabledict({"plate_barcode": "ML", "stim_barcode": "MS"}) ALL_VALID_BARCODE_HEADERS = frozenset(BARCODE_HEADERS.values()) @@ -53,17 +59,23 @@ class SystemStatuses(Enum): # boot up states - SERVER_INITIALIZING_STATE = uuid.UUID("04471bcf-1a00-4a0d-83c8-4160622f9a25") - SERVER_READY_STATE = uuid.UUID("8e24ef4d-2353-4e9d-aa32-4346126e73e3") - SYSTEM_INITIALIZING_STATE = uuid.UUID("d2e3d386-b760-4c9a-8b2d-410362ff11c4") - CHECKING_FOR_UPDATES_STATE = uuid.UUID("04fd6f6b-ee9e-4656-aae4-0b9584791f36") + SERVER_INITIALIZING = uuid.UUID("04471bcf-1a00-4a0d-83c8-4160622f9a25") + SERVER_READY = uuid.UUID("8e24ef4d-2353-4e9d-aa32-4346126e73e3") + SYSTEM_INITIALIZING = uuid.UUID("d2e3d386-b760-4c9a-8b2d-410362ff11c4") + CHECKING_FOR_UPDATES = uuid.UUID("04fd6f6b-ee9e-4656-aae4-0b9584791f36") + # initial set up states + CALIBRATION_NEEDED = uuid.UUID("009301eb-625c-4dc4-9e92-1a4d0762465f") + CALIBRATING = uuid.UUID("43c08fc5-ca2f-4dcd-9dff-5e9324cb5dbf") # normal operation states - IDLE_READY_STATE = uuid.UUID("009301eb-625c-4dc4-9e92-1a4d0762465f") + IDLE_READY = uuid.UUID("b480373b-9466-4fa0-92a6-fa5f8e340d30") + BUFFERING = uuid.UUID("dc774d4b-6bd1-4717-b36e-6df6f1ef6cf4") + LIVE_VIEW_ACTIVE = uuid.UUID("9fbee58e-c6af-49a5-b2e2-5b085eead2ea") + RECORDING = uuid.UUID("1e3d76a2-508d-4c99-8bf5-60dac5cc51fe") # updating states - UPDATES_NEEDED_STATE = 
uuid.UUID("d6dcf2a9-b6ea-4d4e-9423-500f91a82a2f") - DOWNLOADING_UPDATES_STATE = uuid.UUID("b623c5fa-af01-46d3-9282-748e19fe374c") - INSTALLING_UPDATES_STATE = uuid.UUID("19c9c2d6-0de4-4334-8cb3-a4c7ab0eab00") - UPDATES_COMPLETE_STATE = uuid.UUID("31f8fbc9-9b41-4191-8598-6462b7490789") + UPDATES_NEEDED = uuid.UUID("d6dcf2a9-b6ea-4d4e-9423-500f91a82a2f") + DOWNLOADING_UPDATES = uuid.UUID("b623c5fa-af01-46d3-9282-748e19fe374c") + INSTALLING_UPDATES = uuid.UUID("19c9c2d6-0de4-4334-8cb3-a4c7ab0eab00") + UPDATES_COMPLETE = uuid.UUID("31f8fbc9-9b41-4191-8598-6462b7490789") class StimulationStates(Enum): @@ -209,6 +221,24 @@ class SerialCommPacketTypes(IntEnum): GOING_DORMANT_HANDSHAKE_TIMEOUT_CODE = 0 +# Magnetometer configuration +SERIAL_COMM_SENSOR_AXIS_LOOKUP_TABLE: immutabledict[str, dict[str, int]] = immutabledict( + { + "A": {"X": 0, "Y": 1, "Z": 2}, + "B": {"X": 3, "Y": 4, "Z": 5}, + "C": {"X": 6, "Y": 7, "Z": 8}, + } +) +NUM_CHANNELS_PER_MAG_SENSOR = 3 +NUM_MAG_SENSORS_PER_WELL = 3 +NUM_MAG_DATA_CHANNELS_PER_WELL = NUM_CHANNELS_PER_MAG_SENSOR * NUM_MAG_SENSORS_PER_WELL + +DEFAULT_MAG_DATA_CHANNEL = SERIAL_COMM_SENSOR_AXIS_LOOKUP_TABLE["A"]["Z"] +DEFAULT_MAG_SAMPLING_PERIOD = 10000 # valid as of 4/12/23 +NUM_MAG_DATA_PACKETS_PER_SECOND = MICRO_TO_BASE_CONVERSION // DEFAULT_MAG_SAMPLING_PERIOD + +NUM_INITIAL_MAG_PACKETS_TO_DROP = 2 + # Stimulation STIM_MAX_ABSOLUTE_CURRENT_MICROAMPS = int(100e3) STIM_MAX_ABSOLUTE_VOLTAGE_MILLIVOLTS = int(1.2e3) @@ -307,3 +337,10 @@ class StimProtocolStatuses(IntEnum): STIM_WELL_IDX_TO_MODULE_ID: immutabledict[int, int] = immutabledict( {well_idx: module_id for module_id, well_idx in STIM_MODULE_ID_TO_WELL_IDX.items()} ) + + +# Recording +CALIBRATION_RECORDING_DUR_SECONDS = 30 + +FILE_WRITER_BUFFER_SIZE_SECONDS = 30 +FILE_WRITER_BUFFER_SIZE_MILLISECONDS = FILE_WRITER_BUFFER_SIZE_SECONDS * MICRO_TO_BASE_CONVERSION diff --git a/controller/src/controller/exceptions.py b/controller/src/controller/exceptions.py index eb7e162..65f923c 
100644 --- a/controller/src/controller/exceptions.py +++ b/controller/src/controller/exceptions.py @@ -13,6 +13,10 @@ class WebsocketCommandError(Exception): pass +class WebsocketCommandNoOpException(Exception): + pass + + class ElectronControllerVersionMismatchError(Exception): pass diff --git a/controller/src/controller/main.py b/controller/src/controller/main.py index 62cd6c9..6bc4586 100644 --- a/controller/src/controller/main.py +++ b/controller/src/controller/main.py @@ -4,11 +4,9 @@ import argparse import asyncio -import hashlib import logging import os import platform -import socket import sys from typing import Any import uuid @@ -28,6 +26,7 @@ from .subsystems.cloud_comm import CloudComm from .subsystems.instrument_comm import InstrumentComm from .utils.aio import wait_tasks_clean +from .utils.generic import get_hash_of_computer_name from .utils.logging import configure_logging from .utils.logging import redact_sensitive_info_from_path from .utils.state_management import SystemStateManager @@ -73,18 +72,26 @@ async def main(command_line_args: list[str]) -> None: system_state_manager = SystemStateManager() await system_state_manager.update(initialize_system_state(parsed_args, log_file_id)) - queues = create_system_queues() + comm_queues = create_system_comm_queues() + data_queues = create_system_data_queues() # create subsystems - system_monitor = SystemMonitor(system_state_manager, queues) + system_monitor = SystemMonitor(system_state_manager, comm_queues) server = Server( - system_state_manager.get_read_only_copy, queues["to"]["server"], queues["from"]["server"] + system_state_manager.get_read_only_copy, + comm_queues["to"]["server"], + comm_queues["from"]["server"], ) instrument_comm_subsystem = InstrumentComm( - queues["to"]["instrument_comm"], queues["from"]["instrument_comm"] + comm_queues["to"]["instrument_comm"], + comm_queues["from"]["instrument_comm"], + data_queues["main"], + data_queues["file_writer"], ) cloud_comm_subsystem = CloudComm( - 
queues["to"]["cloud_comm"], queues["from"]["cloud_comm"], **_get_user_config_settings(parsed_args) + comm_queues["to"]["cloud_comm"], + comm_queues["from"]["cloud_comm"], + **_get_user_config_settings(parsed_args), ) # future for subsystems to set if they experience an error. The server will report the error in the future to the UI @@ -120,14 +127,24 @@ async def main(command_line_args: list[str]) -> None: logger.info("Program exiting") -# TODO consider moving this to a different file -def create_system_queues() -> dict[str, Any]: +# TODO consider moving these two to a different file +def create_system_comm_queues() -> dict[str, Any]: return { - direction: {subsystem: asyncio.Queue() for subsystem in ("server", "instrument_comm", "cloud_comm")} + direction: { + subsystem: asyncio.Queue() + for subsystem in ("server", "instrument_comm", "cloud_comm", "file_writer") + } for direction in ("to", "from") } +def create_system_data_queues() -> dict[str, Any]: + return { + receiving_subsystem: asyncio.Queue() + for receiving_subsystem in ("file_writer", "data_analyzer", "main") + } + + def _parse_cmd_line_args(command_line_args: list[str]) -> dict[str, Any]: parser = argparse.ArgumentParser() parser.add_argument( @@ -178,7 +195,7 @@ def initialize_system_state(parsed_args: dict[str, Any], log_file_id: uuid.UUID) system_state = { # main - "system_status": SystemStatuses.SERVER_INITIALIZING_STATE, + "system_status": SystemStatuses.SERVER_INITIALIZING, "in_simulation_mode": False, "stimulation_protocol_statuses": [], # updating @@ -211,8 +228,6 @@ def _log_system_info() -> None: uname_release = getattr(uname, "release") uname_version = getattr(uname, "version") - computer_name_hash = hashlib.sha512(socket.gethostname().encode(encoding="UTF-8")).hexdigest() - for msg in ( f"System: {uname_sys}", f"Release: {uname_release}", @@ -224,7 +239,7 @@ def _log_system_info() -> None: f"Architecture: {platform.architecture()}", f"Interpreter is 64-bits: {sys.maxsize > 2**32}", f"System 
Alias: {platform.system_alias(uname_sys, uname_release, uname_version)}", - f"SHA512 digest of Computer Name {computer_name_hash}", + f"SHA512 digest of Computer Name {get_hash_of_computer_name()}", ): logger.info(msg) diff --git a/controller/src/controller/main_systems/server.py b/controller/src/controller/main_systems/server.py index 657ac0d..3bac4bb 100644 --- a/controller/src/controller/main_systems/server.py +++ b/controller/src/controller/main_systems/server.py @@ -4,10 +4,13 @@ import functools import json import logging +import os from typing import Any from typing import Awaitable from typing import Callable +import urllib.parse +from pulse3D.constants import NOT_APPLICABLE_H5_METADATA from semver import VersionInfo import websockets from websockets import serve @@ -16,18 +19,21 @@ from ..constants import DEFAULT_SERVER_PORT_NUMBER from ..constants import GENERIC_24_WELL_DEFINITION from ..constants import NUM_WELLS +from ..constants import RECORDINGS_SUBDIR from ..constants import StimulationStates from ..constants import StimulatorCircuitStatuses from ..constants import SystemStatuses from ..constants import VALID_CREDENTIAL_TYPES from ..constants import VALID_STIMULATION_TYPES from ..exceptions import WebsocketCommandError +from ..exceptions import WebsocketCommandNoOpException from ..utils.aio import clean_up_tasks from ..utils.aio import wait_tasks_clean from ..utils.generic import handle_system_error from ..utils.logging import get_redacted_string from ..utils.state_management import ReadOnlyDict from ..utils.stimulation import validate_stim_subprotocol +from ..utils.validation import check_barcode_for_errors logger = logging.getLogger(__name__) @@ -100,7 +106,7 @@ async def run( raise except BaseException: - # Tanner (4/10/23): don't expected this to be reached, but logging just in case + # Tanner (4/10/23): don't expect this to be reached, but logging just in case logger.exception(ERROR_MSG) finally: await clean_up_tasks({self._serve_task}, 
ERROR_MSG) @@ -185,23 +191,24 @@ async def _consumer(self, websocket: WebSocketServerProtocol) -> None: try: msg = json.loads(await websocket.recv()) except websockets.ConnectionClosed: + logger.error("Failed to read message from UI") return self._log_incoming_message(msg) command = msg["command"] - try: # TODO try using pydantic to define message schema + some other message schema generator (nano message, ask Jason) handler = self._handlers[command] except KeyError as e: raise WebsocketCommandError(f"Unrecognized command from UI: {command}") from e - # TODO make sure the error handling works here try: await handler(self, msg) + except WebsocketCommandNoOpException: + logger.error(f"Command {command} resulted in a no-op") except WebsocketCommandError as e: - logger.error(f"Command {command} failed with error: {e.args[0]}") + e.add_note(f"Command {command} failed") raise def _log_incoming_message(self, msg: dict[str, Any]) -> None: @@ -224,23 +231,27 @@ async def _shutdown(self, *args: Any) -> None: @mark_handler async def _login(self, comm: dict[str, str]) -> None: """Update the customer/user settings.""" - for cred_type in comm: - if cred_type not in VALID_CREDENTIAL_TYPES | {"command"}: - raise WebsocketCommandError(f"Invalid cred type given: {cred_type}") + required_keys = set(VALID_CREDENTIAL_TYPES) | {"command"} + provided_keys = set(comm) + if missing_keys := required_keys - provided_keys: + raise WebsocketCommandError(f"Missing cred type(s): {missing_keys}") + if invalid_keys := provided_keys - required_keys: + raise WebsocketCommandError(f"Invalid cred type(s) given: {invalid_keys}") await self._to_monitor_queue.put(comm) @mark_handler async def _set_latest_software_version(self, comm: dict[str, str]) -> None: """Set the latest available software version.""" + try: version = comm["version"] # check if version is a valid semantic version string. 
ValueError will be raised if not VersionInfo.parse(version) except KeyError: - raise WebsocketCommandError("Version not specified") + raise WebsocketCommandError("Command missing 'version' value") except ValueError: - raise WebsocketCommandError(f"Invalid version string: {version}") + raise WebsocketCommandError(f"Invalid semver: {version}") await self._to_monitor_queue.put(comm) @@ -252,19 +263,152 @@ async def _set_firmware_update_confirmation(self, comm: dict[str, Any]) -> None: await self._to_monitor_queue.put(comm) + @mark_handler + async def _start_calibration(self, comm: dict[str, Any]) -> None: + """Begin magnetometer calibration recording.""" + system_state = self._get_system_state_ro() + + if system_state["system_status"] == SystemStatuses.CALIBRATING: + raise WebsocketCommandNoOpException() + + if system_state["system_status"] not in ( + valid_states := (SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.IDLE_READY) + ): + raise WebsocketCommandError(f"Cannot calibrate unless in {valid_states}") + if _are_stimulator_checks_running(system_state): + raise WebsocketCommandError("Cannot calibrate while stimulator checks are running") + if _are_any_stim_protocols_running(system_state): + raise WebsocketCommandError("Cannot calibrate while stimulation is running") + + await self._to_monitor_queue.put(comm) + + @mark_handler + async def _start_data_stream(self, comm: dict[str, Any]) -> None: + """Start magnetometer data stream.""" + system_state = self._get_system_state_ro() + system_status = system_state["system_status"] + + if _is_data_streaming(system_state): + raise WebsocketCommandNoOpException() + if system_status != SystemStatuses.IDLE_READY: + raise WebsocketCommandError( + f"Cannot start data stream unless in {SystemStatuses.IDLE_READY.name}" + ) + if _are_stimulator_checks_running(system_state): + raise WebsocketCommandError("Cannot start data stream while stimulator checks are running") + + try: + plate_barcode = comm["plate_barcode"] + except 
KeyError: + raise WebsocketCommandError("Command missing 'plate_barcode' value") + if not plate_barcode: + raise WebsocketCommandError("Cannot start data stream without a plate barcode present") + if error_message := check_barcode_for_errors(plate_barcode, "plate_barcode"): + raise WebsocketCommandError(f"Plate {error_message}") + + # TODO import MANTARRAY_SERIAL_NUMBER_UUID as INSTRUMENT_SERIAL_NUMBER_UUID in all files. Same for nickname constant + if not all(system_state["instrument_metadata"].values()): # TODO test this + # TODO make a custom error + code for this and move this handling to instrument_comm so it's handled right after getting metadata + raise WebsocketCommandError("Instrument metadata is incomplete") + + await self._to_monitor_queue.put(comm) + + @mark_handler + async def _stop_data_stream(self, comm: dict[str, Any]) -> None: + """Stop magnetometer data stream.""" + system_state = self._get_system_state_ro() + system_status = system_state["system_status"] + + if system_status == SystemStatuses.IDLE_READY: + raise WebsocketCommandNoOpException() + if system_status not in (SystemStatuses.BUFFERING, SystemStatuses.LIVE_VIEW_ACTIVE): + raise WebsocketCommandError(f"Cannot stop data stream while in {system_status.name}") + + await self._to_monitor_queue.put(comm) + + @mark_handler + async def _start_recording(self, comm: dict[str, Any]) -> None: + """Start writing data stream to file.""" + # TODO make sure all required params are always sent from UI + system_state = self._get_system_state_ro() + + # TODO make sure this route can only be called in the correct state + + if _is_recording(system_state): + raise WebsocketCommandNoOpException() + + if not isinstance((start_timepoint := comm.get("start_timepoint")), int): + raise WebsocketCommandError(f"Invalid value for 'start_timepoint': {start_timepoint}") + + barcodes_to_validate = ["plate_barcode"] + if _are_any_stim_protocols_running(system_state): + barcodes_to_validate.append("stim_barcode") + else: + 
comm["stim_barcode"] = NOT_APPLICABLE_H5_METADATA + # check that all required params are given before validating + for barcode_type in barcodes_to_validate: + try: + barcode = comm[barcode_type] + except KeyError: + raise WebsocketCommandError(f"Command missing '{barcode_type}' value") + else: + if error_message := check_barcode_for_errors(barcode, barcode_type): + barcode_label = barcode_type.split("_")[0].title() + raise WebsocketCommandError(f"{barcode_label} {error_message}") + + if comm["platemap"] is not None: + comm["platemap"] = json.loads(urllib.parse.unquote_plus(comm["platemap"])) + + await self._to_monitor_queue.put(comm) + + @mark_handler + async def _stop_recording(self, comm: dict[str, Any]) -> None: + """Stop writing data stream to file and close the file.""" + system_state = self._get_system_state_ro() + + if not _is_recording(system_state): + raise WebsocketCommandNoOpException() + + if not isinstance((stop_timepoint := comm.get("stop_timepoint")), int): + raise WebsocketCommandError(f"Invalid value for 'stop_timepoint': {stop_timepoint}") + + await self._to_monitor_queue.put(comm) + + @mark_handler + async def _update_recording_name(self, comm: dict[str, Any]) -> None: + """Update the name of the most recent recording.""" + system_state = self._get_system_state_ro() + + comm["new_name"] = comm["new_name"].strip() + + if _recording_exists(system_state, comm["new_name"]) and not comm["replace_existing"]: + # immediately sending message back to UI since there is no reason to have SystemMonitor handle doing this + await self._from_monitor_queue.put( + {"communication_type": "update_recording_name", "name_updated": False} + ) + else: + await self._to_monitor_queue.put(comm) + + # TODO make a new route for handling the recording snapshot? 
+ # TODO consider changing this to "set_stim_info" @mark_handler async def _set_stim_protocols(self, comm: dict[str, Any]) -> None: """Set stimulation protocols in program memory and send to instrument.""" - # TODO make sure the UI includes a stim barcode in this msg + + try: + if not comm["stim_barcode"]: + raise WebsocketCommandError("Cannot set stim protocols without a stim barcode present") + except KeyError: + raise WebsocketCommandError("Command missing 'stim_barcode' value") system_state = self._get_system_state_ro() system_status = system_state["system_status"] if _are_any_stim_protocols_running(system_state): - raise WebsocketCommandError("Cannot change protocols while stimulation is running") - if system_status != SystemStatuses.IDLE_READY_STATE: - raise WebsocketCommandError(f"Cannot change protocols while in {system_status.name}") + raise WebsocketCommandError("Cannot set stim protocols while stimulation is running") + if system_status != SystemStatuses.IDLE_READY: + raise WebsocketCommandError(f"Cannot set stim protocols while in {system_status.name}") stim_info = comm["stim_info"] @@ -322,17 +466,14 @@ async def _set_stim_protocols(self, comm: dict[str, Any]) -> None: @mark_handler async def _start_stim_checks(self, comm: dict[str, Any]) -> None: """Start the stimulator impedence checks on the instrument.""" - # TODO make sure the UI includes a stim barcode in this msg system_state = self._get_system_state_ro() if _are_stimulator_checks_running(system_state): - return # nothing to do here + raise WebsocketCommandNoOpException() - if system_state["system_status"] != SystemStatuses.IDLE_READY_STATE: - raise WebsocketCommandError( - f"Cannot start stim check unless in {SystemStatuses.IDLE_READY_STATE.name}" - ) + if system_state["system_status"] != SystemStatuses.IDLE_READY: + raise WebsocketCommandError(f"Cannot start stim check unless in {SystemStatuses.IDLE_READY.name}") if _are_any_stim_protocols_running(system_state): raise 
WebsocketCommandError("Cannot perform stimulator checks while stimulation is running") @@ -342,12 +483,13 @@ async def _start_stim_checks(self, comm: dict[str, Any]) -> None: except KeyError: raise WebsocketCommandError("Request body missing 'well_indices'") - # TODO figure out if the well idxs are still strings - comm["well_indices"] = [int(idx) for idx in comm["well_indices"]] - # check if barcodes were manually entered and match for barcode_type in ("plate_barcode", "stim_barcode"): - barcode = comm.get(barcode_type) + try: + barcode = comm[barcode_type] + except KeyError: + raise WebsocketCommandError(f"Command missing '{barcode_type}' value") + comm[f"{barcode_type}_is_from_scanner"] = barcode == system_state[barcode_type] await self._to_monitor_queue.put(comm) @@ -355,23 +497,29 @@ async def _start_stim_checks(self, comm: dict[str, Any]) -> None: @mark_handler async def _set_stim_status(self, comm: dict[str, Any]) -> None: """Start or stop stimulation on the instrument.""" - # TODO make sure the UI includes a stim barcode in this msg try: stim_status = comm["running"] except KeyError: - raise WebsocketCommandError("Missing 'running' parameter") + raise WebsocketCommandError("Command missing 'running' value") + + for barcode_type in ("plate_barcode", "stim_barcode"): + try: + if not comm[barcode_type] and stim_status: + raise WebsocketCommandError(f"Cannot start stimulation without a {barcode_type} present") + except KeyError: + raise WebsocketCommandError(f"Command missing '{barcode_type}' value") system_state = self._get_system_state_ro() if stim_status is _are_any_stim_protocols_running(system_state): - return # nothing to do here + raise WebsocketCommandNoOpException() if not system_state["stim_info"]: raise WebsocketCommandError("Protocols have not been set") if stim_status: - if (system_status := system_state["system_status"]) != SystemStatuses.IDLE_READY_STATE: + if (system_status := system_state["system_status"]) != SystemStatuses.IDLE_READY: raise 
WebsocketCommandError(f"Cannot start stimulation while in {system_status.name}") if not _are_initial_stimulator_checks_complete(system_state): raise WebsocketCommandError( @@ -390,6 +538,23 @@ async def _set_stim_status(self, comm: dict[str, Any]) -> None: # HELPERS +def _is_data_streaming(system_state: ReadOnlyDict) -> bool: + return system_state["system_status"] in ( + SystemStatuses.BUFFERING, + SystemStatuses.LIVE_VIEW_ACTIVE, + SystemStatuses.RECORDING, + ) + + +def _is_recording(system_state: ReadOnlyDict) -> bool: + return system_state["system_status"] == SystemStatuses.RECORDING # type: ignore # mypy doesn't understand that this is a bool + + +def _recording_exists(system_state: ReadOnlyDict, recording_name: str) -> bool: + recording_dir = os.path.join(system_state["base_directory"], RECORDINGS_SUBDIR) + return os.path.exists(os.path.join(recording_dir, recording_name)) + + def _are_any_stim_protocols_running(system_state: ReadOnlyDict) -> bool: stim_statuses = system_state["stimulation_protocol_statuses"] return any(status in (StimulationStates.STARTING, StimulationStates.RUNNING) for status in stim_statuses) diff --git a/controller/src/controller/main_systems/system_monitor.py b/controller/src/controller/main_systems/system_monitor.py index f56106b..9acc6d8 100644 --- a/controller/src/controller/main_systems/system_monitor.py +++ b/controller/src/controller/main_systems/system_monitor.py @@ -11,13 +11,16 @@ from pulse3D.constants import MAIN_FIRMWARE_VERSION_UUID from pulse3D.constants import MANTARRAY_SERIAL_NUMBER_UUID as INSTRUMENT_SERIAL_NUMBER_UUID +from ..constants import CALIBRATION_RECORDING_DUR_SECONDS from ..constants import CURRENT_SOFTWARE_VERSION from ..constants import FW_UPDATE_SUBDIR +from ..constants import MICRO_TO_BASE_CONVERSION from ..constants import StimulationStates from ..constants import StimulatorCircuitStatuses from ..constants import SystemStatuses from ..exceptions import ElectronControllerVersionMismatchError from 
..utils.aio import wait_tasks_clean +from ..utils.commands import create_start_recording_command from ..utils.generic import handle_system_error from ..utils.generic import semver_gt from ..utils.state_management import ReadOnlyDict @@ -81,24 +84,24 @@ async def _update_system_status_special_cases(self) -> None: new_system_status: SystemStatuses | None = None match system_state["system_status"]: - case SystemStatuses.SERVER_INITIALIZING_STATE: + case SystemStatuses.SERVER_INITIALIZING: # TODO Tanner (3/15/23): this state just instantly transitions right now, probably not needed anymore - new_system_status = SystemStatuses.SERVER_READY_STATE - case SystemStatuses.SERVER_READY_STATE: + new_system_status = SystemStatuses.SERVER_READY + case SystemStatuses.SERVER_READY: # make sure the electron process is running the same version if ( expected_software_version := system_state.get("expected_software_version") ) and expected_software_version != CURRENT_SOFTWARE_VERSION: raise ElectronControllerVersionMismatchError(expected_software_version) - new_system_status = SystemStatuses.SYSTEM_INITIALIZING_STATE - case SystemStatuses.SYSTEM_INITIALIZING_STATE if ( - # need to wait in SYSTEM_INITIALIZING_STATE until UI connects (indicated by + new_system_status = SystemStatuses.SYSTEM_INITIALIZING + case SystemStatuses.SYSTEM_INITIALIZING if ( + # need to wait in SYSTEM_INITIALIZING until UI connects (indicated by # latest_software_version being set) and instrument completes booting up (indicated by # instrument_metadata being set) system_state["instrument_metadata"] and system_state["latest_software_version"] ): - new_system_status = SystemStatuses.CHECKING_FOR_UPDATES_STATE + new_system_status = SystemStatuses.CHECKING_FOR_UPDATES instrument_metadata = system_state["instrument_metadata"] # send command to cloud comm process to check for latest firmware versions await self._queues["to"]["cloud_comm"].put( @@ -109,9 +112,9 @@ async def _update_system_status_special_cases(self) -> 
None: "main_fw_version": instrument_metadata[MAIN_FIRMWARE_VERSION_UUID], } ) - case SystemStatuses.UPDATES_NEEDED_STATE if system_state["firmware_updates_accepted"]: + case SystemStatuses.UPDATES_NEEDED if system_state["firmware_updates_accepted"]: if not system_state["firmware_updates_require_download"] or system_state["is_user_logged_in"]: - new_system_status = SystemStatuses.DOWNLOADING_UPDATES_STATE + new_system_status = SystemStatuses.DOWNLOADING_UPDATES fw_update_dir_path = ( None @@ -132,16 +135,16 @@ async def _update_system_status_special_cases(self) -> None: await self._queues["to"]["server"].put( {"communication_type": "user_input_needed", "input_type": "user_creds"} ) - case SystemStatuses.UPDATES_NEEDED_STATE if system_state["firmware_updates_accepted"] is False: - # firmware_updates_accepted value will be None before a user has made a decision, so need to explicitly check that it is False - new_system_status = SystemStatuses.IDLE_READY_STATE - case SystemStatuses.INSTALLING_UPDATES_STATE: + # firmware_updates_accepted value will be None before a user has made a decision, so need to explicitly check that it is False + case SystemStatuses.UPDATES_NEEDED if system_state["firmware_updates_accepted"] is False: + new_system_status = SystemStatuses.IDLE_READY + case SystemStatuses.INSTALLING_UPDATES: # these two values get reset to None after their respective installs complete if ( system_state["main_firmware_update"] is None and system_state["channel_firmware_update"] is None ): - new_system_status = SystemStatuses.UPDATES_COMPLETE_STATE + new_system_status = SystemStatuses.UPDATES_COMPLETE await self._send_enable_sw_auto_install_message() if new_system_status: @@ -217,6 +220,45 @@ async def _handle_comm_from_server(self) -> None: action = "accepted" if update_accepted else "declined" logger.info(f"User {action} firmware update(s)") system_state_updates["firmware_updates_accepted"] = update_accepted + case {"command": "start_calibration"}: + 
system_state_updates["system_status"] = SystemStatuses.CALIBRATING
+                    await self._queues["to"]["file_writer"].put(
+                        create_start_recording_command(
+                            system_state, start_recording_time_index=0, is_calibration_recording=True
+                        )
+                    )
+                    await self._queues["to"]["file_writer"].put(
+                        {
+                            "command": "stop_recording",
+                            "stop_timepoint": CALIBRATION_RECORDING_DUR_SECONDS * MICRO_TO_BASE_CONVERSION,
+                        }
+                    )
+
+                    await self._queues["to"]["instrument_comm"].put(
+                        {"command": "start_data_stream", "is_calibration_recording": True}
+                    )
+                case {"command": "start_data_stream"}:
+                    # it's fine to switch the status here since buffering isn't a status directly related to the instrument
+                    system_state_updates["system_status"] = SystemStatuses.BUFFERING
+                    await self._queues["to"]["file_writer"].put(communication)
+                    await self._queues["to"]["instrument_comm"].put(communication)
+                case {"command": "stop_data_stream"}:
+                    # need to wait for the data stream to actually stop before transitioning back to idle ready, so no status transition here
+                    await self._queues["to"]["file_writer"].put(communication)
+                    await self._queues["to"]["instrument_comm"].put(communication)
+                case {"command": "start_recording"}:
+                    system_state_updates["system_status"] = SystemStatuses.RECORDING
+                    await self._queues["to"]["file_writer"].put(
+                        create_start_recording_command(
+                            system_state,
+                            start_recording_time_index=communication["start_timepoint"],
+                            platemap_info=communication["platemap_info"],
+                            is_calibration_recording=False,
+                        )
+                    )
+                case {"command": "stop_recording" | "update_recording_name"}:
+                    # neither of these commands requires additional processing here
+                    await self._queues["to"]["file_writer"].put(communication)
                 case {"command": "set_stim_status", "running": status}:
                     num_protocols = len(system_state["stim_info"]["protocols"])
                     if status:
@@ -234,10 +276,9 @@ async def _handle_comm_from_server(self) -> None:
                 await self._queues["to"]["instrument_comm"].put({"command": command})
             case {"command": 
"set_stim_protocols", "stim_info": stim_info}: system_state_updates["stim_info"] = stim_info - chunked_stim_info, *_ = chunk_protocols_in_stim_info(stim_info) - await self._queues["to"]["instrument_comm"].put( - {**communication, "stim_info": chunked_stim_info} - ) + chunked_stim_info = chunk_protocols_in_stim_info(stim_info) + await self._queues["to"]["instrument_comm"].put({**communication, **chunked_stim_info}) + # TODO send to file writer ? case {"command": "start_stim_checks", "well_indices": well_indices}: system_state_updates["stimulator_circuit_statuses"] = { well_idx: StimulatorCircuitStatuses.CALCULATING.name.lower() @@ -258,10 +299,17 @@ async def _handle_comm_from_instrument_comm(self) -> None: communication = await self._queues["from"]["instrument_comm"].get() system_state = self._system_state_manager.data - system_state_updates: dict[str, Any] = {} match communication: + case {"command": "get_board_connection_status", "in_simulation_mode": in_simulation_mode}: + system_state_updates["in_simulation_mode"] = in_simulation_mode + case {"command": "get_metadata", **metadata}: + system_state_updates["instrument_metadata"] = metadata + case {"command": "start_data_stream"}: + pass # system will switch into live view active state once enough data packets have been sent to the UI + case {"command": "stop_data_stream"}: + system_state_updates["system_status"] = SystemStatuses.IDLE_READY case {"command": "set_stim_protocols"}: pass # nothing to do here case {"command": "start_stimulation"}: @@ -287,8 +335,6 @@ async def _handle_comm_from_instrument_comm(self) -> None: await self._queues["to"]["server"].put( {"communication_type": "stimulator_circuit_statuses", **update} ) - case {"command": "get_board_connection_status", "in_simulation_mode": in_simulation_mode}: - system_state_updates["in_simulation_mode"] = in_simulation_mode case {"command": "get_barcode", "barcode": barcode}: barcode_type = "stim_barcode" if barcode.startswith("MS") else "plate_barcode" # 
if barcode didn't change, then no need to create an update @@ -302,8 +348,6 @@ async def _handle_comm_from_instrument_comm(self) -> None: "new_barcode": barcode, } ) - case {"command": "get_metadata", **metadata}: - system_state_updates["instrument_metadata"] = metadata case {"command": "firmware_update_complete", "firmware_type": firmware_type}: key = f"{firmware_type}_firmware_update" fw_version = system_state[key] @@ -322,13 +366,35 @@ async def _handle_comm_from_instrument_comm(self) -> None: if system_state_updates: await self._system_state_manager.update(system_state_updates) + async def _handle_comm_from_file_writer(self) -> None: + while True: + communication = await self._queues["from"]["file_writer"].get() + + system_state_updates: dict[str, Any] = {} + + match communication: + case {"command": "start_data_stream" | "stop_data_stream"}: + pass # nothing to do here + case {"command": "start_recording"}: + pass # nothing to do here + case {"command": "stop_recording"}: + if communication.get("is_calibration_recording"): + await self._queues["to"]["instrument_comm"].put({"command": "stop_data_stream"}) + else: + system_state_updates["system_status"] = SystemStatuses.LIVE_VIEW_ACTIVE + case {"command": "update_recording_name"}: + await self._queues["to"]["server"].put( + {"communication_type": "update_recording_name", "name_updated": True} + ) + case invalid_comm: + raise NotImplementedError(f"Invalid communication from FileWriter: {invalid_comm}") + async def _handle_comm_from_cloud_comm(self) -> None: # TODO could make try making these first 4 boiler plates lines reusable somehow while True: communication = await self._queues["from"]["cloud_comm"].get() system_state = self._system_state_manager.data - system_state_updates: dict[str, Any] = {} match communication: @@ -341,7 +407,7 @@ async def _handle_comm_from_cloud_comm(self) -> None: ) case {"command": "check_versions", "error": _}: # error will be logged by cloud comm - 
system_state_updates["system_status"] = SystemStatuses.IDLE_READY_STATE + system_state_updates["system_status"] = SystemStatuses.IDLE_READY case {"command": "check_versions"}: system_state_updates["firmware_updates_require_download"] = communication["download"] @@ -365,7 +431,7 @@ async def _handle_comm_from_cloud_comm(self) -> None: if (main_fw_update_needed or channel_fw_update_needed) and min_sw_version_available: logger.info("Firmware update(s) found") - system_state_updates["system_status"] = SystemStatuses.UPDATES_NEEDED_STATE + system_state_updates["system_status"] = SystemStatuses.UPDATES_NEEDED system_state_updates["main_firmware_update"] = ( latest_main_fw if main_fw_update_needed else None ) @@ -381,11 +447,11 @@ async def _handle_comm_from_cloud_comm(self) -> None: ) else: logger.info("No firmware updates found") - system_state_updates["system_status"] = SystemStatuses.IDLE_READY_STATE + system_state_updates["system_status"] = SystemStatuses.IDLE_READY # since no updates available, also enable auto install of SW update await self._send_enable_sw_auto_install_message() case {"command": "download_firmware_updates"}: - system_state_updates["system_status"] = SystemStatuses.INSTALLING_UPDATES_STATE + system_state_updates["system_status"] = SystemStatuses.INSTALLING_UPDATES # Tanner (1/13/22): send both firmware update commands at once, and make sure channel is sent first. 
# If both are sent, the second will be ignored by instrument comm until the first install completes for firmware_type in ("channel", "main"): diff --git a/controller/src/controller/subsystems/__init__.py b/controller/src/controller/subsystems/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/controller/src/controller/subsystems/file_writer.py b/controller/src/controller/subsystems/file_writer.py new file mode 100644 index 0000000..a42a5bf --- /dev/null +++ b/controller/src/controller/subsystems/file_writer.py @@ -0,0 +1,515 @@ +# -*- coding: utf-8 -*- +import asyncio +from collections import deque +from collections import namedtuple +import datetime +import json +import logging +import os +import tempfile +from typing import Any +import uuid + +import h5py +from nptyping import NDArray +import numpy as np +from pulse3D.constants import DATETIME_STR_FORMAT as METADATA_DATETIME_STR_FORMAT +from pulse3D.constants import IS_CALIBRATION_FILE_UUID +from pulse3D.constants import METADATA_UUID_DESCRIPTIONS +from pulse3D.constants import NOT_APPLICABLE_H5_METADATA +from pulse3D.constants import PLATE_BARCODE_UUID +from pulse3D.constants import START_RECORDING_TIME_INDEX_UUID +from pulse3D.constants import STIMULATION_PROTOCOL_UUID +from pulse3D.constants import STIMULATION_READINGS +from pulse3D.constants import TIME_INDICES +from pulse3D.constants import TIME_OFFSETS +from pulse3D.constants import TISSUE_SENSOR_READINGS +from pulse3D.constants import UTC_BEGINNING_DATA_ACQUISTION_UUID +from pulse3D.constants import UTC_BEGINNING_RECORDING_UUID +from pulse3D.plate_recording import MantarrayH5FileCreator + +from ..constants import CURRENT_RECORDING_FILE_VERSION +from ..constants import FILE_WRITER_BUFFER_SIZE_MILLISECONDS +from ..constants import NUM_MAG_DATA_CHANNELS_PER_WELL +from ..constants import NUM_MAG_SENSORS_PER_WELL +from ..constants import NUM_WELLS +from ..utils.aio import wait_tasks_clean +from ..utils.generic import handle_system_error + + 
+logger = logging.getLogger(__name__) + +RecordingBounds = namedtuple("RecordingBounds", ["start", "stop"]) + +# TODO move these to pulse3D +UTC_BEGINNING_CALIBRATION_UUID = uuid.UUID("b0995a2e-8f1d-41d7-b369-54ec06656683") +CALIBRATION_TIME_INDICES = f"calibration_{TIME_INDICES}" +CALIBRATION_TIME_OFFSETS = f"calibration_{TIME_OFFSETS}" +CALIBRATION_TISSUE_SENSOR_READINGS = f"calibration_{TISSUE_SENSOR_READINGS}" +STIMULATION_READINGS_TEMPLATE = f"{STIMULATION_READINGS}_{{protocol_idx}}" + + +ERROR_MSG = "IN FILE WRITER" + +# Tanner (5/17/21): Not sure what this value represents, should add comment if/when it is determined +MAX_DATA_LEN = 100 * 3600 * 12 + + +def _get_earliest_required_stim_idx( + stim_timepoints: NDArray[(1, Any), int], earliest_mag_time_idx: int +) -> int: + return max(np.argmax(stim_timepoints > earliest_mag_time_idx) - 1, 0) # type: ignore + + +class FileWriter: + """Subsystem that manages writing data to file.""" + + def __init__( + self, + from_monitor_queue: asyncio.Queue[dict[str, Any]], + to_monitor_queue: asyncio.Queue[dict[str, Any]], + data_queue: asyncio.Queue[dict[str, Any]], + recordings_directory: str, + ) -> None: + # comm queues + self._from_monitor_queue = from_monitor_queue + self._to_monitor_queue = to_monitor_queue + # data queue + self._data_queue = data_queue + + self._recordings_directory = recordings_directory + self._calibration_tmp_dir: tempfile.TemporaryDirectory | None = None # type: ignore [type-arg] + + self._current_calibration_path: str | None = None + self._current_recording_name: str | None = None + self._current_recording_file: MantarrayH5FileCreator | None = None + + self._recording_time_idx_bounds = RecordingBounds(None, None) + + self._is_calibration_recording = False + + self._stim_info: dict[str, Any] | None = None + self._start_data_stream_timestamp_utc: datetime.datetime | None = None + self._end_of_mag_stream_reached = False + self._end_of_stim_stream_reached = False + + self._mag_data_buffer: 
deque[dict[str, Any]] = deque() + self._stim_data_buffers: dict[int, NDArray[(2, Any), int]] = dict() + + # PROPERTIES + + @property + def _is_recording(self) -> bool: + return self._recording_time_idx_bounds.start is not None + + @property + def _current_recording_path(self) -> str | None: + if not self._calibration_tmp_dir: + raise NotImplementedError("self._calibration_tmp_dir should never be None here") + + if not self._current_recording_name: + return None + + recording_dir = ( + self._calibration_tmp_dir.name if self._is_calibration_recording else self._recordings_directory + ) + return os.path.join(recording_dir, self._current_recording_name) + + @property + def _num_stim_protocols(self) -> int: + if not self._stim_info: + return 0 + return len(self._stim_info["protocols"]) + + # ONE-SHOT TASKS + + async def run(self, system_error_future: asyncio.Future[int]) -> None: + logger.info("Starting FileWriter") + + # TODO asyncio.shield functions that edit files? Need to make sure edits are done fully, i.e. 
add either all datasets during file creation or none of them + + try: + self._calibration_tmp_dir = tempfile.TemporaryDirectory() + + tasks = { + asyncio.create_task(self._handle_comm_from_monitor()), + asyncio.create_task(self._handle_incoming_data()), + } + await wait_tasks_clean(tasks, error_msg=ERROR_MSG) + except asyncio.CancelledError: + logger.info("FileWriter cancelled") + raise + except BaseException as e: + logger.exception(ERROR_MSG) + handle_system_error(e, system_error_future) + finally: + if self._current_recording_file: + await self._handle_file_close() + if self._calibration_tmp_dir: + self._calibration_tmp_dir.cleanup() + logger.info("FileWriter shut down") + + # INFINITE TASKS + + async def _handle_comm_from_monitor(self) -> None: + while True: + comm_from_monitor = await self._from_monitor_queue.get() + + match comm_from_monitor: + case {"command": "start_data_stream"}: + self._start_data_stream_timestamp_utc = datetime.datetime.utcnow() + case {"command": "stop_data_stream"}: + self._start_data_stream_timestamp_utc = None + if self._current_recording_file: + await self._handle_file_close() + case {"command": "start_recording"}: + await self._start_recording(comm_from_monitor) + case {"command": "stop_recording"}: + await self._stop_recording(comm_from_monitor) + case {"command": "update_recording_name"}: + await self._update_recording_name(comm_from_monitor) + case {"command": "set_stim_protocols", "stim_info": stim_info}: + self._stim_info = stim_info + await self._reset_stim_data_buffers() + if self._is_recording: + await self._create_stim_datasets() + + if comm_from_monitor["command"] != "stop_recording": + await self._to_monitor_queue.put(comm_from_monitor) + + async def _handle_incoming_data(self) -> None: + while True: + data_packet = await self._data_queue.get() + + match data_packet["data_type"]: + case "magnetometer": + await self._process_mag_data_packet(data_packet) + case "stimulation": + await 
self._process_stim_data_packet(data_packet) + case invalid_data_type: + raise NotImplementedError( + f"Invalid data type from Instrument Comm Process: {invalid_data_type}" + ) + + # COMMAND HANDLERS + + async def _start_recording(self, command: dict[str, Any]) -> None: + metadata = command["metadata"] + + self._recording_time_idx_bounds._replace(start=metadata[START_RECORDING_TIME_INDEX_UUID]) + self._is_calibration_recording = metadata[IS_CALIBRATION_FILE_UUID] + + if self._is_calibration_recording: + recording_prefix = "Calibration" + # remove old calibration file if one exists + if self._current_calibration_path and os.path.isfile(self._current_calibration_path): + os.remove(self._current_calibration_path) + # set new calibration file path + self._current_calibration_path = self._current_recording_path + else: + recording_prefix = str(metadata[PLATE_BARCODE_UUID]) + + recording_start_timestamp_str = metadata[UTC_BEGINNING_RECORDING_UUID].strftime("%Y_%m_%d__%H_%M_%S") + self._current_recording_name = f"{recording_prefix}__{recording_start_timestamp_str}" + + await self._create_recording_file(metadata) + + if not self._is_calibration_recording: + await self._add_calibration_data_to_recording() + await self._add_protocols_to_recording_files() + await self._record_data_from_buffers() + + async def _stop_recording(self, command: dict[str, Any]) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + + # if the final timpeoint need is not present, then there's nothing else to do yet + if self._current_recording_file[TIME_INDICES][-1] < command["stop_timepoint"]: + self._recording_time_idx_bounds._replace(stop=command["stop_timepoint"]) + return + + # if the final timepoint needed is already present, then clear the recording bounds as the recording can be completed without another data packet + self._recording_time_idx_bounds._replace(start=None) + 
self._recording_time_idx_bounds._replace(stop=None) + + upper_magnetometer_data_bound = np.argmax( + self._current_recording_file[TIME_INDICES] > command["stop_timepoint"] + ) + + # trim off magnetometer data after stop recording timepoint + for mag_data_type in (TIME_INDICES, TIME_OFFSETS, TISSUE_SENSOR_READINGS): + current_recorded_data = self._current_recording_file[mag_data_type] + current_recorded_data.resize([*current_recorded_data.shape[:-1], upper_magnetometer_data_bound]) + + # trim off stim data after stop recording timepoint + for protocol_idx in range(self._num_stim_protocols): + current_recorded_data = self._current_recording_file[ + STIMULATION_READINGS_TEMPLATE.format(protocol_idx) + ] + if current_recorded_data[0, -1] <= command["stop_timepoint"]: + continue + upper_stim_data_bound = np.argmax(current_recorded_data[0] > command["stop_timepoint"]) + current_recorded_data.resize([*current_recorded_data.shape[:-1], upper_stim_data_bound]) + + await self._handle_file_close() + + async def _update_recording_name(self, command: dict[str, str]) -> None: + if not self._current_recording_path: + raise NotImplementedError("self._current_recording_path should never be None here") + if not self._current_recording_name: + raise NotImplementedError("self._current_recording_name should never be None here") + + new_recording_path = self._current_recording_path.replace( + self._current_recording_name, command["new_name"] + ) + os.rename(self._current_recording_path, new_recording_path) + self._current_recording_name = command["new_name"] + + # DATA HANDLERS + + async def _process_mag_data_packet(self, data_packet: dict[str, Any]) -> None: + if data_packet["is_first_packet_of_stream"]: + self._end_of_mag_stream_reached = False + self._mag_data_buffer.clear() + if not self._end_of_mag_stream_reached: + self._mag_data_buffer.append(data_packet) + await self._update_data_buffers() + + if self._is_recording: + await self._handle_recording_of_mag_data_packet(data_packet) 
+ + async def _process_stim_data_packet(self, data_packet: dict[str, Any]) -> None: + if data_packet["is_first_packet_of_stream"]: + self._end_of_stim_stream_reached = False + await self._reset_stim_data_buffers() + if not self._end_of_stim_stream_reached: + await self._append_to_stim_data_buffers(data_packet["protocol_statuses"]) + + if self._is_recording: + await self._handle_recording_of_stim_statuses(data_packet["protocol_statuses"]) + + # HELPERS + + async def _create_recording_file(self, metadata_for_file: dict[uuid.UUID, Any]) -> None: + if not self._start_data_stream_timestamp_utc: + raise NotImplementedError("self._start_data_stream_timestamp_utc should never be None here") + + self._current_recording_file = MantarrayH5FileCreator( + self._current_recording_path, file_format_version=CURRENT_RECORDING_FILE_VERSION + ) + + metadata_for_file[ + UTC_BEGINNING_DATA_ACQUISTION_UUID + ] = self._start_data_stream_timestamp_utc.strftime(METADATA_DATETIME_STR_FORMAT) + + for this_attr_name, this_attr_value in metadata_for_file.items(): + # apply custom formatting to UTC datetime value + if ( + METADATA_UUID_DESCRIPTIONS[this_attr_name].startswith("UTC Timestamp") + and this_attr_value != NOT_APPLICABLE_H5_METADATA + ): + this_attr_value = this_attr_value.strftime(METADATA_DATETIME_STR_FORMAT) + + # UUIDs must be stored as strings + this_attr_name = str(this_attr_name) # type: ignore + if isinstance(this_attr_value, uuid.UUID): + this_attr_value = str(this_attr_value) + + self._current_recording_file.attrs[this_attr_name] = this_attr_value + + # converting to a string since json does not like UUIDs + self._current_recording_file.attrs["Metadata UUID Descriptions"] = str(METADATA_UUID_DESCRIPTIONS) + + # sampling time values + self._current_recording_file.create_dataset( + TIME_INDICES, (0,), maxshape=(MAX_DATA_LEN,), dtype="uint64", chunks=True + ) + # sampling time offset + self._current_recording_file.create_dataset( + TIME_OFFSETS, + (NUM_MAG_SENSORS_PER_WELL, 0), 
+ maxshape=(NUM_MAG_SENSORS_PER_WELL, MAX_DATA_LEN), + dtype="uint16", + chunks=True, + ) + # magnetometer data (tissue) + self._current_recording_file.create_dataset( + TISSUE_SENSOR_READINGS, + (NUM_WELLS, NUM_MAG_DATA_CHANNELS_PER_WELL, 0), + maxshape=(NUM_WELLS, NUM_MAG_DATA_CHANNELS_PER_WELL, MAX_DATA_LEN), + dtype="uint16", + chunks=True, + ) + # stim data + if self._stim_info: + await self._create_stim_datasets() + + self._current_recording_file.swmr_mode = True + + async def _create_stim_datasets(self) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + + for protocol_idx in range(self._num_stim_protocols): + self._current_recording_file.create_dataset( + STIMULATION_READINGS_TEMPLATE.format(protocol_idx), + (2, 0), + maxshape=(2, MAX_DATA_LEN), + dtype="int64", + chunks=True, + ) + + async def _add_calibration_data_to_recording(self) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + if not self._current_calibration_path: + raise NotImplementedError("self._current_calibration_path should never be None here") + + with h5py.File(self._current_calibration_path, "r") as calibration_file: + self._current_recording_file.attrs[UTC_BEGINNING_CALIBRATION_UUID] = calibration_file.attrs[ + UTC_BEGINNING_RECORDING_UUID + ] + + for new_label, original_label in { + CALIBRATION_TIME_INDICES: TIME_INDICES, + CALIBRATION_TIME_OFFSETS: TIME_OFFSETS, + CALIBRATION_TISSUE_SENSOR_READINGS: TISSUE_SENSOR_READINGS, + }.items(): + self._current_recording_file.create_dataset( + new_label, data=calibration_file[original_label], chunks=True + ) + + async def _add_protocols_to_recording_files(self) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + if not self._stim_info: + raise NotImplementedError("self._stim_info should never be 
None here") + + self._current_recording_file.attrs[str(STIMULATION_PROTOCOL_UUID)] = json.dumps(self._stim_info) + + async def _record_data_from_buffers(self) -> None: + for data_packet in self._mag_data_buffer: + await self._handle_recording_of_mag_data_packet(data_packet) + await self._handle_recording_of_stim_statuses(self._stim_data_buffers) + + async def _handle_recording_of_mag_data_packet(self, data_packet: dict[str, Any]) -> None: + if self._recording_time_idx_bounds.start is None: # check needed for mypy to be happy + raise NotImplementedError("_recording_time_idx_bounds.start should never be None here") + + time_indices = data_packet[TIME_INDICES] + + if time_indices[-1] < self._recording_time_idx_bounds.start: + # if final data point is less than the recording start time, then there's nothing else to do + return + + upper_time_bound = ( + self._recording_time_idx_bounds.stop + if self._recording_time_idx_bounds.stop is not None + else np.inf + ) + + # if the final timepoint needed is present, then clear the recording bounds as the recording will be complete after this packet + if time_indices[-1] >= upper_time_bound: + self._recording_time_idx_bounds._replace(start=None) + self._recording_time_idx_bounds._replace(stop=None) + + data_window = (self._recording_time_idx_bounds.start <= time_indices) & ( + time_indices <= upper_time_bound + ) + + for data_type in (TIME_INDICES, TIME_OFFSETS, TISSUE_SENSOR_READINGS): + await self._update_dataset(data_packet[data_type][data_window], data_type) + + if self._recording_time_idx_bounds.stop is None: + await self._handle_file_close() + + async def _handle_recording_of_stim_statuses( + self, protocol_statuses: dict[int, NDArray[(2, Any), int]] + ) -> None: + if self._recording_time_idx_bounds.start is None: # check needed for mypy to be happy + raise NotImplementedError("_recording_time_idx_bounds.start should never be None here") + + for protocol_idx, new_stim_statuses in protocol_statuses.items(): + 
earliest_required_idx = _get_earliest_required_stim_idx( + new_stim_statuses[0], self._recording_time_idx_bounds.start + ) + + upper_idx_bound = None + if ( + self._recording_time_idx_bounds.stop is not None + and new_stim_statuses[0, -1] > self._recording_time_idx_bounds.stop + ): + upper_idx_bound = np.argmax(new_stim_statuses[0] > self._recording_time_idx_bounds.stop) + + await self._update_dataset( + new_stim_statuses[:, earliest_required_idx:upper_idx_bound], + STIMULATION_READINGS_TEMPLATE.format(protocol_idx), + ) + + async def _handle_file_close(self) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + if not self._current_recording_path: + raise NotImplementedError("self._current_recording_path should never be None here") + + self._current_recording_file.close() + + msg_to_main = {"command": "stop_recording"} + + # after h5 close, reopen them and attempt to read. If not possible then add file to list + try: + with h5py.File(self._current_recording_path, "r"): + pass # if file opens, then there is no corruption + except Exception: + # TODO does a different message need to be sent if this is a calibration recording? + msg_to_main["corrupted_file"] = self._current_recording_path + + # TODO does a different message need to be sent if this is a calibration recording? 
+ await self._to_monitor_queue.put(msg_to_main) + + async def _update_dataset(self, new_data: NDArray, dataset: str) -> None: + if not self._current_recording_file: + raise NotImplementedError("self._current_recording_file should never be None here") + + current_recorded_data = self._current_recording_file[dataset] + previous_recorded_data_len = current_recorded_data.shape[-1] + current_recorded_data.resize( + [*current_recorded_data.shape[:-1], previous_recorded_data_len + new_data.shape[-1]] + ) + current_recorded_data[..., previous_recorded_data_len:] = new_data + + async def _update_data_buffers(self) -> None: + """Remove old data packets if necessary""" + curr_buffer_memory_size = ( + self._mag_data_buffer[-1]["time_indices"][0] - self._mag_data_buffer[0]["time_indices"][0] + ) + if curr_buffer_memory_size <= FILE_WRITER_BUFFER_SIZE_MILLISECONDS: + return + + # buffer has grown too large, so need to remove the earliest magnetomer data packet + self._mag_data_buffer.popleft() + + # since a magnetometer data packet was removed, also check to see if the earliest stim data packet + # is now unnecessary + earliest_buffered_mag_time_idx = self._mag_data_buffer[0]["time_indices"][0] + + for protocol_idx in range(self._num_stim_protocols): + buffered_stim_statuses = self._stim_data_buffers[protocol_idx] + earliest_required_stim_idx = _get_earliest_required_stim_idx( + buffered_stim_statuses[0], earliest_buffered_mag_time_idx + ) + self._stim_data_buffers[protocol_idx] = buffered_stim_statuses[:, earliest_required_stim_idx:] + + async def _reset_stim_data_buffers(self) -> None: + self._stim_data_buffers = { + protocol_idx: np.empty((2, 0)) for protocol_idx in range(self._num_stim_protocols) + } + + async def _append_to_stim_data_buffers( + self, protocol_statuses: dict[int, NDArray[(2, Any), int]] + ) -> None: + for protocol_idx in range(self._num_stim_protocols): + if (status_arr := protocol_statuses.get(protocol_idx)) is not None: + 
self._stim_data_buffers[protocol_idx] = np.concatenate( + self._stim_data_buffers[protocol_idx], status_arr + ) diff --git a/controller/src/controller/subsystems/instrument_comm.py b/controller/src/controller/subsystems/instrument_comm.py index 1bd0db7..555bf87 100644 --- a/controller/src/controller/subsystems/instrument_comm.py +++ b/controller/src/controller/subsystems/instrument_comm.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- import asyncio from collections import namedtuple -import datetime +import dataclasses +from dataclasses import dataclass import logging import struct from time import perf_counter @@ -9,10 +10,18 @@ from zlib import crc32 from aioserial import AioSerial +from nptyping import NDArray +import numpy as np +from pulse3D.constants import TIME_INDICES +from pulse3D.constants import TIME_OFFSETS +from pulse3D.constants import TISSUE_SENSOR_READINGS import serial import serial.tools.list_ports as list_ports from ..constants import CURI_VID +from ..constants import MAX_MC_REBOOT_DURATION_SECONDS +from ..constants import NUM_INITIAL_MAG_PACKETS_TO_DROP +from ..constants import NUM_MAG_DATA_PACKETS_PER_SECOND from ..constants import NUM_WELLS from ..constants import SERIAL_COMM_BAUD_RATE from ..constants import SERIAL_COMM_BYTESIZE @@ -37,6 +46,7 @@ from ..exceptions import InstrumentCommandResponseError from ..exceptions import InstrumentError from ..exceptions import InstrumentFirmwareError +from ..exceptions import InstrumentRebootTimeoutError from ..exceptions import NoInstrumentDetectedError from ..exceptions import SerialCommCommandProcessingError from ..exceptions import SerialCommCommandResponseTimeoutError @@ -47,6 +57,7 @@ from ..exceptions import SerialCommUntrackedCommandResponseError from ..utils.aio import wait_tasks_clean from ..utils.command_tracking import CommandTracker +from ..utils.data_parsing_cy import parse_magnetometer_data from ..utils.data_parsing_cy import parse_stim_data from ..utils.data_parsing_cy import 
sort_serial_packets from ..utils.generic import handle_system_error @@ -70,11 +81,13 @@ TRACKED_EVENT_NAMES = ( "handshake_sent", "command_sent", - "stim_data_received", "command_response_received", "status_beacon_received", + "magnetometer_data_received", + "stimulation_data_received", ) +# TODO replace all nametuples with dataclasses TimepointsOfEvents = namedtuple( # type: ignore "TimepointsOfEvents", TRACKED_EVENT_NAMES, defaults=[None] * len(TRACKED_EVENT_NAMES) # type: ignore ) @@ -111,26 +124,28 @@ class InstrumentComm: def __init__( self, from_monitor_queue: asyncio.Queue[dict[str, Any]], - to_monitor_queue: asyncio.Queue[dict[str, Any]], + comm_to_monitor_queue: asyncio.Queue[dict[str, Any]], + data_to_monitor_queue: asyncio.Queue[dict[str, Any]], + data_to_file_writer_queue: asyncio.Queue[dict[str, Any]], hardware_test_mode: bool = False, ) -> None: # comm queues self._from_monitor_queue = from_monitor_queue - self._to_monitor_queue = to_monitor_queue + self._comm_to_monitor_queue = comm_to_monitor_queue - # TODO try making some kind of container for all this data? 
# instrument self._instrument: AioSerial | VirtualInstrumentConnection | None = None self._hardware_test_mode = hardware_test_mode # instrument comm self._serial_packet_cache = bytes(0) self._command_tracker = CommandTracker() + # data stream + self._data_stream_manager = DataStreamManager( + comm_to_monitor_queue, data_to_monitor_queue, data_to_file_writer_queue + ) # instrument status self._is_waiting_for_reboot = False self._status_beacon_received_event = asyncio.Event() - # stimulation values - self._num_stim_protocols: int = 0 - self._protocols_running: set[int] = set() # firmware updating self._firmware_update_manager: FirmwareUpdateManager | None = None # comm tracking @@ -146,17 +161,6 @@ def _is_updating_firmware(self) -> bool: def _instrument_in_sensitive_state(self) -> bool: return self._is_waiting_for_reboot or self._is_updating_firmware - @property - def _is_stimulating(self) -> bool: - return len(self._protocols_running) > 0 - - @_is_stimulating.setter - def _is_stimulating(self, value: bool) -> None: - if value: - self._protocols_running = set(range(self._num_stim_protocols)) - else: - self._protocols_running = set() - # ONE-SHOT TASKS async def run(self, system_error_future: asyncio.Future[tuple[int, dict[str, str]]]) -> None: @@ -224,7 +228,7 @@ async def _create_connection_to_instrument(self) -> None: else: self._instrument = virtual_instrument - await self._to_monitor_queue.put( + await self._comm_to_monitor_queue.put( { "command": "get_board_connection_status", "in_simulation_mode": isinstance(self._instrument, VirtualInstrumentConnection), @@ -304,6 +308,10 @@ async def _handle_comm_from_monitor(self) -> None: packet_type: int | None = None match comm_from_monitor: + case {"command": "start_data_stream"}: + packet_type = SerialCommPacketTypes.START_DATA_STREAMING + case {"command": "stop_data_stream"}: + packet_type = SerialCommPacketTypes.STOP_DATA_STREAMING case {"command": "start_stim_checks", "well_indices": well_indices}: packet_type = 
SerialCommPacketTypes.STIM_IMPEDANCE_CHECK bytes_to_send = struct.pack( @@ -316,11 +324,11 @@ async def _handle_comm_from_monitor(self) -> None: case {"command": "set_stim_protocols", "stim_info": stim_info}: packet_type = SerialCommPacketTypes.SET_STIM_PROTOCOL bytes_to_send = convert_stim_dict_to_bytes(stim_info) - if self._is_stimulating and not self._hardware_test_mode: + if self._data_stream_manager.is_stimulating and not self._hardware_test_mode: raise InstrumentCommandAttemptError( "Cannot update stimulation protocols while stimulating" ) - self._num_stim_protocols = len(stim_info["protocols"]) + self._data_stream_manager.set_stim_info(comm_from_monitor) case {"command": "start_stimulation"}: packet_type = SerialCommPacketTypes.START_STIM case {"command": "stop_stimulation"}: @@ -411,9 +419,11 @@ async def _handle_data_stream(self) -> None: f"Timestamp: {timestamp}, Packet Type: {packet_type}, Payload: {packet_payload}" ) from e - # Tanner (2/28/23): there is currently no data stream, so magnetometer packets can be completely ignored. 
+ for data_type in ("magnetometer", "stimulation"): + if sorted_packet_dict[f"{data_type}_stream_info"]["num_packets"] > 0: + self._update_timepoints_of_events(f"{data_type}_data_received") - await self._process_stim_packets(sorted_packet_dict["stimulation_stream_info"]) + await self._data_stream_manager.push(sorted_packet_dict) # TEMPORARY TASKS @@ -431,7 +441,7 @@ async def _handle_firmware_update(self, comm_from_monitor: dict[str, Any]) -> No await self._wait_for_reboot() - await self._to_monitor_queue.put( + await self._comm_to_monitor_queue.put( { "command": "firmware_update_complete", "firmware_type": comm_from_monitor["firmware_type"], @@ -443,9 +453,11 @@ async def _wait_for_reboot(self) -> None: self._is_waiting_for_reboot = True - # TODO raise InstrumentRebootTimeoutError() if this times out + try: + await asyncio.wait_for(self._status_beacon_received_event.wait(), MAX_MC_REBOOT_DURATION_SECONDS) + except asyncio.TimeoutError as e: + raise InstrumentRebootTimeoutError() from e - await self._status_beacon_received_event.wait() logger.info("Instrument completed reboot") self._is_waiting_for_reboot = False @@ -497,7 +509,7 @@ async def _process_comm_from_instrument(self, packet_type: int, packet_payload: barcode = packet_payload.decode("ascii") logger.info(f"Barcode scanned by instrument: {barcode}") barcode_comm = {"command": "get_barcode", "barcode": barcode} - await self._to_monitor_queue.put(barcode_comm) + await self._comm_to_monitor_queue.put(barcode_comm) case SerialCommPacketTypes.GET_ERROR_DETAILS: error_details = parse_instrument_event_info(packet_payload) await self._send_data_packet(SerialCommPacketTypes.ERROR_ACK) @@ -534,6 +546,21 @@ async def _process_command_response(self, packet_type: int, response_data: bytes if not metadata_dict.pop("is_stingray"): raise IncorrectInstrumentConnectedError() prev_command_info.update(metadata_dict) + case "start_data_stream": + if response_data[0]: + if not self._hardware_test_mode: + raise 
InstrumentCommandResponseError(prev_command_info["command"]) + logger.debug("Data stream already started") # pragma: no cover + else: + base_global_time_of_data_stream = int.from_bytes(response_data[1:9], byteorder="little") + await self._data_stream_manager.activate(base_global_time_of_data_stream) + case "stop_data_stream": + if response_data[0]: + if not self._hardware_test_mode: + raise InstrumentCommandResponseError(prev_command_info["command"]) + logger.debug("Data stream already stopped") # pragma: no cover + else: + await self._data_stream_manager.deactivate() case "start_stim_checks": stimulator_check_dict = convert_stimulator_check_bytes_to_dict(response_data) @@ -555,52 +582,28 @@ async def _process_command_response(self, packet_type: int, response_data: bytes case "set_protocols": if response_data[0]: if not self._hardware_test_mode: - raise InstrumentCommandResponseError("set_protocols") - prev_command_info["hardware_test_message"] = "Command failed" # pragma: no cover + raise InstrumentCommandResponseError(prev_command_info["command"]) + logger.debug("set_protocols command failed") # pragma: no cover case "start_stimulation": # Tanner (10/25/21): if needed, can save _base_global_time_of_data_stream here if response_data[0]: if not self._hardware_test_mode: - raise InstrumentCommandResponseError("start_stimulation") - prev_command_info["hardware_test_message"] = "Command failed" # pragma: no cover - prev_command_info["timestamp"] = datetime.datetime.utcnow() - self._is_stimulating = True + raise InstrumentCommandResponseError(prev_command_info["command"]) + logger.debug("start_stimulation command failed") # pragma: no cover + self._data_stream_manager.is_stimulating = True case "stop_stimulation": if response_data[0]: if not self._hardware_test_mode: - raise InstrumentCommandResponseError("stop_stimulation") - prev_command_info["hardware_test_message"] = "Command failed" # pragma: no cover - self._is_stimulating = False + raise 
InstrumentCommandResponseError(prev_command_info["command"]) + logger.debug("stop_stimulation command failed") # pragma: no cover + self._data_stream_manager.is_stimulating = False case command if command in INTERMEDIATE_FIRMWARE_UPDATE_COMMANDS: if self._firmware_update_manager is None: raise NotImplementedError("_firmware_update_manager should never be None here") await self._firmware_update_manager.update(command, response_data) if prev_command_info["command"] not in INTERMEDIATE_FIRMWARE_UPDATE_COMMANDS: - await self._to_monitor_queue.put(prev_command_info) - - async def _process_stim_packets(self, stim_stream_info: dict[str, bytes | int]) -> None: - if not stim_stream_info["num_packets"]: - return - - self._update_timepoints_of_events("stim_data_received") - - # Tanner (2/28/23): there is currently no data stream, so only need to check for protocols that have completed - - protocol_statuses: dict[int, Any] = parse_stim_data(*stim_stream_info.values()) - - logger.debug("Stim statuses received: %s", protocol_statuses) - - protocols_completed = [ - protocol_idx - for protocol_idx, status_updates_arr in protocol_statuses.items() - if status_updates_arr[1][-1] == STIM_COMPLETE_SUBPROTOCOL_IDX - ] - if protocols_completed: - self._protocols_running -= set(protocols_completed) - await self._to_monitor_queue.put( - {"command": "stim_status_update", "protocols_completed": protocols_completed} - ) + await self._comm_to_monitor_queue.put(prev_command_info) async def _process_status_codes(self, status_codes_dict: dict[str, int], comm_type: str) -> None: # placing this here so that handshake responses also set the event @@ -633,6 +636,225 @@ def _log_dur_since_events(self) -> None: logger.info(f"Duration (seconds) since events: {durs}") +FirstPacketTracker = namedtuple("FirstPacketTracker", ["magnetometer", "stimulation"]) + + +@dataclass +class StimDataBuffers: + raw: dict[int, NDArray[(2, Any), int]] = dataclasses.field(default_factory=dict) + reduced: dict[int, 
NDArray[(2, Any), int]] = dataclasses.field(default_factory=dict) + + +class DataStreamManager: + def __init__( + self, + comm_to_monitor_queue: asyncio.Queue[dict[str, Any]], + data_to_monitor_queue: asyncio.Queue[dict[str, Any]], + data_to_file_writer_queue: asyncio.Queue[dict[str, Any]], + ) -> None: + self._comm_to_monitor_queue = comm_to_monitor_queue + self._data_to_monitor_queue = data_to_monitor_queue + self._data_to_file_writer_queue = data_to_file_writer_queue + + self._base_global_time_of_data_stream: int | None = None + self._has_packet_been_sent = FirstPacketTracker(magnetometer=False, stimulation=False) + + self._mag_data_buffers: dict[str, Any] = {} + self._stim_status_buffers = StimDataBuffers() + self.protocols_running: set[int] = set() + + # TODO store this data more cleanly + self._subprotocol_idx_mappings: dict[int, dict[int, int]] = {} + self._max_original_subprotocol_idx_counts: dict[int, tuple[int, ...]] = {} + self._curr_original_subprotocol_idxs: list[int | None] + self._curr_original_subprotocol_counts: list[int | None] + self._reset_stim_idx_counters() + + self._reset_mag_data_buffers() + + def _reset_mag_data_buffers(self) -> None: + # TODO make this a dataclass + self._mag_data_buffers = {"raw_bytes": bytearray(0), "num_packets": 0} + + def _reset_stim_idx_counters(self) -> None: + self._curr_original_subprotocol_idxs = [None] * NUM_WELLS + self._curr_original_subprotocol_counts = [None] * NUM_WELLS + + @property + def is_streaming(self) -> bool: + return self._base_global_time_of_data_stream is not None + + @property + def is_stimulating(self) -> bool: + return len(self.protocols_running) > 0 + + @is_stimulating.setter + def is_stimulating(self, value: bool) -> None: + if value: + self._reset_stim_idx_counters() + self.protocols_running = set(range(self.num_stim_protocols)) + else: + self.protocols_running = set() + + @property + def num_stim_protocols(self) -> int: + return len(self._stim_status_buffers.raw) + + 
@num_stim_protocols.setter + def num_stim_protocols(self, num: int) -> None: + self._stim_status_buffers = StimDataBuffers( + *[ + {protocol_idx: np.empty((2, 0)) for protocol_idx in range(num)} + for _ in range(len(dataclasses.fields(StimDataBuffers))) + ] + ) + + def set_stim_info(self, comm_from_monitor: dict[str, dict[Any, Any]]) -> None: + self.num_stim_protocols = len(comm_from_monitor["stim_info"]["protocols"]) + self._subprotocol_idx_mappings = comm_from_monitor["subprotocol_idx_mappings"] + self._max_original_subprotocol_idx_counts = comm_from_monitor["max_subprotocol_idx_counts"] + + async def activate(self, base_global_time_of_data_stream: int) -> None: + self._base_global_time_of_data_stream = base_global_time_of_data_stream + self._has_packet_been_sent = FirstPacketTracker(magnetometer=False, stimulation=False) + + # send any buffered stim statuses + for status_type, buffered_protocol_statuses in dataclasses.asdict(self._stim_status_buffers).items(): + protocol_statuses: dict[int, Any] = {} + for protocol_idx, stim_statuses in buffered_protocol_statuses.items(): + if stim_statuses.shape[1] > 0 and stim_statuses[1][-1] != STIM_COMPLETE_SUBPROTOCOL_IDX: + protocol_statuses[protocol_idx] = stim_statuses + + if protocol_statuses: + await self._dump_packet( + {"data_type": f"{status_type}_stimulation", "protocol_statuses": protocol_statuses} + ) + + async def deactivate(self) -> None: + self._base_global_time_of_data_stream = None + # TODO anything else? 
+ + async def push(self, sorted_packets: dict[str, Any]) -> None: + for data_type in ("magnetometer", "stimulation"): + handler_fn = getattr(self, f"_push_{data_type}") + await handler_fn(sorted_packets[f"{data_type}_stream_info"]) + + async def _push_magnetometer(self, stream_info: dict[str, Any]) -> None: + # if not streaming or no packets, then nothing to do + if not self.is_streaming or not stream_info["num_packets"]: + return + + # update cache values + for key, value in stream_info.items(): + self._mag_data_buffers[key] += value + + current_num_packets = self._mag_data_buffers["num_packets"] + + # don't parse and send to file writer unless there is at least 1 second worth of data + if current_num_packets < NUM_MAG_DATA_PACKETS_PER_SECOND: + return + + # parse magnetometer data + # TODO make sure that parse_magnetometer_data maps from module ID to well idx correctly + parsed_mag_data_dict = parse_magnetometer_data( + *self._mag_data_buffers.values(), self._base_global_time_of_data_stream + ) + + await self._dump_packet({"data_type": "magnetometer", **parsed_mag_data_dict}) + + # reset cache now that all mag data has been parsed + self._reset_mag_data_buffers() + + async def _push_stimulation(self, stream_info: dict[str, Any]) -> None: + protocol_statuses: dict[int, Any] = parse_stim_data(*stream_info.values()) + + logger.debug("Stim statuses received: %s", protocol_statuses) + + # update buffers and dump packets if necessary + reduced_protocol_statuses: dict[int, Any] = {} + for protocol_idx, status_updates_arr in protocol_statuses.items(): + self._stim_status_buffers.raw[protocol_idx] = np.hstack( + [self._stim_status_buffers.raw[protocol_idx][:, -1:], status_updates_arr] + ) + if ( + reduced_well_status_arr := self._reduce_subprotocol_chunks(protocol_idx, status_updates_arr) + ).shape[1] > 0: + self._stim_status_buffers.reduced[protocol_idx] = np.hstack( + [self._stim_status_buffers.reduced[protocol_idx][:, -1:], reduced_well_status_arr] + ) + 
reduced_protocol_statuses[protocol_idx] = reduced_well_status_arr + + if self.is_streaming: + await self._dump_packet({"data_type": "raw_stimulation", "protocol_statuses": protocol_statuses}) + if reduced_protocol_statuses: + await self._dump_packet( + {"data_type": "reduced_stimulation", "protocol_statuses": reduced_protocol_statuses} + ) + + protocols_completed = [ + protocol_idx + for protocol_idx, status_updates_arr in protocol_statuses.items() + if status_updates_arr[1][-1] == STIM_COMPLETE_SUBPROTOCOL_IDX + ] + if protocols_completed: + self.protocols_running -= set(protocols_completed) + await self._comm_to_monitor_queue.put( + {"command": "stim_status_update", "protocols_completed": protocols_completed} + ) + + async def _dump_packet(self, data_packet: dict[str, Any]) -> None: + data_type = data_packet["data_type"] + data_packet["is_first_packet_of_stream"] = not self._has_packet_been_sent._asdict()[data_type] + + if data_type == "magnetometer": + if data_packet["is_first_packet_of_stream"]: + for array_key in (TIME_INDICES, TIME_OFFSETS, TISSUE_SENSOR_READINGS): + data_packet[array_key] = data_packet[array_key][..., NUM_INITIAL_MAG_PACKETS_TO_DROP:] + await self._data_to_file_writer_queue.put(data_packet) + else: # stim data + for protocol_statuses in data_packet["protocol_statuses"].values(): + protocol_statuses[0] -= self._base_global_time_of_data_stream + queue = self._data_to_file_writer_queue if "raw" in data_type else self._data_to_monitor_queue + await queue.put({**data_packet, "data_type": "stimulation"}) + + self._has_packet_been_sent = self._has_packet_been_sent._replace(**{data_type: True}) + + def _reduce_subprotocol_chunks( + self, protocol_idx: int, protocol_statuses: NDArray[(2, Any), int] + ) -> NDArray[(2, Any), int]: + timepoint_well_status_pairs = [] + for timepoint, chunked_subprotocol_idx in protocol_statuses.T: + original_subprotocol_idx = ( + chunked_subprotocol_idx + if chunked_subprotocol_idx == STIM_COMPLETE_SUBPROTOCOL_IDX + else 
self._subprotocol_idx_mappings[protocol_idx][chunked_subprotocol_idx] + ) + + if original_subprotocol_idx == STIM_COMPLETE_SUBPROTOCOL_IDX: + timepoint_well_status_pairs.append((timepoint, original_subprotocol_idx)) + continue + + # update idx and reset count if subprotocol idx changed + if original_subprotocol_idx != self._curr_original_subprotocol_idxs[protocol_idx]: + self._curr_original_subprotocol_idxs[protocol_idx] = original_subprotocol_idx + self._curr_original_subprotocol_counts[protocol_idx] = -1 + + curr_count = self._curr_original_subprotocol_counts[protocol_idx] + max_count = self._max_original_subprotocol_idx_counts[protocol_idx][original_subprotocol_idx] + self._curr_original_subprotocol_counts[protocol_idx] = (curr_count + 1) % max_count # type: ignore + + # filter out intermediate idxs + if self._curr_original_subprotocol_counts[protocol_idx] == 0: + timepoint_well_status_pairs.append((timepoint, original_subprotocol_idx)) + + reduced_protocol_statuses = ( + np.array(timepoint_well_status_pairs, dtype=np.int64).T + if timepoint_well_status_pairs + else np.empty((2, 0)) # still need the array to be the correct shape + ) + return reduced_protocol_statuses + + FirmwareUpdateItems = tuple[int, bytes, dict[str, Any]] diff --git a/controller/src/controller/utils/command_tracking.py b/controller/src/controller/utils/command_tracking.py index e622d93..1fdb84a 100644 --- a/controller/src/controller/utils/command_tracking.py +++ b/controller/src/controller/utils/command_tracking.py @@ -25,12 +25,10 @@ async def _start_timer(self) -> None: async def complete(self) -> None: self._timer.cancel() - await self._timer - # TODO see if this helps clean up errors here - # try: - # await self._timer - # except asyncio.CancelledError: - # pass + try: + await self._timer + except asyncio.CancelledError: + pass class CommandTracker: diff --git a/controller/src/controller/utils/commands.py b/controller/src/controller/utils/commands.py new file mode 100644 index 
0000000..4eeb352 --- /dev/null +++ b/controller/src/controller/utils/commands.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +import datetime +import json +from typing import Any + +from pulse3D.constants import BACKEND_LOG_UUID +from pulse3D.constants import BOOT_FLAGS_UUID +from pulse3D.constants import CHANNEL_FIRMWARE_VERSION_UUID +from pulse3D.constants import COMPUTER_NAME_HASH_UUID +from pulse3D.constants import CUSTOMER_ACCOUNT_ID_UUID +from pulse3D.constants import INITIAL_MAGNET_FINDING_PARAMS_UUID +from pulse3D.constants import IS_CALIBRATION_FILE_UUID +from pulse3D.constants import MAIN_FIRMWARE_VERSION_UUID +from pulse3D.constants import MANTARRAY_SERIAL_NUMBER_UUID +from pulse3D.constants import NOT_APPLICABLE_H5_METADATA +from pulse3D.constants import PLATE_BARCODE_IS_FROM_SCANNER_UUID +from pulse3D.constants import PLATE_BARCODE_UUID +from pulse3D.constants import PLATEMAP_LABEL_UUID +from pulse3D.constants import PLATEMAP_NAME_UUID +from pulse3D.constants import SOFTWARE_BUILD_NUMBER_UUID +from pulse3D.constants import SOFTWARE_RELEASE_VERSION_UUID +from pulse3D.constants import START_RECORDING_TIME_INDEX_UUID +from pulse3D.constants import STIM_BARCODE_IS_FROM_SCANNER_UUID +from pulse3D.constants import STIM_BARCODE_UUID +from pulse3D.constants import TISSUE_SAMPLING_PERIOD_UUID +from pulse3D.constants import TOTAL_WELL_COUNT_UUID +from pulse3D.constants import USER_ACCOUNT_ID_UUID +from pulse3D.constants import UTC_BEGINNING_RECORDING_UUID + +from .generic import get_hash_of_computer_name +from .state_management import ReadOnlyDict +from ..constants import COMPILED_EXE_BUILD_TIMESTAMP +from ..constants import CURRENT_SOFTWARE_VERSION +from ..constants import DEFAULT_MAG_SAMPLING_PERIOD +from ..constants import NUM_WELLS + + +def create_start_recording_command( + system_state: ReadOnlyDict, + *, + start_recording_time_index: int, + barcodes: dict[str, str] | None = None, + platemap_info: dict[str, Any] | None = None, + is_calibration_recording: 
bool = False, +) -> dict[str, Any]: + start_recording_timestamp_utc = datetime.datetime.utcnow() + + # barcodes + if not barcodes: + barcodes = {"plate_barcode": NOT_APPLICABLE_H5_METADATA, "stim_barcode": NOT_APPLICABLE_H5_METADATA} + + barcode_metadata = {} + for barcode_type, barcode_uuid, barcode_match_uuid in ( + ("plate_barcode", PLATE_BARCODE_UUID, PLATE_BARCODE_IS_FROM_SCANNER_UUID), + ("stim_barcode", STIM_BARCODE_UUID, STIM_BARCODE_IS_FROM_SCANNER_UUID), + ): + barcode = barcodes[barcode_type] + barcode_metadata[barcode_uuid] = barcode + barcode_metadata[barcode_match_uuid] = barcode == system_state[barcode_type] + + # platemap + formatted_platemap_info = { + "name": str(NOT_APPLICABLE_H5_METADATA), + "labels": [str(NOT_APPLICABLE_H5_METADATA)] * 24, + } + if platemap_info: + formatted_platemap_info["name"] = platemap_info["map_name"] + for label_info in platemap_info["labels"]: + for well_idx in label_info["wells"]: + formatted_platemap_info["labels"][well_idx] = label_info["name"] # type: ignore + + config_settings = system_state["config_settings"] + instrument_metadata = system_state["instrument_metadata"] + + command: dict[str, Any] = { + "communication_type": "recording", + "command": "start_recording", + "metadata": { + # recording + IS_CALIBRATION_FILE_UUID: is_calibration_recording, + UTC_BEGINNING_RECORDING_UUID: start_recording_timestamp_utc, + }, + } + + if not is_calibration_recording: + command["metadata"] |= { + # machine + COMPUTER_NAME_HASH_UUID: get_hash_of_computer_name(), + # software + SOFTWARE_BUILD_NUMBER_UUID: COMPILED_EXE_BUILD_TIMESTAMP, + SOFTWARE_RELEASE_VERSION_UUID: CURRENT_SOFTWARE_VERSION, + # user + CUSTOMER_ACCOUNT_ID_UUID: config_settings.get("customer_id", NOT_APPLICABLE_H5_METADATA), + USER_ACCOUNT_ID_UUID: config_settings.get("username", NOT_APPLICABLE_H5_METADATA), + # session + BACKEND_LOG_UUID: system_state["log_file_id"], + # recording + START_RECORDING_TIME_INDEX_UUID: start_recording_time_index, + # barcodes + 
**barcode_metadata, + # experiment/analysis + PLATEMAP_NAME_UUID: formatted_platemap_info["name"], + PLATEMAP_LABEL_UUID: formatted_platemap_info["labels"], + TOTAL_WELL_COUNT_UUID: NUM_WELLS, + # instrument + MANTARRAY_SERIAL_NUMBER_UUID: instrument_metadata[MANTARRAY_SERIAL_NUMBER_UUID], + MAIN_FIRMWARE_VERSION_UUID: instrument_metadata[MAIN_FIRMWARE_VERSION_UUID], + CHANNEL_FIRMWARE_VERSION_UUID: instrument_metadata[CHANNEL_FIRMWARE_VERSION_UUID], + TISSUE_SAMPLING_PERIOD_UUID: DEFAULT_MAG_SAMPLING_PERIOD, + INITIAL_MAGNET_FINDING_PARAMS_UUID: json.dumps( + dict(instrument_metadata[INITIAL_MAGNET_FINDING_PARAMS_UUID]) + ), + BOOT_FLAGS_UUID: instrument_metadata[BOOT_FLAGS_UUID], + } + + return command diff --git a/controller/src/controller/utils/data_parsing_cy.pyx b/controller/src/controller/utils/data_parsing_cy.pyx index a076a17..75f3078 100644 --- a/controller/src/controller/utils/data_parsing_cy.pyx +++ b/controller/src/controller/utils/data_parsing_cy.pyx @@ -8,6 +8,7 @@ from ..constants import SERIAL_COMM_PAYLOAD_INDEX from ..constants import SERIAL_COMM_CHECKSUM_LENGTH_BYTES from ..constants import SERIAL_COMM_DATA_SAMPLE_LENGTH_BYTES from ..constants import SERIAL_COMM_MAGIC_WORD_BYTES +from ..constants import NUM_MAG_SENSORS_PER_WELL from ..constants import SERIAL_COMM_PACKET_METADATA_LENGTH_BYTES from ..constants import SERIAL_COMM_PACKET_REMAINDER_SIZE_LENGTH_BYTES from ..constants import SERIAL_COMM_TIME_OFFSET_LENGTH_BYTES @@ -42,7 +43,7 @@ DEF NUM_CHANNELS_PER_SENSOR = 3 # these values exist only for importing the constants defined above into the python test suite SERIAL_COMM_MAGIC_WORD_LENGTH_BYTES_CY = MAGIC_WORD_LEN -SERIAL_COMM_NUM_CHANNELS_PER_SENSOR_CY = NUM_CHANNELS_PER_SENSOR +NUM_CHANNELS_PER_MAG_SENSOR_CY = NUM_CHANNELS_PER_SENSOR # convert python constants to C types cdef char[MAGIC_WORD_LEN + 1] MAGIC_WORD = SERIAL_COMM_MAGIC_WORD_BYTES + bytes(1) @@ -54,13 +55,15 @@ cdef int MIN_PACKET_SIZE = SERIAL_COMM_PACKET_METADATA_LENGTH_BYTES 
cdef int SERIAL_COMM_TIME_OFFSET_LENGTH_BYTES_C_INT = SERIAL_COMM_TIME_OFFSET_LENGTH_BYTES cdef int SERIAL_COMM_DATA_SAMPLE_LENGTH_BYTES_C_INT = SERIAL_COMM_DATA_SAMPLE_LENGTH_BYTES -cdef int SERIAL_COMM_NUM_CHANNELS_PER_SENSOR_C_INT = NUM_CHANNELS_PER_SENSOR +cdef int NUM_CHANNELS_PER_MAG_SENSOR_C_INT = NUM_CHANNELS_PER_SENSOR +cdef int NUM_MAG_SENSORS_PER_WELL_C_INT = NUM_MAG_SENSORS_PER_WELL cdef int SERIAL_COMM_PAYLOAD_INDEX_C_INT = SERIAL_COMM_PAYLOAD_INDEX cdef int SERIAL_COMM_STIM_STATUS_PACKET_TYPE_C_INT = SerialCommPacketTypes.STIM_STATUS cdef int TOTAL_NUM_WELLS_C_INT = NUM_WELLS +cdef int TOTAL_NUM_SENSORS_C_INT = TOTAL_NUM_WELLS_C_INT * NUM_MAG_SENSORS_PER_WELL_C_INT cdef packed struct Packet: @@ -194,6 +197,63 @@ cpdef dict sort_serial_packets(unsigned char [:] read_bytes): } +cpdef dict parse_magnetometer_data( + unsigned char [:] mag_data_packet_bytes, + int num_mag_data_packets, + uint64_t base_global_time, +): + mag_data_packet_bytes = mag_data_packet_bytes.copy() # make sure data is C contiguous + cdef int magnetometer_data_packet_len = len(mag_data_packet_bytes) // num_mag_data_packets + + cdef int num_time_offsets = TOTAL_NUM_SENSORS_C_INT + cdef int num_data_channels = TOTAL_NUM_SENSORS_C_INT * NUM_CHANNELS_PER_MAG_SENSOR_C_INT + + # arrays for storing parsed data + time_indices = np.empty(num_mag_data_packets, dtype=np.uint64, order="C") + time_offsets = np.empty((num_time_offsets, num_mag_data_packets), dtype=np.uint16, order="C") + data = np.empty((num_data_channels, num_mag_data_packets), dtype=np.uint16, order="C") + # get memory views of numpy arrays for faster operations + cdef uint64_t [::1] time_indices_view = time_indices + cdef uint16_t [:, ::1] time_offsets_view = time_offsets + cdef uint16_t [:, ::1] data_view = data + + # loop vars + cdef int bytes_idx = 0 + cdef int data_packet_idx + cdef int time_offset_arr_idx, channel_arr_idx + cdef MagnetometerData * data_packet_ptr + cdef SensorData * sensor_data_ptr + cdef int sensor, 
channel + + for data_packet_idx in range(num_mag_data_packets): + data_packet_ptr = &mag_data_packet_bytes[bytes_idx] + # add to time index array + time_indices_view[data_packet_idx] = ( &data_packet_ptr.time_index)[0] + # add next data points to data array + sensor_data_ptr = &data_packet_ptr.sensor_data + channel_arr_idx = 0 + time_offset_arr_idx = 0 + for sensor in range(TOTAL_NUM_SENSORS_C_INT): + time_offsets_view[time_offset_arr_idx, data_packet_idx] = sensor_data_ptr.time_offset + time_offset_arr_idx += 1 + for channel in range(NUM_CHANNELS_PER_MAG_SENSOR_C_INT): + data_view[channel_arr_idx, data_packet_idx] = sensor_data_ptr.data_points[channel] + channel_arr_idx += 1 + # shift SensorData ptr by appropriate amount + sensor_data_ptr = ( + ( sensor_data_ptr) + + SERIAL_COMM_TIME_OFFSET_LENGTH_BYTES_C_INT + + (NUM_CHANNELS_PER_MAG_SENSOR_C_INT * SERIAL_COMM_DATA_SAMPLE_LENGTH_BYTES_C_INT) + ) + # increment idxs + bytes_idx += magnetometer_data_packet_len + data_packet_idx += 1 + + time_indices -= base_global_time + + return {"time_indices": time_indices, "time_offsets": time_offsets, "data": data} + + cpdef dict parse_stim_data(unsigned char [:] stim_packet_bytes, int num_stim_packets): cdef dict stim_data_dict = {} # dict for storing stim statuses diff --git a/controller/src/controller/utils/generic.py b/controller/src/controller/utils/generic.py index 0f3c808..2374f4d 100644 --- a/controller/src/controller/utils/generic.py +++ b/controller/src/controller/utils/generic.py @@ -2,6 +2,8 @@ """Misc utility functions.""" import asyncio +import hashlib +import socket from semver import VersionInfo @@ -22,6 +24,10 @@ from ..exceptions import WebsocketCommandError +def get_hash_of_computer_name() -> str: + return hashlib.sha512(socket.gethostname().encode(encoding="UTF-8")).hexdigest() + + def semver_gt(version_a: str, version_b: str) -> bool: """Determine if Version A is greater than Version B.""" return VersionInfo.parse(version_a) > VersionInfo.parse(version_b) # 
type: ignore diff --git a/controller/src/controller/utils/serial_comm.py b/controller/src/controller/utils/serial_comm.py index 207fc40..2190adf 100644 --- a/controller/src/controller/utils/serial_comm.py +++ b/controller/src/controller/utils/serial_comm.py @@ -220,7 +220,7 @@ def convert_status_code_bytes_to_dict(status_code_bytes: bytes) -> dict[str, int status_code_labels = ( "main_status", "index_of_thread_with_error", - *[f"module_{i}_status" for i in range(24)], + *[f"module_{i}_status" for i in range(NUM_WELLS)], ) return {label: status_code_bytes[i] for i, label in enumerate(status_code_labels)} @@ -446,7 +446,8 @@ def convert_stim_bytes_to_dict(stim_bytes: bytes) -> dict[str, Any]: stim_info_dict: dict[str, Any] = { "protocols": [], "protocol_assignments": { - GENERIC_24_WELL_DEFINITION.get_well_name_from_well_index(well_idx): None for well_idx in range(24) + GENERIC_24_WELL_DEFINITION.get_well_name_from_well_index(well_idx): None + for well_idx in range(NUM_WELLS) }, } diff --git a/controller/src/controller/utils/stimulation.py b/controller/src/controller/utils/stimulation.py index 5ccb7a2..9d5a3bf 100644 --- a/controller/src/controller/utils/stimulation.py +++ b/controller/src/controller/utils/stimulation.py @@ -118,16 +118,14 @@ def chunk_stim_nodes( ) -def chunk_protocols_in_stim_info( - stim_info: dict[str, Any] -) -> tuple[dict[str, Any], dict[str, dict[int, int]], dict[str, tuple[int, ...]]]: +def chunk_protocols_in_stim_info(stim_info: dict[str, Any]) -> dict[str, dict[Any, Any]]: # copying so the original dict passed in does not get modified chunked_stim_info = copy.deepcopy(stim_info) subprotocol_idx_mappings = {} max_subprotocol_idx_counts = {} - for protocol in chunked_stim_info["protocols"]: + for protocol_idx, protocol in enumerate(chunked_stim_info["protocols"]): new_subprotocols, chunked_idx_to_original_idx, original_idx_counts, *_ = chunk_stim_nodes( protocol["subprotocols"] ) @@ -135,11 +133,14 @@ def chunk_protocols_in_stim_info( # FW 
requires top level to be a single loop protocol["subprotocols"] = [{"type": "loop", "num_iterations": 1, "subprotocols": new_subprotocols}] - protocol_id = protocol["protocol_id"] - subprotocol_idx_mappings[protocol_id] = chunked_idx_to_original_idx - max_subprotocol_idx_counts[protocol_id] = tuple(original_idx_counts) + subprotocol_idx_mappings[protocol_idx] = chunked_idx_to_original_idx + max_subprotocol_idx_counts[protocol_idx] = tuple(original_idx_counts) - return chunked_stim_info, subprotocol_idx_mappings, max_subprotocol_idx_counts + return { + "stim_info": chunked_stim_info, + "subprotocol_idx_mappings": subprotocol_idx_mappings, + "max_subprotocol_idx_counts": max_subprotocol_idx_counts, + } def _check_subprotocol_type(subprotocol: dict[str, Any], protocol_id: int, idx: int) -> Any: diff --git a/controller/src/controller/utils/validation.py b/controller/src/controller/utils/validation.py new file mode 100644 index 0000000..c3e4b69 --- /dev/null +++ b/controller/src/controller/utils/validation.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from controller.constants import ALL_VALID_BARCODE_HEADERS +from controller.constants import BARCODE_HEADERS +from controller.constants import BARCODE_LEN + + +def check_barcode_for_errors(barcode: str, barcode_type: str | None = None) -> str: + """Return error message if barcode contains an error. + + barcode_type kwarg should always be given unless checking a scanned + barcode value. 
+ """ + if len(barcode) != BARCODE_LEN: + return "barcode is incorrect length" + if (header := barcode[:2]) not in BARCODE_HEADERS.get(barcode_type, ALL_VALID_BARCODE_HEADERS): + return f"barcode contains invalid header: '{header}'" + return (_check_new_barcode if "-" in barcode else _check_old_barcode)(barcode) + + +def _check_new_barcode(barcode: str) -> str: + for char in barcode[2:10] + barcode[-1]: + if not char.isnumeric(): + return f"barcode contains invalid character: '{char}'" + if int(year := barcode[2:4]) < 22: + return f"barcode contains invalid year: '{year}'" + if not 0 < int(julian_date := barcode[4:7]) < 366: + return f"barcode contains invalid Julian date: '{julian_date}'" + if not 0 <= int(experiment_id := barcode[7:10]) < 300: + return f"barcode contains invalid experiment id: '{experiment_id}'" + # Tanner (4/26/23): all barcodes at the moment have 2 as the final digit + if (last_digit := int(barcode[-1])) != 2: + return f"barcode contains invalid last digit: '{last_digit}'" + return "" + + +def _check_old_barcode(barcode: str) -> str: + for char in barcode[2:]: + if not char.isnumeric(): + return f"barcode contains invalid character: '{char}'" + if int(year := barcode[2:6]) < 2021: + return f"barcode contains invalid year: '{year}'" + if not 0 < int(julian_date := barcode[6:9]) < 366: + return f"barcode contains invalid Julian date: '{julian_date}'" + return "" diff --git a/controller/tests/helpers.py b/controller/tests/helpers.py index debc83a..afc212b 100644 --- a/controller/tests/helpers.py +++ b/controller/tests/helpers.py @@ -7,6 +7,7 @@ from controller.constants import MICRO_TO_BASE_CONVERSION from controller.constants import MICROS_PER_MILLI from controller.constants import NUM_WELLS +from controller.constants import SERIAL_COMM_MAX_TIMESTAMP_VALUE from controller.constants import STIM_MAX_DUTY_CYCLE_DURATION_MICROSECONDS from controller.constants import STIM_MAX_DUTY_CYCLE_PERCENTAGE from controller.constants import 
STIM_MAX_SUBPROTOCOL_DURATION_MICROSECONDS @@ -58,6 +59,10 @@ def random_well_idx(): return randint(0, NUM_WELLS - 1) +def random_serial_comm_timestamp(): + return randint(0, SERIAL_COMM_MAX_TIMESTAMP_VALUE) + + def random_stim_type(): return choice(list(VALID_STIMULATION_TYPES)) @@ -254,7 +259,7 @@ def get_random_stim_loop(): raise NotImplementedError("TODO") -def create_random_stim_info(): +def get_random_stim_info(): protocol_ids = (None, "A", "B", "C", "D") stim_info = { "protocols": [ @@ -286,12 +291,10 @@ def create_random_stim_info(): }, } - if all(protocol_id is None for protocol_id in stim_info["protocol_assignments"].values()): - # make sure at least one well has a protocol assigned - stim_info["protocol_assignments"]["A1"] = "A" - elif all(protocol_id is not None for protocol_id in stim_info["protocol_assignments"].values()): - # make sure at least one well does not have a protocol assigned - stim_info["protocol_assignments"]["A1"] = None + # make sure all protocols are actually assigned and at least one well does not have a protocol + for well_idx, protocol_id in enumerate(protocol_ids): + well_name = GENERIC_24_WELL_DEFINITION.get_well_name_from_well_index(well_idx) + stim_info["protocol_assignments"][well_name] = protocol_id return stim_info @@ -340,3 +343,8 @@ def assert_subprotocol_node_bytes_are_expected(actual, expected): else: assert actual[1] == expected[1], "Invalid subprotocol idx" assert_subprotocol_pulse_bytes_are_expected(actual[2:], expected[2:]) + + +def compare_exceptions(e1, e2): + # from https://stackoverflow.com/questions/15844131/comparing-exception-objects-in-python + return type(e1) is type(e2) and e1.args == e2.args diff --git a/controller/tests/main_systems/test_server.py b/controller/tests/main_systems/test_server.py index bf524ea..2598500 100644 --- a/controller/tests/main_systems/test_server.py +++ b/controller/tests/main_systems/test_server.py @@ -1,39 +1,78 @@ # -*- coding: utf-8 -*- import asyncio +import json +from random 
import choice +from random import randint +import urllib import uuid +from controller.constants import ErrorCodes +from controller.constants import StimulationStates +from controller.constants import StimulatorCircuitStatuses +from controller.constants import SystemStatuses +from controller.constants import VALID_CREDENTIAL_TYPES +from controller.exceptions import WebsocketCommandError from controller.main import initialize_system_state +from controller.main_systems import server from controller.main_systems.server import Server -from controller.utils.aio import clean_up_tasks +from controller.utils.aio import wait_tasks_clean from controller.utils.state_management import SystemStateManager +from pulse3D.constants import NOT_APPLICABLE_H5_METADATA import pytest +import pytest_asyncio from websockets import connect from websockets.server import WebSocketServerProtocol +from ..helpers import get_random_stim_info +from ..helpers import random_bool +from ..helpers import random_semver +from ..helpers import random_well_idx +from ..helpers import TEST_PLATE_BARCODE +from ..helpers import TEST_STIM_BARCODE + WS_URI = "ws://localhost:4565" -@pytest.fixture(scope="function", name="test_server_items") -def fixture__test_server_items(mocker): +ALL_SYSTEM_STATUSES = frozenset(SystemStatuses.__members__.values()) + + +class ServerTestRunner: + def __init__(self, server): + self._server = server + self._run_task = None + + async def run(self, system_error_future, server_running_event): + self._run_task = asyncio.create_task(self._server.run(system_error_future, server_running_event)) + await server_running_event.wait() + return self._run_task + + async def clean_up(self): + if self._run_task: + await wait_tasks_clean({self._run_task}) + + +@pytest_asyncio.fixture(scope="function", name="test_server_items") +async def fixture__test_server_items(mocker): ssm = SystemStateManager() - asyncio.run( - ssm.update( - initialize_system_state({"base_directory": None, 
"expected_software_version": None}, uuid.uuid4()) - ) + await ssm.update( + initialize_system_state({"base_directory": None, "expected_software_version": None}, uuid.uuid4()) ) from_monitor_queue = asyncio.Queue() to_monitor_queue = asyncio.Queue() server = Server(ssm.get_read_only_copy, from_monitor_queue, to_monitor_queue) + test_runner = ServerTestRunner(server) + yield { "server": server, "system_state_manager": ssm, "from_monitor_queue": from_monitor_queue, "to_monitor_queue": to_monitor_queue, + "run": test_runner.run, } - # TODO any teardown needed here? + await test_runner.clean_up() @pytest.mark.asyncio @@ -42,7 +81,7 @@ async def test_Server__handles_new_connections_correctly(test_server_items, mock spied_handle_comm = mocker.spy(test_server, "_handle_comm") - server_run_task = asyncio.create_task(test_server.run(asyncio.Future(), asyncio.Event())) + await test_server_items["run"](asyncio.Future(), asyncio.Event()) assert not test_server._ui_connection_made.is_set() assert test_server._websocket is None @@ -52,5 +91,813 @@ async def test_Server__handles_new_connections_correctly(test_server_items, mock assert test_server._ui_connection_made.is_set() assert isinstance(test_server._websocket, WebSocketServerProtocol) spied_handle_comm.assert_called_once() + # make a second connection and make sure it is ignored + async with connect(WS_URI): + spied_handle_comm.assert_called_once() + + +@pytest.mark.asyncio +async def test_Server__handles_disconnect(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + + system_error_future = asyncio.Future() + run_task = await test_server_items["run"](system_error_future, asyncio.Event()) + async with connect(WS_URI): + pass + await wait_tasks_clean({run_task}) + + spied_report.assert_not_called() + # this will not be done since there was no error + assert not system_error_future.done() + + +@pytest.mark.asyncio +async def 
test_Server__handles_cancellation(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + + system_error_future = asyncio.Future() + run_task = await test_server_items["run"](system_error_future, asyncio.Event()) + async with connect(WS_URI): + run_task.cancel() + await wait_tasks_clean({run_task}) + + spied_report.assert_called_once_with(system_error_future) + # this will not be done since there was no error + assert not system_error_future.done() + + +@pytest.mark.asyncio +async def test_Server__passes_msg_from_incoming_queue_to_websocket_client(test_server_items): + test_from_monitor_queue = test_server_items["from_monitor_queue"] + + test_msg = {"communication_type": "test"} + + await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await test_from_monitor_queue.put(test_msg) + assert json.loads(await client.recv()) == test_msg + + +@pytest.mark.asyncio +async def test_Server__logs_incoming_command(test_server_items, mocker): + spied_log_info = mocker.spy(server.logger, "info") + + test_comm = {"command": "test"} + + await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_comm)) + + spied_log_info.assert_any_call(f"Comm from UI: {test_comm}") + + +@pytest.mark.asyncio +async def test_Server__handles_uncrecognized_command(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + + system_error_future = asyncio.Future() + await test_server_items["run"](system_error_future, asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps({"command": "fake"})) + + spied_report.assert_called_once_with(system_error_future) + assert system_error_future.result() == (ErrorCodes.UI_SENT_BAD_DATA, {}) + + +@pytest.mark.asyncio +async def 
test_Server__handles_command_that_results_in_no_op(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + spied_log_error = mocker.spy(server.logger, "error") + + test_command = "stop_recording" + + system_error_future = asyncio.Future() + await test_server_items["run"](system_error_future, asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps({"command": test_command})) + + spied_log_error.assert_any_call(f"Command {test_command} resulted in a no-op") + spied_report.assert_not_called() + assert not system_error_future.done() + + +@pytest.mark.asyncio +async def test_Server__handles_failed_command(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + spied_handle_error = mocker.spy(server, "handle_system_error") + + # arbitrarily choosing this command + test_command = "test_cmd" + test_error = WebsocketCommandError("test msg") + test_server._handlers[test_command] = mocker.MagicMock(autospec=True, side_effect=test_error) + + system_error_future = asyncio.Future() + run_task = await test_server_items["run"](system_error_future, asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps({"command": test_command})) + await wait_tasks_clean({run_task}) + + spied_report.assert_called_once_with(system_error_future) + assert system_error_future.result() == (ErrorCodes.UI_SENT_BAD_DATA, {}) + + spied_handle_error.assert_called_once_with(test_error, system_error_future) + assert f"Command {test_command} failed" in test_error.__notes__ + + +@pytest.mark.asyncio +async def test_Server__handles_shutdown_command(test_server_items, mocker): + test_server = test_server_items["server"] + + spied_report = mocker.spy(test_server, "_report_system_error") + + system_error_future = asyncio.Future() + run_task = await 
test_server_items["run"](system_error_future, asyncio.Event()) + + async with connect(WS_URI) as client: + await client.send(json.dumps({"command": "shutdown"})) + await wait_tasks_clean({run_task}) + + assert test_server.user_initiated_shutdown is True + + spied_report.assert_not_called() + assert not system_error_future.done() + + +@pytest.mark.asyncio +async def test_Server__handles_login_command__success(test_server_items): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + + test_command = {"command": "login"} | {cred: "val" for cred in VALID_CREDENTIAL_TYPES} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_login_command__missing_cred(test_server_items, mocker): + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "login"} | {cred: "val" for cred in VALID_CREDENTIAL_TYPES} + test_missing_cred = choice(list(VALID_CREDENTIAL_TYPES)) + test_command.pop(test_missing_cred) + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Missing cred type(s): {set([test_missing_cred])}" + + +@pytest.mark.asyncio +async def test_Server__handles_login_command__invalid_cred(test_server_items, mocker): + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_invalid_cred = "bad_cred" + test_command = {"command": "login", test_invalid_cred: "val"} | { + cred: "val" for cred in VALID_CREDENTIAL_TYPES + } + + run_task = await 
test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Invalid cred type(s) given: {set([test_invalid_cred])}" + + +@pytest.mark.asyncio +async def test_Server__handles_set_latest_software_version_command__success(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + + test_command = {"command": "set_latest_software_version", "version": random_semver()} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_set_latest_software_version_command__version_missing( + test_server_items, mocker +): + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "set_latest_software_version"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Command missing 'version' value" + + +@pytest.mark.asyncio +async def test_Server__handles_set_latest_software_version_command__invalid_version( + test_server_items, mocker +): + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_bad_version = "bad" + test_command = {"command": "set_latest_software_version", "version": test_bad_version} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await 
client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Invalid semver: {test_bad_version}" + + +@pytest.mark.asyncio +async def test_Server__handles_set_firmware_update_confirmation_command__success(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + + test_command = {"command": "set_firmware_update_confirmation", "update_accepted": random_bool()} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_set_firmware_update_confirmation_command__invalid_update_accepted_value( + test_server_items, mocker +): + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_bad_val = "bad" + test_command = {"command": "set_firmware_update_confirmation", "update_accepted": test_bad_val} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Invalid value for update_accepted: {test_bad_val}" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("test_system_status", [SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.IDLE_READY]) +async def test_Server__handles_start_calibration_command__success(test_system_status, test_server_items): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": test_system_status}) + + test_command = {"command": "start_calibration"} + + run_task = await 
test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_start_calibration_command__no_op(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": SystemStatuses.CALIBRATING}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_calibration"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 0 + spied_handle_error.assert_not_called() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "test_system_status", + ALL_SYSTEM_STATUSES + - {SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.CALIBRATING, SystemStatuses.IDLE_READY}, +) +async def test_Server__handles_start_calibration_command__invalid_system_status( + test_system_status, test_server_items, mocker +): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": test_system_status}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_calibration"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert ( + str(actual_error) + == f"Cannot calibrate unless in {(SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.IDLE_READY)}" + ) + + +@pytest.mark.asyncio +async def 
test_Server__handles_start_calibration_command__stim_checks_running(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + + await ssm.update( + { + "system_status": choice([SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.IDLE_READY]), + "stimulator_circuit_statuses": { + random_well_idx(): StimulatorCircuitStatuses.CALCULATING.name.lower() + }, + } + ) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_calibration"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Cannot calibrate while stimulator checks are running" + + +@pytest.mark.asyncio +async def test_Server__handles_start_calibration_command__stim_running(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + + await ssm.update( + { + "system_status": choice([SystemStatuses.CALIBRATION_NEEDED, SystemStatuses.IDLE_READY]), + "stimulation_protocol_statuses": [ + choice([StimulationStates.STARTING, StimulationStates.RUNNING]) + ], + } + ) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_calibration"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Cannot calibrate while stimulation is running" + + +@pytest.mark.asyncio +async def test_Server__handles_start_data_stream_command__success(test_server_items): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": 
SystemStatuses.IDLE_READY}) + + test_command = {"command": "start_data_stream", "plate_barcode": TEST_PLATE_BARCODE} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "test_system_status", + [SystemStatuses.BUFFERING, SystemStatuses.LIVE_VIEW_ACTIVE, SystemStatuses.RECORDING], +) +async def test_Server__handles_start_data_stream_command__no_op( + test_system_status, test_server_items, mocker +): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": test_system_status}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_data_stream", "plate_barcode": TEST_PLATE_BARCODE} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 0 + spied_handle_error.assert_not_called() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "test_system_status", + ALL_SYSTEM_STATUSES + - { + SystemStatuses.IDLE_READY, + SystemStatuses.BUFFERING, + SystemStatuses.LIVE_VIEW_ACTIVE, + SystemStatuses.RECORDING, + }, +) +async def test_Server__handles_start_data_stream_command__invalid_system_status( + test_system_status, test_server_items, mocker +): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": test_system_status}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_data_stream", "plate_barcode": TEST_PLATE_BARCODE} + + run_task = await 
test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Cannot start data stream unless in {SystemStatuses.IDLE_READY.name}" + + +@pytest.mark.asyncio +async def test_Server__handles_start_data_stream_command__stim_checks_running(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + await ssm.update( + { + "system_status": SystemStatuses.IDLE_READY, + "stimulator_circuit_statuses": { + random_well_idx(): StimulatorCircuitStatuses.CALCULATING.name.lower() + }, + } + ) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_data_stream", "plate_barcode": TEST_PLATE_BARCODE} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Cannot start data stream while stimulator checks are running" + + +@pytest.mark.asyncio +async def test_Server__handles_start_data_stream_command__missing_plate_barcode(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_data_stream"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Command missing 'plate_barcode' value" + + +@pytest.mark.asyncio +async def 
test_Server__handles_start_data_stream_command__empty_plate_barcode(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "start_data_stream", "plate_barcode": ""} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == "Cannot start data stream without a plate barcode present" + + +@pytest.mark.asyncio +async def test_Server__handles_start_data_stream_command__invalid_plate_barcode(test_server_items, mocker): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + spied_check_error = mocker.spy(server, "check_barcode_for_errors") + + test_plate_barcode = "bad" + test_command = {"command": "start_data_stream", "plate_barcode": test_plate_barcode} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + spied_check_error.assert_called_once_with(test_plate_barcode, "plate_barcode") + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Plate {spied_check_error.spy_return}" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("test_system_status", [SystemStatuses.BUFFERING, SystemStatuses.LIVE_VIEW_ACTIVE]) +async def test_Server__handles_stop_data_stream_command__success(test_system_status, test_server_items): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await 
ssm.update({"system_status": test_system_status}) + + test_command = {"command": "stop_data_stream"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_stop_data_stream_command__no_op(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "stop_data_stream"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 0 + spied_handle_error.assert_not_called() + + +@pytest.mark.asyncio +@pytest.mark.parametrize("test_system_status", [SystemStatuses.CALIBRATING, SystemStatuses.RECORDING]) +async def test_Server__handles_stop_data_stream_command__invalid_system_status( + test_system_status, test_server_items, mocker +): + ssm = test_server_items["system_state_manager"] + await ssm.update({"system_status": test_system_status}) + + spied_handle_error = mocker.spy(server, "handle_system_error") + + test_command = {"command": "stop_data_stream"} + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + actual_error = spied_handle_error.call_args[0][0] + assert str(actual_error) == f"Cannot stop data stream while in {test_system_status.name}" + + +@pytest.mark.asyncio 
+@pytest.mark.parametrize("test_stim_running_status", [True, False]) +@pytest.mark.parametrize( + "test_platemap", + [ + None, + {}, + { + "map_name": "test platemap name", + "labels": [{"name": "test-label-1", "wells": [0]}, {"name": "test_label_2", "wells": [1]}], + }, + ], +) +async def test_Server__handles_start_recording_command__success( + test_stim_running_status, test_platemap, test_server_items +): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + + await ssm.update({"system_status": SystemStatuses.LIVE_VIEW_ACTIVE}) + test_command = { + "command": "start_recording", + "start_timepoint": randint(0, 100000), # arbitrary bounds + "plate_barcode": TEST_PLATE_BARCODE, + "stim_barcode": None, + "platemap": urllib.parse.quote_plus(json.dumps(test_platemap)), + } + if test_stim_running_status: + test_command["stim_barcode"] = TEST_STIM_BARCODE + await ssm.update( + { + "stimulation_protocol_statuses": [ + choice([StimulationStates.STARTING, StimulationStates.RUNNING]) + ] + } + ) + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + expected_command = {**test_command, "platemap": test_platemap} + if not test_stim_running_status: + expected_command["stim_barcode"] = NOT_APPLICABLE_H5_METADATA + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == expected_command + + +@pytest.mark.asyncio +async def test_Server__handles_stop_recording_command__success(test_server_items): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + + await ssm.update({"system_status": SystemStatuses.RECORDING}) + test_command = { + "command": "stop_recording", + "stop_timepoint": randint(0, 100000), # arbitrary bounds + } + + run_task = await 
test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "test_recording_exists,test_replace_existing", [(False, False), (False, True), (True, True)] +) +async def test_Server__handles_update_recording_name_command__success( + test_recording_exists, test_replace_existing, test_server_items, mocker +): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + + mocker.patch.object(server, "_recording_exists", autospec=True, return_value=test_recording_exists) + + test_command = { + "command": "update_recording_name", + "new_name": " NewRecording ", + "replace_existing": test_replace_existing, + } + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == {**test_command, "new_name": test_command["new_name"].strip()} + + +@pytest.mark.asyncio +async def test_Server__handles_update_recording_name_command__recording_exists_and_not_replacing( + test_server_items, mocker +): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + + mocker.patch.object(server, "_recording_exists", autospec=True, return_value=True) + + test_command = { + "command": "update_recording_name", + "new_name": " NewRecording ", + "replace_existing": False, + } + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await client.recv() == {"communication_type": "update_recording_name", "name_updated": False} + await wait_tasks_clean({run_task}) + + assert 
test_to_monitor_queue.qsize() == 0 + + +@pytest.mark.asyncio +async def test_Server__handles_set_stim_protocols_command__success(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + test_command = { + "command": "set_stim_protocols", + "stim_barcode": TEST_STIM_BARCODE, + "stim_info": get_random_stim_info(), + } + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + actual = await asyncio.wait_for(test_to_monitor_queue.get(), timeout=1) + # set event since the command handler will not exit until this is done + cpe = actual.pop("command_processed_event") + assert isinstance(cpe, asyncio.Event) + cpe.set() + + await wait_tasks_clean({run_task}) + + assert actual == test_command + + +@pytest.mark.asyncio +async def test_Server__handles_start_stim_checks_command__success(test_server_items, mocker): + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + + await ssm.update({"system_status": SystemStatuses.IDLE_READY}) + test_command = { + "command": "start_stim_checks", + "well_indices": [random_well_idx()], + "plate_barcode": TEST_PLATE_BARCODE, + "stim_barcode": TEST_STIM_BARCODE, + } + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) + + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == { + "plate_barcode_is_from_scanner": False, + "stim_barcode_is_from_scanner": False, + **test_command, + } + + +@pytest.mark.asyncio +@pytest.mark.parametrize("test_running_status", [True, False]) +async def test_Server__handles_set_stim_status_command__success( + 
test_running_status, test_server_items, mocker +): + test_running_status + test_to_monitor_queue = test_server_items["to_monitor_queue"] + ssm = test_server_items["system_state_manager"] + + await ssm.update( + { + "system_status": SystemStatuses.IDLE_READY, + "stim_info": get_random_stim_info(), + "stimulator_circuit_statuses": {random_well_idx(): StimulatorCircuitStatuses.MEDIA.name.lower()}, + "stimulation_protocol_statuses": [ + choice([StimulationStates.STOPPING, StimulationStates.INACTIVE]) + if test_running_status + else choice([StimulationStates.STARTING, StimulationStates.RUNNING]) + ], + } + ) + test_command = { + "command": "set_stim_status", + "running": test_running_status, + "plate_barcode": TEST_PLATE_BARCODE, + "stim_barcode": TEST_STIM_BARCODE, + } + + run_task = await test_server_items["run"](asyncio.Future(), asyncio.Event()) + async with connect(WS_URI) as client: + await client.send(json.dumps(test_command)) + await wait_tasks_clean({run_task}) - await clean_up_tasks({server_run_task}) + assert test_to_monitor_queue.qsize() == 1 + assert await test_to_monitor_queue.get() == test_command diff --git a/controller/tests/subsystems/test_instrument_comm.py b/controller/tests/subsystems/test_instrument_comm.py index 525f4ac..1abd9be 100644 --- a/controller/tests/subsystems/test_instrument_comm.py +++ b/controller/tests/subsystems/test_instrument_comm.py @@ -1,32 +1,79 @@ # -*- coding: utf-8 -*- import asyncio +from collections import deque from random import choice +from random import randint +import struct from controller.constants import CURI_VID +from controller.constants import NUM_WELLS from controller.constants import SERIAL_COMM_BAUD_RATE from controller.constants import SERIAL_COMM_BYTESIZE from controller.constants import SERIAL_COMM_READ_TIMEOUT +from controller.constants import SerialCommPacketTypes +from controller.constants import STIM_WELL_IDX_TO_MODULE_ID +from controller.constants import StimulatorCircuitStatuses from 
controller.constants import STM_VID +from controller.exceptions import InstrumentCommandResponseError from controller.exceptions import NoInstrumentDetectedError from controller.subsystems import instrument_comm from controller.subsystems.instrument_comm import InstrumentComm +from controller.utils.aio import clean_up_tasks +from controller.utils.serial_comm import convert_adc_readings_to_circuit_status +from controller.utils.serial_comm import create_data_packet import pytest import serial from serial.tools.list_ports_common import ListPortInfo from ..fixtures import fixture__wait_tasks_clean +from ..helpers import compare_exceptions +from ..helpers import random_bool +from ..helpers import random_serial_comm_timestamp __fixtures__ = [fixture__wait_tasks_clean] +class MockInstrument: + def __init__(self): + self.recv = deque() + self.send = deque() + + self.in_waiting = 123 # arbitrary number + + async def read_async(self, size): + try: + return self.send.popleft() + except IndexError: + return bytes() + + async def write_async(self, data): + self.recv.append(data) + + +# TODO consider using the simulator in all these tests + + @pytest.fixture(scope="function", name="test_instrument_comm_obj") def fixture__test_instrument_comm_obj(mocker): - ic = InstrumentComm(asyncio.Queue(), asyncio.Queue()) + ic = InstrumentComm(*[asyncio.Queue() for _ in range(4)]) yield ic # TODO any teardown needed here? +@pytest.fixture(scope="function", name="test_instrument_comm_obj_with_connection") +def fixture__test_instrument_comm_obj_with_connection(test_instrument_comm_obj, mocker): + connection = MockInstrument() + + def se(): + test_instrument_comm_obj._instrument = connection + + mocker.patch.object(test_instrument_comm_obj, "_setup", autospec=True, side_effect=se) + + yield test_instrument_comm_obj, connection + # TODO any teardown needed here? 
+ + @pytest.fixture(scope="function", name="patch_comports") def fixture__patch_comports(mocker): comport = "COM1" @@ -69,7 +116,7 @@ async def test_InstrumentComm__creates_connection_to_real_instrument_correctly( assert test_instrument_comm_obj._instrument is mocked_aioserial.return_value - assert test_instrument_comm_obj._to_monitor_queue.get_nowait() == { + assert test_instrument_comm_obj._comm_to_monitor_queue.get_nowait() == { "command": "get_board_connection_status", "in_simulation_mode": False, } @@ -100,7 +147,7 @@ async def test_InstrumentComm__creates_connection_to_virtual_instrument_correctl mocked_vic_init.assert_called_once_with(test_instrument_comm_obj._instrument) mocked_vic_connect.assert_awaited_once_with(test_instrument_comm_obj._instrument) - assert test_instrument_comm_obj._to_monitor_queue.get_nowait() == { + assert test_instrument_comm_obj._comm_to_monitor_queue.get_nowait() == { "command": "get_board_connection_status", "in_simulation_mode": True, } @@ -128,3 +175,196 @@ async def test_InstrumentComm__reports_system_error_if_no_real_or_virtual_instru # TODO make a function for this if it becomes common assert isinstance(mocked_handle_error.call_args[0][0], NoInstrumentDetectedError) assert mocked_handle_error.call_args[0][1] is system_error_future + + +# TODO add tests for each individual step of the setup + + +# TODO in one of the success tests for each of the commands, assert that the correct message was sent to the instrument + + +@pytest.mark.asyncio +async def test_InstrumentComm__handles_start_data_stream_command__success__no_stim_packets_to_be_sent( + test_instrument_comm_obj_with_connection, +): + test_ic, test_instrument = test_instrument_comm_obj_with_connection + + test_global_time_at_stream_start = randint(0, 0xFFFF) # arbitrary range + test_command = {"command": "start_data_stream"} + + run_task = asyncio.create_task(test_ic.run(asyncio.Future())) + + await test_ic._from_monitor_queue.put(test_command) + # set up response + 
test_instrument.send.append( + create_data_packet( + random_serial_comm_timestamp(), + SerialCommPacketTypes.START_DATA_STREAMING, + bytes([0]) + test_global_time_at_stream_start.to_bytes(8, byteorder="little"), + ) + ) + + assert await asyncio.wait_for(test_ic._comm_to_monitor_queue.get(), timeout=1) == test_command + + assert test_ic._data_stream_manager._base_global_time_of_data_stream == test_global_time_at_stream_start + assert test_ic._data_stream_manager.is_streaming + + assert test_ic._data_stream_manager._data_to_file_writer_queue.qsize() == 0 + + await clean_up_tasks({run_task}) + + +# TODO +# @pytest.mark.asyncio +# async def test_InstrumentComm__handles_start_data_stream_command__success__stim_packets_buffered( +# test_instrument_comm_obj_with_connection, +# ): + + +@pytest.mark.asyncio +async def test_InstrumentComm__handles_start_data_stream_command__fail( + test_instrument_comm_obj_with_connection, mocker +): + test_ic, test_instrument = test_instrument_comm_obj_with_connection + + spied_handle_error = mocker.spy(instrument_comm, "handle_system_error") + + test_command = {"command": "start_data_stream"} + + system_error_future = asyncio.Future() + run_task = asyncio.create_task(test_ic.run(system_error_future)) + + await test_ic._from_monitor_queue.put(test_command) + # set up response + test_instrument.send.append( + create_data_packet( + random_serial_comm_timestamp(), + SerialCommPacketTypes.START_DATA_STREAMING, + bytes([1]), + ) + ) + + await asyncio.wait_for(system_error_future, timeout=1) + + assert compare_exceptions( + spied_handle_error.call_args[0][0], InstrumentCommandResponseError(test_command["command"]) + ) + assert spied_handle_error.call_args[0][1] is system_error_future + + await clean_up_tasks({run_task}) + + +@pytest.mark.asyncio +async def test_InstrumentComm__handles_stop_data_stream_command__success( + test_instrument_comm_obj_with_connection, +): + test_ic, test_instrument = test_instrument_comm_obj_with_connection + + 
test_command = {"command": "stop_data_stream"} + + run_task = asyncio.create_task(test_ic.run(asyncio.Future())) + + await test_ic._from_monitor_queue.put(test_command) + # set up response + test_instrument.send.append( + create_data_packet( + random_serial_comm_timestamp(), SerialCommPacketTypes.STOP_DATA_STREAMING, bytes([0]) + ) + ) + + assert await asyncio.wait_for(test_ic._comm_to_monitor_queue.get(), timeout=1) == test_command + + assert test_ic._data_stream_manager._base_global_time_of_data_stream is None + assert not test_ic._data_stream_manager.is_streaming + + await clean_up_tasks({run_task}) + + +@pytest.mark.asyncio +async def test_InstrumentComm__handles_stop_data_stream_command__fail( + test_instrument_comm_obj_with_connection, mocker +): + test_ic, test_instrument = test_instrument_comm_obj_with_connection + + spied_handle_error = mocker.spy(instrument_comm, "handle_system_error") + + test_command = {"command": "stop_data_stream"} + + system_error_future = asyncio.Future() + run_task = asyncio.create_task(test_ic.run(system_error_future)) + + await test_ic._from_monitor_queue.put(test_command) + # set up response + test_instrument.send.append( + create_data_packet( + random_serial_comm_timestamp(), + SerialCommPacketTypes.STOP_DATA_STREAMING, + bytes([1]), + ) + ) + + await asyncio.wait_for(system_error_future, timeout=1) + + assert compare_exceptions( + spied_handle_error.call_args[0][0], InstrumentCommandResponseError(test_command["command"]) + ) + assert spied_handle_error.call_args[0][1] is system_error_future + + await clean_up_tasks({run_task}) + + +@pytest.mark.asyncio +async def test_InstrumentComm__handles_start_stim_checks_command__success( + test_instrument_comm_obj_with_connection, +): + test_ic, test_instrument = test_instrument_comm_obj_with_connection + + test_well_indices = list(range(4)) + test_well_indices.extend([i for i in range(4, NUM_WELLS) if random_bool()]) + + # set known adc readings in simulator. 
these first 4 values are hard coded, if this test fails might need to update them + adc_readings = [(0, 0), (0, 2039), (0, 2049), (1113, 0)] + adc_readings.extend([(i, i + 100) for i in range(NUM_WELLS - len(adc_readings))]) + + test_command = {"command": "start_stim_checks", "well_indices": test_well_indices} + + run_task = asyncio.create_task(test_ic.run(asyncio.Future())) + + await test_ic._from_monitor_queue.put(test_command) + + # set up response + adc_readings_ordered_by_module_id = [None] * NUM_WELLS + for well_idx, readings in enumerate(adc_readings): + module_id = STIM_WELL_IDX_TO_MODULE_ID[well_idx] + adc_readings_ordered_by_module_id[module_id] = readings + response_body = bytes([]) + for module_readings in adc_readings_ordered_by_module_id: + status = convert_adc_readings_to_circuit_status(*module_readings) + response_body += struct.pack("
- - - -
-
- - - + {{ + playState ? stopStimLabel : startStimLabel + }} + + + +
@@ -96,7 +96,7 @@ - + @@ -274,7 +274,12 @@ export default { opacity: this.disabled ? 0.5 : 1, }; }, + isStimInWaiting: function () { + return this.stimStatus === STIM_STATUS.WAITING; + }, isStartStopButtonEnabled: function () { + if (this.isStimInWaiting) return false; + if (!this.playState) { // if starting stim make sure initial magnetometer calibration has been completed and // no additional calibrations are running, stim checks have completed, there are no short or @@ -386,7 +391,7 @@ export default { if (this.invalidImportedProtocols.length > 0) this.$bvModal.show("invalid-imported-protocols"); }, statusUuid: function (new_status) { - if (new_status == SYSTEM_STATUS.IDLE_READY_STATE) { + if (new_status == SYSTEM_STATUS.IDLE_READY) { this.disabled = false; this.disabledToolTip = ""; } @@ -396,6 +401,8 @@ export default { async handlePlayStop(e) { e.preventDefault(); if (this.isStartStopButtonEnabled) { + this.$store.commit("stimulation/setStimStatus", STIM_STATUS.WAITING); + if (this.playState) { this.$store.dispatch(`stimulation/stopStimulation`); clearTimeout(this.stim24hrTimer); // clear 24 hour timer for next stimulation @@ -490,7 +497,7 @@ body { grid-template-columns: repeat(25%, 4); align-items: center; justify-items: center; - padding: 5px; + padding: 2px; } .span__stimulation-controls-play-stop-button--disabled { @@ -571,11 +578,11 @@ body { width: 20px; } -.span__spinner { +.span__config-check-spinner { position: absolute; font-size: 34px; - right: 17.5px; - bottom: 15px; + left: 5px; + top: 0px; width: 45px; color: #fff; padding-left: 5px; @@ -640,6 +647,17 @@ body { fill: none; } +.span__start-stop-spinner { + position: absolute; + font-size: 20px; + right: 2px; + bottom: 3px; + color: #fff; + padding-left: 5px; + background-color: #000; + opacity: 0.75; +} + #user-input-prompt-message, #open-circuit-warning, #stim-24hr-warning, diff --git a/ui/components/stimulation/StimulationStudioCreateAndEdit.vue 
b/ui/components/stimulation/StimulationStudioCreateAndEdit.vue index 6be6b77..9e20ad9 100644 --- a/ui/components/stimulation/StimulationStudioCreateAndEdit.vue +++ b/ui/components/stimulation/StimulationStudioCreateAndEdit.vue @@ -80,10 +80,13 @@ export default { }; }, computed: { - ...mapState("stimulation", ["protocolList", "editMode"]), + ...mapState("stimulation", ["protocolList", "editMode", "selectedWells"]), editModeStatus: function () { return this.editMode.status; }, + noWellsSelected: function () { + return this.selectedWells.length === 0; + }, }, watch: { protocolList: function (newList, oldList) { @@ -113,7 +116,11 @@ export default { this.$emit("handle-selection-change", selectedProtocol); }, disableSelectionBtn(idx) { - return this.disableEdits || (this.selectedProtocolIdx === 0 && idx === 0); + return ( + this.disableEdits || + (this.selectedProtocolIdx === 0 && idx === 0) || + (this.noWellsSelected && idx === 0) + ); }, handleClick(idx) { if (this.disableSelectionBtn(idx)) { diff --git a/ui/store/modules/stimulation/enums.js b/ui/store/modules/stimulation/enums.js index efcff5b..36b9f8e 100644 --- a/ui/store/modules/stimulation/enums.js +++ b/ui/store/modules/stimulation/enums.js @@ -9,6 +9,8 @@ export const STIM_STATUS = { READY: "Ready", // stim play states STIM_ACTIVE: "Stimulating...", + // used to show spinner in stim controls over play/stop button + WAITING: "", // error SHORT_CIRCUIT_ERROR: "Short Circuit Error", diff --git a/ui/store/modules/system/enums.js b/ui/store/modules/system/enums.js index 0bc4681..f3ec424 100644 --- a/ui/store/modules/system/enums.js +++ b/ui/store/modules/system/enums.js @@ -1,13 +1,13 @@ export const SYSTEM_STATUS = { - SERVER_INITIALIZING_STATE: "04471bcf-1a00-4a0d-83c8-4160622f9a25", - SERVER_READY_STATE: "8e24ef4d-2353-4e9d-aa32-4346126e73e3", - INSTRUMENT_INITIALIZING_STATE: "d2e3d386-b760-4c9a-8b2d-410362ff11c4", - CHECKING_FOR_UPDATES_STATE: "04fd6f6b-ee9e-4656-aae4-0b9584791f36", - IDLE_READY_STATE: 
"009301eb-625c-4dc4-9e92-1a4d0762465f", - UPDATES_NEEDED_STATE: "d6dcf2a9-b6ea-4d4e-9423-500f91a82a2f", - DOWNLOADING_UPDATES_STATE: "b623c5fa-af01-46d3-9282-748e19fe374c", - INSTALLING_UPDATES_STATE: "19c9c2d6-0de4-4334-8cb3-a4c7ab0eab00", - UPDATES_COMPLETE_STATE: "31f8fbc9-9b41-4191-8598-6462b7490789", + SERVER_INITIALIZING: "04471bcf-1a00-4a0d-83c8-4160622f9a25", + SERVER_READY: "8e24ef4d-2353-4e9d-aa32-4346126e73e3", + INSTRUMENT_INITIALIZING: "d2e3d386-b760-4c9a-8b2d-410362ff11c4", + CHECKING_FOR_UPDATES: "04fd6f6b-ee9e-4656-aae4-0b9584791f36", + IDLE_READY: "b480373b-9466-4fa0-92a6-fa5f8e340d30", + UPDATES_NEEDED: "d6dcf2a9-b6ea-4d4e-9423-500f91a82a2f", + DOWNLOADING_UPDATES: "b623c5fa-af01-46d3-9282-748e19fe374c", + INSTALLING_UPDATES: "19c9c2d6-0de4-4334-8cb3-a4c7ab0eab00", + UPDATES_COMPLETE: "31f8fbc9-9b41-4191-8598-6462b7490789", }; export const ERROR_CODES = { diff --git a/ui/store/modules/system/index.js b/ui/store/modules/system/index.js index a84914a..30dc60e 100644 --- a/ui/store/modules/system/index.js +++ b/ui/store/modules/system/index.js @@ -7,7 +7,7 @@ import { SYSTEM_STATUS } from "./enums"; const defaultState = { port: 4565, // http://localhost:4565/ - statusUuid: SYSTEM_STATUS.SERVER_INITIALIZING_STATE, + statusUuid: SYSTEM_STATUS.SERVER_INITIALIZING, simulationMode: false, barcodeManualMode: false, isConnectedToController: false, diff --git a/ui/store/modules/system/mutations.js b/ui/store/modules/system/mutations.js index 8b31060..1d7fdc7 100644 --- a/ui/store/modules/system/mutations.js +++ b/ui/store/modules/system/mutations.js @@ -5,7 +5,7 @@ const SW_INSTALLER_URL_PREFIX = "https://downloads.curibio.com/software/stingray export default { setStatusUuid(state, newId) { - if (state.statusUuid !== SYSTEM_STATUS.ERROR_STATE) { + if (state.statusUuid !== SYSTEM_STATUS.ERROR) { state.statusUuid = newId; } }, diff --git a/ui/tests/unit/components/status/StatusBar.spec.js b/ui/tests/unit/components/status/StatusBar.spec.js index 
563858a..c2a4650 100644 --- a/ui/tests/unit/components/status/StatusBar.spec.js +++ b/ui/tests/unit/components/status/StatusBar.spec.js @@ -40,14 +40,14 @@ describe("StatusWidget.vue", () => { describe("systemStatus", () => { // add test to check that false = not visible test.each([ - ["SERVER_INITIALIZING_STATE", "Status: Booting Up..."], - ["SERVER_READY_STATE", "Status: Connecting..."], - ["INSTRUMENT_INITIALIZING_STATE", "Status: Initializing..."], - ["CHECKING_FOR_UPDATES_STATE", "Status: Checking for Firmware Updates..."], - ["UPDATES_NEEDED_STATE", "Status: Firmware Updates Required"], - ["DOWNLOADING_UPDATES_STATE", "Status: Downloading Firmware Updates..."], - ["INSTALLING_UPDATES_STATE", "Status: Installing Firmware Updates..."], - ["UPDATES_COMPLETE_STATE", "Status: Firmware Updates Complete"], + ["SERVER_INITIALIZING", "Status: Booting Up..."], + ["SERVER_READY", "Status: Connecting..."], + ["INSTRUMENT_INITIALIZING", "Status: Initializing..."], + ["CHECKING_FOR_UPDATES", "Status: Checking for Firmware Updates..."], + ["UPDATES_NEEDED", "Status: Firmware Updates Required"], + ["DOWNLOADING_UPDATES", "Status: Downloading Firmware Updates..."], + ["INSTALLING_UPDATES", "Status: Installing Firmware Updates..."], + ["UPDATES_COMPLETE", "Status: Firmware Updates Complete"], ])( "Given that /shutdown is mocked to return status 200, When Vuex is mutated to the state %s, Then the status text should update to be: %s", async (vuexState, expectedText) => { @@ -67,7 +67,7 @@ describe("StatusWidget.vue", () => { ); test("When initially mounted, Then the status text matches the Vuex state", async () => { const propsData = {}; - store.commit("system/setStatusUuid", SYSTEM_STATUS.SERVER_READY_STATE); + store.commit("system/setStatusUuid", SYSTEM_STATUS.SERVER_READY); wrapper = mount(StatusWidget, { propsData, store, @@ -122,12 +122,12 @@ describe("StatusWidget.vue", () => { }); test.each([ - "SERVER_INITIALIZING_STATE", - "SERVER_READY_STATE", - 
"INITIALIZING_INSTRUMENT_STATE", - "UPDATES_NEEDED_STATE", - "UPDATES_COMPLETE_STATE", - "ERROR_STATE", + "SERVER_INITIALIZING", + "SERVER_READY", + "INITIALIZING_INSTRUMENT", + "UPDATES_NEEDED", + "UPDATES_COMPLETE", + "ERROR", ])( "When a user wants to exit the desktop app, Then the closure warning modals should not appear if there are no active processes or fw update", async (vuexState) => { @@ -210,28 +210,23 @@ describe("StatusWidget.vue", () => { }); describe("stimStatus", () => { test.each([ - ["IDLE_READY_STATE", "NO_PROTOCOLS_ASSIGNED", "Status: No protocols have been assigned", {}], - ["IDLE_READY_STATE", "CONFIG_CHECK_NEEDED", "Status: Configuration Check Needed", { 1: {} }], - [ - "IDLE_READY_STATE", - "CONFIG_CHECK_IN_PROGRESS", - "Status: Configuration Check in Progress...", - { 1: {} }, - ], - ["IDLE_READY_STATE", "CONFIG_CHECK_COMPLETE", "Status: Configuration Check Complete", { 1: {} }], - ["IDLE_READY_STATE", "READY", "Status: Ready", { 1: {} }], - ["IDLE_READY_STATE", "STIM_ACTIVE", "Status: Stimulating...", { 1: {} }], - ["IDLE_READY_STATE", "SHORT_CIRCUIT_ERROR", "Status: Short Circuit Error", {}], - ["IDLE_READY_STATE", "ERROR", "Status: Error Occurred", {}], - ["SERVER_INITIALIZING_STATE", "CONFIG_CHECK_NEEDED", "Status: Booting Up...", { 1: {} }], - ["INSTRUMENT_INITIALIZING_STATE", "CONFIG_CHECK_IN_PROGRESS", "Status: Initializing...", { 1: {} }], - ["SERVER_READY_STATE", "CONFIG_CHECK_COMPLETE", "Status: Connecting...", { 1: {} }], - ["UPDATES_NEEDED_STATE", "READY", "Status: Firmware Updates Required", { 1: {} }], - ["INSTALLING_UPDATES_STATE", "STIM_ACTIVE", "Status: Installing Firmware Updates...", { 1: {} }], - ["UPDATES_COMPLETE_STATE", "SHORT_CIRCUIT_ERROR", "Status: Firmware Updates Complete", {}], - ["CHECKING_FOR_UPDATES_STATE", "STIM_ACTIVE", "Status: Checking for Firmware Updates...", {}], + ["IDLE_READY", "NO_PROTOCOLS_ASSIGNED", "Status: No protocols have been assigned", {}], + ["IDLE_READY", "CONFIG_CHECK_NEEDED", "Status: 
Configuration Check Needed", { 1: {} }], + ["IDLE_READY", "CONFIG_CHECK_IN_PROGRESS", "Status: Configuration Check in Progress...", { 1: {} }], + ["IDLE_READY", "CONFIG_CHECK_COMPLETE", "Status: Configuration Check Complete", { 1: {} }], + ["IDLE_READY", "READY", "Status: Ready", { 1: {} }], + ["IDLE_READY", "STIM_ACTIVE", "Status: Stimulating...", { 1: {} }], + ["IDLE_READY", "SHORT_CIRCUIT_ERROR", "Status: Short Circuit Error", {}], + ["IDLE_READY", "ERROR", "Status: Error Occurred", {}], + ["SERVER_INITIALIZING", "CONFIG_CHECK_NEEDED", "Status: Booting Up...", { 1: {} }], + ["INSTRUMENT_INITIALIZING", "CONFIG_CHECK_IN_PROGRESS", "Status: Initializing...", { 1: {} }], + ["SERVER_READY", "CONFIG_CHECK_COMPLETE", "Status: Connecting...", { 1: {} }], + ["UPDATES_NEEDED", "READY", "Status: Firmware Updates Required", { 1: {} }], + ["INSTALLING_UPDATES", "STIM_ACTIVE", "Status: Installing Firmware Updates...", { 1: {} }], + ["UPDATES_COMPLETE", "SHORT_CIRCUIT_ERROR", "Status: Firmware Updates Complete", {}], + ["CHECKING_FOR_UPDATES", "STIM_ACTIVE", "Status: Checking for Firmware Updates...", {}], ])( - "When system status is %s and stim's stimStatus gets mutated to %s, Then the status text should update to be %s if system Uuid is IDLE_READY_STATE", + "When system status is %s and stim's stimStatus gets mutated to %s, Then the status text should update to be %s if system Uuid is IDLE_READY", async (systemVuexState, vuexState, expectedText, assignments) => { const propsData = {}; wrapper = mount(StatusWidget, { diff --git a/virtual-instrument/src/virtual_instrument/constants.py b/virtual-instrument/src/virtual_instrument/constants.py index c914e0f..31ffd7e 100644 --- a/virtual-instrument/src/virtual_instrument/constants.py +++ b/virtual-instrument/src/virtual_instrument/constants.py @@ -1,7 +1,2 @@ # -*- coding: utf-8 -*- - -DEFAULT_SAMPLING_PERIOD = 10000 # valid as of 4/12/22 MICROSECONDS_PER_CENTIMILLISECOND = 10 - -SERIAL_COMM_NUM_CHANNELS_PER_SENSOR = 3 
-SERIAL_COMM_NUM_SENSORS_PER_WELL = 3 diff --git a/virtual-instrument/src/virtual_instrument/virtual_instrument.py b/virtual-instrument/src/virtual_instrument/virtual_instrument.py index 9ce4a94..c035030 100644 --- a/virtual-instrument/src/virtual_instrument/virtual_instrument.py +++ b/virtual-instrument/src/virtual_instrument/virtual_instrument.py @@ -15,10 +15,13 @@ from uuid import UUID from zlib import crc32 +from controller.constants import DEFAULT_MAG_SAMPLING_PERIOD from controller.constants import GOING_DORMANT_HANDSHAKE_TIMEOUT_CODE from controller.constants import MAX_MC_REBOOT_DURATION_SECONDS from controller.constants import MICRO_TO_BASE_CONVERSION from controller.constants import MICROS_PER_MILLI +from controller.constants import NUM_CHANNELS_PER_MAG_SENSOR +from controller.constants import NUM_MAG_SENSORS_PER_WELL from controller.constants import SERIAL_COMM_CHECKSUM_LENGTH_BYTES from controller.constants import SERIAL_COMM_HANDSHAKE_PERIOD_SECONDS from controller.constants import SERIAL_COMM_HANDSHAKE_TIMEOUT_SECONDS @@ -58,10 +61,7 @@ from stdlib_utils import InfiniteProcess from stdlib_utils import resource_path -from .constants import DEFAULT_SAMPLING_PERIOD from .constants import MICROSECONDS_PER_CENTIMILLISECOND -from .constants import SERIAL_COMM_NUM_CHANNELS_PER_SENSOR -from .constants import SERIAL_COMM_NUM_SENSORS_PER_WELL from .exceptions import SerialCommInvalidSamplingPeriodError from .exceptions import SerialCommTooManyMissedHandshakesError from .exceptions import UnrecognizedSerialCommPacketTypeError @@ -269,7 +269,7 @@ def _handle_boot_up_config(self, reboot: bool = False) -> None: self._reset_start_time() self._reboot_time_secs = None self._status_codes = [SERIAL_COMM_OKAY_CODE] * (self._num_wells + 2) - self._sampling_period_us = DEFAULT_SAMPLING_PERIOD + self._sampling_period_us = DEFAULT_MAG_SAMPLING_PERIOD self._adc_readings = [(self.default_adc_reading, self.default_adc_reading)] * self._num_wells self._stim_info = {} 
self._is_stimulating = False @@ -638,8 +638,8 @@ def _create_magnetometer_data_payload(self) -> bytes: SERIAL_COMM_MODULE_ID_TO_WELL_IDX[module_id] + 1 ) # add data points - well_sensor_data = time_offset + (data_value.tobytes() * SERIAL_COMM_NUM_CHANNELS_PER_SENSOR) - well_data = well_sensor_data * SERIAL_COMM_NUM_SENSORS_PER_WELL + well_sensor_data = time_offset + (data_value.tobytes() * NUM_CHANNELS_PER_MAG_SENSOR) + well_data = well_sensor_data * NUM_MAG_SENSORS_PER_WELL magnetometer_data_payload += well_data return magnetometer_data_payload