Commit 35dfb875 authored by Matteo

update

parent 2ef1c0f4
...@@ -766,14 +766,14 @@ files = [ ...@@ -766,14 +766,14 @@ files = [
[[package]] [[package]]
name = "mpai-cae-arp" name = "mpai-cae-arp"
version = "0.2.3" version = "0.2.5"
description = "The MPAI CAE-ARP software API" description = "The MPAI CAE-ARP software API"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.10,<4.0" python-versions = ">=3.10,<4.0"
files = [ files = [
{file = "mpai_cae_arp-0.2.3-py3-none-any.whl", hash = "sha256:d2d1225310474582e5697499fffa1140cf2d28fc2cb4fdad402446d146aa6e9e"}, {file = "mpai_cae_arp-0.2.5-py3-none-any.whl", hash = "sha256:b6bc27677bccf9893b3a05fe43480012b956d67cea066245f1685408e0f95cee"},
{file = "mpai_cae_arp-0.2.3.tar.gz", hash = "sha256:d5cc6df1708678dfe2dddf76a124a6d13eebd4da2360d4d88963c4be43221236"}, {file = "mpai_cae_arp-0.2.5.tar.gz", hash = "sha256:9aa63135ce674902d6aa90216d0ee2f4aece10a36190d2a2b69f01927c23cbf8"},
] ]
[package.dependencies] [package.dependencies]
...@@ -939,61 +939,68 @@ files = [ ...@@ -939,61 +939,68 @@ files = [
[[package]] [[package]]
name = "orjson" name = "orjson"
version = "3.8.9" version = "3.8.10"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">= 3.7"
files = [ files = [
{file = "orjson-3.8.9-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:5d029843eae7b6cbd6468b63517b8b61471afed6572162171d8b6471b6dbf41f"}, {file = "orjson-3.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4dfe0651e26492d5d929bbf4322de9afbd1c51ac2e3947a7f78492b20359711d"},
{file = "orjson-3.8.9-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:405933c05490efb209d0f940d8ef1403d2932a97e47010a26d2694e9dd49f84d"}, {file = "orjson-3.8.10-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:bc30de5c7b3a402eb59cc0656b8ee53ca36322fc52ab67739c92635174f88336"},
{file = "orjson-3.8.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:183de66eff4d41c330a3006f210ab0bce7affe398da6f6eda9579b67245a34ff"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c08b426fae7b9577b528f99af0f7e0ff3ce46858dd9a7d1bf86d30f18df89a4c"},
{file = "orjson-3.8.9-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb4081fe340ed1df42dddfd055e1d50479cb0ccb976d13e6b5e8667a07fec6f4"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bce970f293825e008dbf739268dfa41dfe583aa2a1b5ef4efe53a0e92e9671ea"},
{file = "orjson-3.8.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d11593a2e736055dd7b9587dbf89cd1cbe4a42a70e70f186e51aee7e1b38902e"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b23fb0264bbdd7218aa685cb6fc71f0dcecf34182f0a8596a3a0dff010c06f9"},
{file = "orjson-3.8.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e20649359e28f34d01b2570e4650a076f439a959bae3a8bbe7f5923ad80f54e8"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0826ad2dc1cea1547edff14ce580374f0061d853cbac088c71162dbfe2e52205"},
{file = "orjson-3.8.9-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c02ece4f36a160c83efe74adfba5f189c7c7702361f02b809ab73744923ee139"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7bce6e61cea6426309259b04c6ee2295b3f823ea51a033749459fe2dd0423b2"},
{file = "orjson-3.8.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f0e19801836cf1b30f333d475b05d79051b8ae8639a8e2422fb5f64e82676ae7"}, {file = "orjson-3.8.10-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0b470d31244a6f647e5402aac7d2abaf7bb4f52379acf67722a09d35a45c9417"},
{file = "orjson-3.8.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d4850fe5650cead3c0f8822192e381cee9d4c3b8162eb082c86c927124572dc6"}, {file = "orjson-3.8.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:48824649019a25d3e52f6454435cf19fe1eb3d05ee697e65d257f58ae3aa94d9"},
{file = "orjson-3.8.9-cp310-none-win_amd64.whl", hash = "sha256:5fd4193f260d9d30112b5e379d0870b54dc88040807c93cbe8d67bfea148ba5a"}, {file = "orjson-3.8.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:faee89e885796a9cc493c930013fa5cfcec9bfaee431ddf00f0fbfb57166a8b3"},
{file = "orjson-3.8.9-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:70eae063ad8d7405dc63873760567b600fc10728ba0da24a69d49c1a5d318d6d"}, {file = "orjson-3.8.10-cp310-none-win_amd64.whl", hash = "sha256:3cfe32b1227fe029a5ad989fbec0b453a34e5e6d9a977723f7c3046d062d3537"},
{file = "orjson-3.8.9-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:251653437632583d02203e6b118b72b99c04425175853f35340f4bac7034a36e"}, {file = "orjson-3.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:2073b62822738d6740bd2492f6035af5c2fd34aa198322b803dc0e70559a17b7"},
{file = "orjson-3.8.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea833751f017ba321c277e7425b51c0b1a18a2c60f8c9c0f4c6c4d7e16cbd6c"}, {file = "orjson-3.8.10-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b2c4faf20b6bb5a2d7ac0c16f58eb1a3800abcef188c011296d1dc2bb2224d48"},
{file = "orjson-3.8.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8563c2cdeb923b82a5cc5bfc76c28c786777428263ee39292d928e9687165fb4"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c1825997232a324911d11c75d91e1e0338c7b723c149cf53a5fc24496c048a4"},
{file = "orjson-3.8.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f33e9ea45b4c9457eedca0c40f38cf5732c91b0fb68f091ac59e6ea68e03eb2"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7e85d4682f3ed7321d36846cad0503e944ea9579ef435d4c162e1b73ead8ac9"},
{file = "orjson-3.8.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855dee152daecb7de7b4cd7069d7854e11aa291687bffe8433156af0a224417e"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8cdaacecb92997916603ab232bb096d0fa9e56b418ca956b9754187d65ca06"},
{file = "orjson-3.8.9-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:74fa9e02589339defc9d3662de9e7eef51d8f9f3a7f6304b43b18b39d7bbf10f"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ddabc5e44702d13137949adee3c60b7091e73a664f6e07c7b428eebb2dea7bbf"},
{file = "orjson-3.8.9-cp311-none-win_amd64.whl", hash = "sha256:6c5b10ba1e62df8f96cbc37f6d5ae9acb3f6475926dea8b1b6a1a60f201a64f7"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27bb26e171e9cfdbec39c7ca4739b6bef8bd06c293d56d92d5e3a3fc017df17d"},
{file = "orjson-3.8.9-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a651123d01bc399fcd866e56acc2d76512e62aae3673652b13b470ea69faf1f4"}, {file = "orjson-3.8.10-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1810e5446fe68d61732e9743592da0ec807e63972eef076d09e02878c2f5958e"},
{file = "orjson-3.8.9-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:73019b6d2cc998c99556020c6bd8f8bc28420c69583186ca290c66a27916a3b7"}, {file = "orjson-3.8.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61e2e51cefe7ef90c4fbbc9fd38ecc091575a3ea7751d56fad95cbebeae2a054"},
{file = "orjson-3.8.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f5c3daa8b02786ad5f0e14ae16a59bbb4e02cbae3a41989a25188e5a6c962ff"}, {file = "orjson-3.8.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f3e9ac9483c2b4cd794e760316966b7bd1e6afb52b0218f068a4e80c9b2db4f6"},
{file = "orjson-3.8.9-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:598598b7f81f8fda7c3e09c88165f844152b7be223bc4ea929ec8ad59b00ea17"}, {file = "orjson-3.8.10-cp311-none-win_amd64.whl", hash = "sha256:26aee557cf8c93b2a971b5a4a8e3cca19780573531493ce6573aa1002f5c4378"},
{file = "orjson-3.8.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:090b10bdb06baae6d5cd3550d772ecbabd833bfceed7592ff167c0a82f5b4c20"}, {file = "orjson-3.8.10-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:11ae68f995a50724032af297c92f20bcde31005e0bf3653b12bff9356394615b"},
{file = "orjson-3.8.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd46f688ddf9c2ea10367446fe9bf3ceba0f7490c15b4f96420491c7f00bb283"}, {file = "orjson-3.8.10-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:35d879b46b8029e1e01e9f6067928b470a4efa1ca749b6d053232b873c2dcf66"},
{file = "orjson-3.8.9-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:b8ed8d780e9fab01bc404a70d755a8b2b34ea6c0b6604b65de135daaaadaf9a9"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:345e41abd1d9e3ecfb554e1e75ff818cf42e268bd06ad25a96c34e00f73a327e"},
{file = "orjson-3.8.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8a32c9fb742868a34346f3c52e12d893a9d27f8e0c0bf3c480db7e6903d8be28"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:45a5afc9cda6b8aac066dd50d8194432fbc33e71f7164f95402999b725232d78"},
{file = "orjson-3.8.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2ba366009b98ac8899e935eff6fef7672d3ea43d3ce9deb3ee33452134b6cc3a"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad632dc330a7b39da42530c8d146f76f727d476c01b719dc6743c2b5701aaf6b"},
{file = "orjson-3.8.9-cp37-none-win_amd64.whl", hash = "sha256:236b9313425cb2570626c64dd5cb6caff13882d1717d491da542cff228b96e97"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf2556ba99292c4dc550560384dd22e88b5cdbe6d98fb4e202e902b5775cf9f"},
{file = "orjson-3.8.9-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8efc7e9ec35336f7cc98b6692536b1262046ff1d2a545295a4d89b8a2495903"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b88afd662190f19c3bb5036a903589f88b1d2c2608fbb97281ce000db6b08897"},
{file = "orjson-3.8.9-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8c7eba3610ae69f4aba4032ecb61b0a6fbd1e4537283d1553eb8c1cb136e9118"}, {file = "orjson-3.8.10-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:abce8d319aae800fd2d774db1106f926dee0e8a5ca85998fd76391fcb58ef94f"},
{file = "orjson-3.8.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7742649e4c357d4e7ad483a35ff5f55d519e895de56772cc486913614ee7d23b"}, {file = "orjson-3.8.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e999abca892accada083f7079612307d94dd14cc105a699588a324f843216509"},
{file = "orjson-3.8.9-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6566fb8daa538c7848fd6822e2409a7e1c41dae8e65e6536598d505f641a318"}, {file = "orjson-3.8.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3fdee68c4bb3c5d6f89ed4560f1384b5d6260e48fbf868bae1a245a3c693d4d"},
{file = "orjson-3.8.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ce8a2a667221e2e5160021e26b09e9c13eeedafb5cda1981340c8c0c0bc8f9d"}, {file = "orjson-3.8.10-cp37-none-win_amd64.whl", hash = "sha256:e5d7f82506212e047b184c06e4bcd48c1483e101969013623cebcf51cf12cad9"},
{file = "orjson-3.8.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0399631b88fa4868956badef2561fba07dffcaf050bf53959ee50d26edf6f6"}, {file = "orjson-3.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:d953e6c2087dcd990e794f8405011369ee11cf13e9aaae3172ee762ee63947f2"},
{file = "orjson-3.8.9-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:189ccb16ed140a824d133fa1c55175cf0d2207edaade54f1db0456a526cb5fd8"}, {file = "orjson-3.8.10-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:81aa3f321d201bff0bd0f4014ea44e51d58a9a02d8f2b0eeab2cee22611be8e1"},
{file = "orjson-3.8.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b707fa4481e1af19b3052ec9352c688bad3f539d7bdd8aa4a451f6dd7e4bae73"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d27b6182f75896dd8c10ea0f78b9265a3454be72d00632b97f84d7031900dd4"},
{file = "orjson-3.8.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c3d988eb562da1dda7d49e9abd8a64b3cabc632b4299d177fb9e0c0ca9f06b8c"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1486600bc1dd1db26c588dd482689edba3d72d301accbe4301db4b2b28bd7aa4"},
{file = "orjson-3.8.9-cp38-none-win_amd64.whl", hash = "sha256:b30240eb6b22daab604f1595f6aacf92bcdac0d29e2d7ad507dfac68d2b39182"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344ea91c556a2ce6423dc13401b83ab0392aa697a97fa4142c2c63a6fd0bbfef"},
{file = "orjson-3.8.9-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:81869a6de00bc676d10056fa8bb28cbe805b1cf498a45c14cb7b1765eee33fcb"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:979f231e3bad1c835627eef1a30db12a8af58bfb475a6758868ea7e81897211f"},
{file = "orjson-3.8.9-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a25a5a215b19d414de8d416a3c5414f29165843a06f704cc0345ded9eac34ac1"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa3a26dcf0f5f2912a8ce8e87273e68b2a9526854d19fd09ea671b154418e88"},
{file = "orjson-3.8.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec0f2bea52e30ea98ce095f1f42da04535791f9a31b2aab2499caa88307bc49"}, {file = "orjson-3.8.10-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b6e79d8864794635974b18821b49a7f27859d17b93413d4603efadf2e92da7a5"},
{file = "orjson-3.8.9-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b91d88fe96b698b28bb1b95b1fce226f72757ab3ab7d8d97551e23bc629c84f"}, {file = "orjson-3.8.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ce49999bcbbc14791c61844bc8a69af44f5205d219be540e074660038adae6bf"},
{file = "orjson-3.8.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7629841ccdcccd3c43ebc6a4165abe9844909fcedb2041994c0153470f610801"}, {file = "orjson-3.8.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2ef690335b24f9272dbf6639353c1ffc3f196623a92b851063e28e9515cf7dd"},
{file = "orjson-3.8.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d875b304e19f4b2758d233bbf2b9d627c66fac50b3150b8d31a35ba6cda3db67"}, {file = "orjson-3.8.10-cp38-none-win_amd64.whl", hash = "sha256:5a0b1f4e4fa75e26f814161196e365fc0e1a16e3c07428154505b680a17df02f"},
{file = "orjson-3.8.9-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:723ec880c5290fe4de330febb8030e57c1978fbd624fc5b9399969e7d7d74984"}, {file = "orjson-3.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:af7601a78b99f0515af2f8ab12c955c0072ffcc1e437fb2556f4465783a4d813"},
{file = "orjson-3.8.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b11f8a71c82d19fce11ce487efeec2ca0dc3bcf5b4564445fecfc68d9c268744"}, {file = "orjson-3.8.10-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bbd7b3a3e2030b03c68c4d4b19a2ef5b89081cbb43c05fe2010767ef5e408db"},
{file = "orjson-3.8.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b2079bf86dec62731c1b90fdfea3211f993f0c894d9261e0ce9b68ed9c9dfbec"}, {file = "orjson-3.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4355c9aedfefe60904e8bd7901315ebbc8bb828f665e4c9bc94b1432e67cb6f7"},
{file = "orjson-3.8.9-cp39-none-win_amd64.whl", hash = "sha256:97d94322a2eaab767ba8d52f6bf9d0ec0f35313fe36287be6e6085dd65d55d37"}, {file = "orjson-3.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b7b0ba074375e25c1594e770e2215941e2017c3cd121889150737fa1123e8bfe"},
{file = "orjson-3.8.9.tar.gz", hash = "sha256:c40bece58c11cb09aff17424d21b41f6f767d2b1252b2f745ec3ff29cce6a240"}, {file = "orjson-3.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34b6901c110c06ab9e8d7d0496db4bc9a0c162ca8d77f67539d22cb39e0a1ef4"},
{file = "orjson-3.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb62ec16a1c26ad9487727b529103cb6a94a1d4969d5b32dd0eab5c3f4f5a6f2"},
{file = "orjson-3.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595e1e7d04aaaa3d41113e4eb9f765ab642173c4001182684ae9ddc621bb11c8"},
{file = "orjson-3.8.10-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:64ffd92328473a2f9af059410bd10c703206a4bbc7b70abb1bedcd8761e39eb8"},
{file = "orjson-3.8.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1f648ec89c6a426098868460c0ef8c86b457ce1378d7569ff4acb6c0c454048"},
{file = "orjson-3.8.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6a286ad379972e4f46579e772f0477e6b505f1823aabcd64ef097dbb4549e1a4"},
{file = "orjson-3.8.10-cp39-none-win_amd64.whl", hash = "sha256:d2874cee6856d7c386b596e50bc517d1973d73dc40b2bd6abec057b5e7c76b2f"},
{file = "orjson-3.8.10.tar.gz", hash = "sha256:dcf6adb4471b69875034afab51a14b64f1026bc968175a2bb02c5f6b358bd413"},
] ]
[[package]] [[package]]
...@@ -1008,6 +1015,73 @@ files = [ ...@@ -1008,6 +1015,73 @@ files = [
{file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
] ]
[[package]]
name = "pandas"
version = "2.0.0"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pandas-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bbb2c5e94d6aa4e632646a3bacd05c2a871c3aa3e85c9bec9be99cb1267279f2"},
{file = "pandas-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5337c87c4e963f97becb1217965b6b75c6fe5f54c4cf09b9a5ac52fc0bd03d3"},
{file = "pandas-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ded51f7e3dd9b4f8b87f2ceb7bd1a8df2491f7ee72f7074c6927a512607199e"},
{file = "pandas-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c858de9e9fc422d25e67e1592a6e6135d7bcf9a19fcaf4d0831a0be496bf21"},
{file = "pandas-2.0.0-cp310-cp310-win32.whl", hash = "sha256:2d1d138848dd71b37e3cbe7cd952ff84e2ab04d8988972166e18567dcc811245"},
{file = "pandas-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:d08e41d96bc4de6f500afe80936c68fce6099d5a434e2af7c7fd8e7c72a3265d"},
{file = "pandas-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24472cfc7ced511ac90608728b88312be56edc8f19b9ed885a7d2e47ffaf69c0"},
{file = "pandas-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ffb14f50c74ee541610668137830bb93e9dfa319b1bef2cedf2814cd5ac9c70"},
{file = "pandas-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c24c7d12d033a372a9daf9ff2c80f8b0af6f98d14664dbb0a4f6a029094928a7"},
{file = "pandas-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8318de0f886e4dcb8f9f36e45a3d6a6c3d1cfdc508354da85e739090f0222991"},
{file = "pandas-2.0.0-cp311-cp311-win32.whl", hash = "sha256:57c34b79c13249505e850d0377b722961b99140f81dafbe6f19ef10239f6284a"},
{file = "pandas-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:8f987ec26e96a8490909bc5d98c514147236e49830cba7df8690f6087c12bbae"},
{file = "pandas-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3ba8f5dd470d8bfbc4259829589f4a32881151c49e36384d9eb982b35a12020"},
{file = "pandas-2.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcd471c9d9f60926ab2f15c6c29164112f458acb42280365fbefa542d0c2fc74"},
{file = "pandas-2.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253edfd015520ce77a9343eb7097429479c039cd3ebe81d7810ea11b4b24695"},
{file = "pandas-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977326039bd1ded620001a1889e2ed4798460a6bc5a24fbaebb5f07a41c32a55"},
{file = "pandas-2.0.0-cp38-cp38-win32.whl", hash = "sha256:78425ca12314b23356c28b16765639db10ebb7d8983f705d6759ff7fe41357fa"},
{file = "pandas-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:d93b7fcfd9f3328072b250d6d001dcfeec5d3bb66c1b9c8941e109a46c0c01a8"},
{file = "pandas-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:425705cee8be54db2504e8dd2a730684790b15e5904b750c367611ede49098ab"},
{file = "pandas-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f789b7c012a608c08cda4ff0872fd979cb18907a37982abe884e6f529b8793"},
{file = "pandas-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bb9d840bf15656805f6a3d87eea9dcb7efdf1314a82adcf7f00b820427c5570"},
{file = "pandas-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0778ab54c8f399d83d98ffb674d11ec716449956bc6f6821891ab835848687f2"},
{file = "pandas-2.0.0-cp39-cp39-win32.whl", hash = "sha256:70db5c278bbec0306d32bf78751ff56b9594c05a5098386f6c8a563659124f91"},
{file = "pandas-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f3320bb55f34af4193020158ef8118ee0fb9aec7cc47d2084dbfdd868a0a24f"},
{file = "pandas-2.0.0.tar.gz", hash = "sha256:cda9789e61b44463c1c4fe17ef755de77bcd13b09ba31c940d20f193d63a5dc8"},
]
[package.dependencies]
numpy = [
{version = ">=1.21.0", markers = "python_version >= \"3.10\""},
{version = ">=1.23.2", markers = "python_version >= \"3.11\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
tzdata = ">=2022.1"
[package.extras]
all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"]
aws = ["s3fs (>=2021.08.0)"]
clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"]
compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"]
computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"]
excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"]
feather = ["pyarrow (>=7.0.0)"]
fss = ["fsspec (>=2021.07.0)"]
gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"]
hdf5 = ["tables (>=3.6.1)"]
html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"]
mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"]
output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"]
parquet = ["pyarrow (>=7.0.0)"]
performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"]
plot = ["matplotlib (>=3.6.1)"]
postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"]
spss = ["pyreadstat (>=1.1.2)"]
sql-other = ["SQLAlchemy (>=1.4.16)"]
test = ["hypothesis (>=6.34.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.6.3)"]
[[package]] [[package]]
name = "pluggy" name = "pluggy"
version = "1.0.0" version = "1.0.0"
...@@ -1113,14 +1187,14 @@ email = ["email-validator (>=1.0.3)"] ...@@ -1113,14 +1187,14 @@ email = ["email-validator (>=1.0.3)"]
[[package]] [[package]]
name = "pygments" name = "pygments"
version = "2.14.0" version = "2.15.0"
description = "Pygments is a syntax highlighting package written in Python." description = "Pygments is a syntax highlighting package written in Python."
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.7"
files = [ files = [
{file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, {file = "Pygments-2.15.0-py3-none-any.whl", hash = "sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094"},
{file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, {file = "Pygments-2.15.0.tar.gz", hash = "sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500"},
] ]
[package.extras] [package.extras]
...@@ -1170,6 +1244,21 @@ psutil = ["psutil (>=3.0)"] ...@@ -1170,6 +1244,21 @@ psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"] setproctitle = ["setproctitle"]
testing = ["filelock"] testing = ["filelock"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]] [[package]]
name = "python-dotenv" name = "python-dotenv"
version = "1.0.0" version = "1.0.0"
...@@ -1200,6 +1289,18 @@ files = [ ...@@ -1200,6 +1289,18 @@ files = [
[package.extras] [package.extras]
dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"]
[[package]]
name = "pytz"
version = "2023.3"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
{file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
]
[[package]] [[package]]
name = "pyyaml" name = "pyyaml"
version = "6.0" version = "6.0"
...@@ -1435,6 +1536,18 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g ...@@ -1435,6 +1536,18 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]] [[package]]
name = "sniffio" name = "sniffio"
version = "1.3.0" version = "1.3.0"
...@@ -1711,6 +1824,18 @@ files = [ ...@@ -1711,6 +1824,18 @@ files = [
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
] ]
[[package]]
name = "tzdata"
version = "2023.3"
description = "Provider of IANA time zone data"
category = "main"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"},
{file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"},
]
[[package]] [[package]]
name = "ujson" name = "ujson"
version = "5.7.0" version = "5.7.0"
...@@ -1992,4 +2117,4 @@ files = [ ...@@ -1992,4 +2117,4 @@ files = [
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.10" python-versions = "^3.10"
content-hash = "dfd9dd13f5970323abae96a9b68c21a666a0befb4d9111bf1c12bdfb832ee332" content-hash = "a69c1ad521839c159e8faf2ac74dba046f5ee8c93c90df3492c21f4c4822cfda"
[tool.poetry] [tool.poetry]
name = "audio-analyzer" name = "audio-analyzer"
version = "0.0.1" version = "0.1.0"
description = "MPAI CAE-ARP Audio Analyser" description = "MPAI CAE-ARP Audio Analyser"
authors = ["Matteo Spanio <dev2@audioinnova.com>"] authors = ["Matteo Spanio <dev2@audioinnova.com>"]
license = "GPLv3" license = "GPLv3"
...@@ -11,6 +11,8 @@ python = "^3.10" ...@@ -11,6 +11,8 @@ python = "^3.10"
mpai-cae-arp = "^0.2.3" mpai-cae-arp = "^0.2.3"
numpy = "1.23.3" numpy = "1.23.3"
rich = "^13.3.3" rich = "^13.3.3"
pandas = "^2.0.0"
scikit-learn = "^1.2.2"
[tool.poetry.group.docs.dependencies] [tool.poetry.group.docs.dependencies]
...@@ -32,7 +34,7 @@ build-backend = "poetry.core.masonry.api" ...@@ -32,7 +34,7 @@ build-backend = "poetry.core.masonry.api"
[tool.ruff] [tool.ruff]
line-length = 88 line-length = 88
src = ["src"] src = ["src", "tests"]
select = [ select = [
"E", # pycodestyle "E", # pycodestyle
"F", # pyflakes "F", # pyflakes
...@@ -41,5 +43,5 @@ select = [ ...@@ -41,5 +43,5 @@ select = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
minversion = "6.0" minversion = "6.0"
addopts = "-ra -q -n auto -W error::RuntimeWarning" addopts = "-ra -q -W error::RuntimeWarning"
testpaths = ["tests"] testpaths = ["tests"]
\ No newline at end of file
import argparse import argparse
import os import os
import time import sys
from rich.console import Console from rich.console import Console
import segment_finder as sf import segment_finder as sf
from mpai_cae_arp.types.irregularity import IrregularityFile from mpai_cae_arp.types.irregularity import IrregularityFile
from mpai_cae_arp.files import File, FileType
from mpai_cae_arp.io import prettify, Style
def get_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument("--working-directory", "-w", help="Working directory", required=True) def get_args() -> tuple[str, str]:
parser.add_argument("--files-name", "-f", nargs="+", help="Files name", required=True) if len(sys.argv) > 1:
return parser.parse_args() parser = argparse.ArgumentParser(
prog="audio-analyzer",
formatter_class=argparse.RawDescriptionHelpFormatter,
description=f"A tool that implements the {prettify('MPAI CAE-ARP Audio Analyser', styles=[Style.BOLD])} Technical Specification.",
epilog="For support, please contact Matteo Spanio <dev2@audioinnova.com>.\n"
"This software is licensed under the GNU General Public License v3.0."
)
parser.add_argument("--working-directory", "-w", help="The path where the AIW will find and save the files")
parser.add_argument("--files-name", "-f", help=f"The name of the files to be analyzed {prettify('without extension', styles=[Style.UNDERLINE])}")
args = parser.parse_args()
return args.working_directory, args.files_name
else:
return os.getenv("WORKING_DIRECTORY"), os.getenv("FILES_NAME")
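For context, a hedged sketch of the two invocation modes this fallback enables; the script name, paths, and file names below are placeholders:

import os
import subprocess

# 1) explicit CLI flags
subprocess.run(["python", "main.py", "-w", "/data/arp_job", "-f", "tape_A001"])

# 2) no arguments: the values are read from the environment instead
env = {**os.environ, "WORKING_DIRECTORY": "/data/arp_job", "FILES_NAME": "tape_A001"}
subprocess.run(["python", "main.py"], env=env)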
def exit_with_error(error_message: str, console) -> None: def exit_with_error(error_message: str, console) -> None:
console.print(f"[red bold]Error: {error_message}") console.print(f"[red bold]Error: {error_message} :t-rex:")
quit(os.EX_USAGE) quit(os.EX_USAGE)
def main(): def main() -> None:
args = get_args()
console = Console() console = Console()
console.print("[bold]Welcome to ARP Audio Analyzer!") console.print("[bold]Welcome to ARP Audio Analyser!")
working_directory = args.working_directory
files_name = args.files_name
with console.status("Reading input files...", spinner="dots"): working_directory, files_name = get_args()
time.sleep(1) if any(map(lambda x: x is None, [working_directory, files_name])):
audio_src = os.path.join(working_directory, "PreservationAudioFile", f"{files_name[0]}.wav") exit_with_error("{}\n{}".format(
video_src = os.path.join(working_directory, "PreservationAudioVisualFile", f"{files_name[0]}.mov") "Working directory or files name not specified!",
"Try -h/--help to know more about Audio Analyser usage"), console)
with console.status("[purple]Reading input files", spinner="dots"):
audio_src = os.path.join(working_directory, "PreservationAudioFile", f"{files_name}.wav")
video_src = os.path.join(working_directory, "PreservationAudioVisualFile", f"{files_name}.mov")
console.log(audio_src)
console.log(video_src)
audio_exists = os.path.exists(audio_src) audio_exists = os.path.exists(audio_src)
video_exists = os.path.exists(video_src) video_exists = os.path.exists(video_src)
...@@ -49,20 +61,44 @@ def main(): ...@@ -49,20 +61,44 @@ def main():
exit_with_error("Input files not found!", console) exit_with_error("Input files not found!", console)
# create irregularity file 1 # create irregularity file 1
with console.status("Creating irregularity file 1...", spinner="dots"): with console.status("[purple]Creating irregularity file 1", spinner="dots"):
irreg1 = sf.create_irreg_file(audio_src, video_src) irreg1 = sf.create_irreg_file(audio_src, video_src)
console.log(f"Found {len(irreg1.irregularities)} irregularities from Audio source")
File(f"{working_directory}/IrregularityFile1.json", FileType.JSON).write_content(irreg1.to_json())
console.log("[green]Irregularity file 1 created")
# create irregularity file 2 # create irregularity file 2
with console.status("Creating irregularity file 2...", spinner="dots"): with console.status("[purple]Creating irregularity file 2", spinner="dots"):
video_irreg_1 = {} video_irreg_1 = {
"Irregularities": [
{
"TimeLabel": "00:03:00.000",
"Source": "v",
"IrregularityID": "09a0b0c0-d0e0-f000-0000-000000000000"
},
{
"TimeLabel": "00:03:01.000",
"Source": "v",
"IrregularityID": "09a0b0c0-d0e0-f000-0000-000000000001"
}
],
"Offset": 170
}
console.log("Video irregularity file 1 found")
irreg2 = sf.merge_irreg_files(irreg1, IrregularityFile.from_json(video_irreg_1)) irreg2 = sf.merge_irreg_files(irreg1, IrregularityFile.from_json(video_irreg_1))
File(f"{working_directory}/IrregularityFile2.json", FileType.JSON).write_content(irreg2.to_json())
console.log("[green]Irregularity file 2 created")
with console.status("Extracting audio irregularities...", spinner="bouncingBall"): with console.status("[cyan]Extracting audio irregularities", spinner="dots"):
sf.extract_audio_irregularities(audio_src, irreg2, working_directory) sf.extract_audio_irregularities(audio_src, irreg2, working_directory)
console.log("[green]Audio irregularities extracted")
# classify audio irregularities # classify audio irregularities
with console.status("[cyan bold]Classifying audio irregularities", spinner="monkey"):
sf.classify_audio_irregularities(working_directory)
console.print("[green bold]Success!") console.print("[green bold]Success! :tada:")
quit(os.EX_OK)
if __name__ == "__main__": if __name__ == "__main__":
......
"""
The :py:mod:`ml` package contains a set of facilities for data analysis. It is structured in four main modules: :mod:`ml.classification` provides functions for fitting and evaluating classifiers, :mod:`ml.clusters` provides functions for fitting and evaluating clustering algorithms, :mod:`ml.datasets` contains functions to easily load the datasets described in :ref:`datasets`, and :mod:`ml.visualization` provides functions for visualizing data and results. All of these modules interface with the :py:mod:`sklearn` and :py:mod:`pandas` packages.
This package is specific to analyzing data from the :ref:`datasets` module, unlike :mod:`audiohandler`, which has been designed to represent audio data in any kind of application.
"""
from ._classification import load_model, generate_classifier
from ._data_structures import Classifier, ClassificationResult
__all__ = [
"load_model", "generate_classifier", "Classifier",
"ClassificationResult"
]
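As a usage note, a minimal sketch of the re-exported API, assuming this init lives under :mod:`ml.classification` and that the pickled model is available (or gets generated on first use):

from ml.classification import load_model, Classifier

clf: Classifier = load_model("pretto_and_berio_nono_classifier")
print(clf.get_model_description())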
import pickle
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
from ml.datasets import load_berio_nono, load_pretto
from ._data_structures import Classifier
from ._constants import CLASSIFICATION_MAPPING, MODELS_PATH
def load_model(model_name: str) -> Classifier:
"""Load a trained classifier from disk.
Available models are:
- pretto_classifier
- pretto_and_berio_nono_classifier
Parameters
----------
model_name: str
the name of the model to be loaded
Raises
------
ValueError
if the model name is not valid
Returns
-------
Classifier
the classifier loaded from disk
"""
models = {
"pretto_classifier":
MODELS_PATH.joinpath('pretto_classifier.pkl'),
"pretto_and_berio_nono_classifier":
MODELS_PATH.joinpath('pretto_and_berio_nono_classifier.pkl')
}
if model_name not in models:
    raise ValueError(f"Invalid model name: {model_name}")

try:
with open(models[model_name], 'rb') as f:
return Classifier(pickle.load(f))
except FileNotFoundError:
generate_classifier(models[model_name])
return load_model(model_name)
def generate_classifier(dest_path):
    """Train a random forest on the combined Pretto and Berio-Nono datasets and pickle it to ``dest_path``."""
data1 = load_pretto()
data2 = load_berio_nono()
data = pd.concat([data1, data2])
data = data.replace(CLASSIFICATION_MAPPING)
X = data.drop(columns=['noise_type', 'label'], axis=1)
y = data.label
rfc = RandomForestClassifier(n_estimators=111,
criterion="log_loss",
max_features="log2",
min_samples_leaf=1,
n_jobs=-1)
rfc.fit(X, y)
with open(dest_path, 'wb') as f:
pickle.dump(rfc, f)
from importlib import resources
MODELS_FOLDER = 'ml.classification.models'
MODELS_PATH = resources.files(MODELS_FOLDER)
CLASSIFICATION_MAPPING = {
'3N_3N': 0,
'3N_7C': 1,
'3N_7N': 2,
'3N_15C': 3,
'3N_15N': 4,
'7C_3N': 5,
'7C_7C': 6,
'7C_7N': 7,
'7C_15C': 8,
'7C_15N': 9,
'7N_3N': 10,
'7N_7C': 11,
'7N_7N': 12,
'7N_15C': 13,
'7N_15N': 14,
'15C_3N': 15,
'15C_7C': 16,
'15C_7N': 17,
'15C_15C': 18,
'15C_15N': 19,
'15N_3N': 20,
'15N_7C': 21,
'15N_7N': 22,
'15N_15C': 23,
'15N_15N': 24
}
INVERSE_CLASSIFICATION_MAPPING = {
v: k
for k, v in CLASSIFICATION_MAPPING.items()
}
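A quick sanity sketch: the two dictionaries above are mutual inverses, so every label survives an encode/decode round trip.

# Round-trip check over all 25 labels.
assert all(
    INVERSE_CLASSIFICATION_MAPPING[CLASSIFICATION_MAPPING[label]] == label
    for label in CLASSIFICATION_MAPPING
)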
from dataclasses import dataclass
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from mpai_cae_arp.audio.standards import EqualizationStandard, SpeedStandard
from ._constants import INVERSE_CLASSIFICATION_MAPPING
@dataclass
class ClassificationResult:
"""
A class to represent the result of a classification.
Since the classification recognizes four pieces of information, they are stored in a single container class so that everything can be found in one place while still allowing access to only the part that is needed.
The pieces of information are:
- the reading speed of the tape,
- the writing speed,
- the reading post-emphasis equalization curve
- the writing pre-emphasis equalization curve
"""
writing_speed: SpeedStandard
reading_speed: SpeedStandard
writing_equalization: EqualizationStandard
reading_equalization: EqualizationStandard
_MAP_CLASS_TO_RESULT = {
'3N_3N':
ClassificationResult(SpeedStandard.III, SpeedStandard.III,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'3N_7C':
ClassificationResult(SpeedStandard.III, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'3N_7N':
ClassificationResult(SpeedStandard.III, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'3N_15C':
ClassificationResult(SpeedStandard.III, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'3N_15N':
ClassificationResult(SpeedStandard.III, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'7C_3N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.III,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'7C_7C':
ClassificationResult(SpeedStandard.IV, SpeedStandard.IV,
EqualizationStandard.CCIR, EqualizationStandard.CCIR),
'7C_7N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.IV,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'7C_15C':
ClassificationResult(SpeedStandard.IV, SpeedStandard.V,
EqualizationStandard.CCIR, EqualizationStandard.CCIR),
'7C_15N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.V,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'7N_3N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.III,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'7N_7C':
ClassificationResult(SpeedStandard.IV, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'7N_7N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'7N_15C':
ClassificationResult(SpeedStandard.IV, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'7N_15N':
ClassificationResult(SpeedStandard.IV, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'15C_3N':
ClassificationResult(SpeedStandard.V, SpeedStandard.III,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'15C_7C':
ClassificationResult(SpeedStandard.V, SpeedStandard.IV,
EqualizationStandard.CCIR, EqualizationStandard.CCIR),
'15C_7N':
ClassificationResult(SpeedStandard.V, SpeedStandard.IV,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'15C_15C':
ClassificationResult(SpeedStandard.V, SpeedStandard.V,
EqualizationStandard.CCIR, EqualizationStandard.CCIR),
'15C_15N':
ClassificationResult(SpeedStandard.V, SpeedStandard.V,
EqualizationStandard.CCIR, EqualizationStandard.NAB),
'15N_3N':
ClassificationResult(SpeedStandard.V, SpeedStandard.III,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'15N_7C':
ClassificationResult(SpeedStandard.V, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'15N_7N':
ClassificationResult(SpeedStandard.V, SpeedStandard.IV,
EqualizationStandard.NAB, EqualizationStandard.NAB),
'15N_15C':
ClassificationResult(SpeedStandard.V, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.CCIR),
'15N_15N':
ClassificationResult(SpeedStandard.V, SpeedStandard.V,
EqualizationStandard.NAB, EqualizationStandard.NAB),
}
class Classifier:
    """Thin wrapper around a fitted scikit-learn model that maps its numeric predictions back to classification labels."""
model: RandomForestClassifier | DecisionTreeClassifier | KNeighborsClassifier
def __init__(self, model):
self.model = model
def predict(self, x: pd.DataFrame) -> pd.DataFrame:
prediction = self.model.predict(x)
prediction = pd.DataFrame(prediction, columns=['classification'])
prediction = prediction.replace(INVERSE_CLASSIFICATION_MAPPING)
prediction = prediction.replace(_MAP_CLASS_TO_RESULT)
return prediction
def get_model_description(self) -> str:
return str(self.model)
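To illustrate the two-step decoding that predict applies to each raw model output, a small sketch using the mappings already imported and defined in this module (the raw class value is only an example):

raw_class = 6                                      # e.g. a single RandomForest output
label = INVERSE_CLASSIFICATION_MAPPING[raw_class]  # -> '7C_7C'
result = _MAP_CLASS_TO_RESULT[label]               # -> ClassificationResult for speed IV/IV, CCIR/CCIR
print(result.reading_speed, result.reading_equalization)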
"""
The :mod:`ml.datasets` module includes utilities to load datasets.
"""
from ._loaders import load_pretto, load_berio_nono, _filter_dataset
__all__ = ['load_pretto', 'load_berio_nono', '_filter_dataset']
import itertools
from importlib import resources
import pandas as pd
_DATA_MODULE = 'ml.datasets'
def _filter_dataset(data: pd.DataFrame,
labels: list | None = None,
noise_type: str | None = None,
combination: bool = False):
df = data
if labels is not None:
if combination:
df = data[data['label'].isin(
['_'.join(l) for l in itertools.product(labels, labels)])]
else:
df = data[data['label'].isin(labels)]
if noise_type is not None:
df = df[df.noise_type == noise_type]
return df
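A small sketch of the label expansion applied when combination is True: the cartesian product of the requested labels yields the paired labels used for filtering.

import itertools

labels = ["7C", "7N"]
combined = ["_".join(pair) for pair in itertools.product(labels, labels)]
print(combined)  # ['7C_7C', '7C_7N', '7N_7C', '7N_7N']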
def load_pretto(filters: dict | None = None, return_X_y: bool = False):
"""Load and return the Pretto dataset (classification).
================= ============================
Classes 25
Samples per noise 2075 (A), 5050 (B), 1933 (C)
Samples total 9058
Dimensionality 15
Features string, float
================= ============================
Read more in the :ref:`Datasets <pretto>`.
Examples
--------
.. doctest::
>>> from ml.datasets import load_pretto
>>> data = load_pretto(filters={'labels': ['7C', '7N'], 'noise_type': None, 'combination': True})
>>> data.noise_type.unique()
array(['A', 'B', 'C'], dtype=object)
>>> data.label.unique()
array(['7C_7C', '7C_7N', '7N_7C', '7N_7N'], dtype=object)
"""
data = pd.read_csv(resources.files(_DATA_MODULE).joinpath('data/train.csv'))
if filters is not None:
data = _filter_dataset(data, filters.get('labels'),
filters.get('noise_type'),
filters.get('combination'))
if return_X_y:
return data.drop("label", axis=1), data["label"]
return data
def load_berio_nono(filters: dict | None = None, return_X_y: bool = False):
"""Load and return the Berio-Nono dataset (classification).
================= ============================
Classes 4
Samples per noise 1231 (A), 1796 (B), 9175 (C)
Samples total 12202
Dimensionality 15
Features string, float
================= ============================
Read more in the :ref:`Datasets <berio-nono>`.
Examples
--------
.. doctest::
>>> from ml.datasets import load_berio_nono
>>> data = load_berio_nono(filters={'labels': ['7C', '7N'], 'noise_type': None, 'combination': True})
>>> data.noise_type.unique()
array(['A', 'B', 'C'], dtype=object)
>>> data.label.unique()
array(['7C_7C', '7N_7N'], dtype=object)
"""
data = pd.read_csv(resources.files(_DATA_MODULE).joinpath('data/test.csv'))
if filters is not None:
data = _filter_dataset(data, filters.get('labels'),
filters.get('noise_type'),
filters.get('combination'))
if return_X_y:
return data.drop("label", axis=1), data["label"]
return data
(Two additional files in this commit are too large for their diffs to be displayed; only the blobs are viewable.)
import os import os
import tempfile import tempfile
from uuid import uuid4 from uuid import uuid4
import numpy as np
from mpai_cae_arp.audio import AudioWave, Noise from mpai_cae_arp.audio import AudioWave, Noise
from mpai_cae_arp.files import File, FileType from mpai_cae_arp.files import File, FileType
from mpai_cae_arp.types.irregularity import Irregularity, IrregularityFile, Source from mpai_cae_arp.types.irregularity import Irregularity, IrregularityFile, Source
from mpai_cae_arp.time import frames_to_seconds, seconds_to_frames from mpai_cae_arp.time import frames_to_seconds, seconds_to_frames, seconds_to_string, time_to_seconds
temp_dir = tempfile.gettempdir() temp_dir = tempfile.gettempdir()
TMP_CHANNELS_MAP = os.path.join(temp_dir, "channels_map.json") TMP_CHANNELS_MAP = os.path.join(temp_dir, "channels_map.json")
...@@ -27,17 +26,22 @@ def calculate_offset(audio: AudioWave, video: AudioWave) -> float: ...@@ -27,17 +26,22 @@ def calculate_offset(audio: AudioWave, video: AudioWave) -> float:
float float
""" """
corr = np.correlate(audio.array, video.array, mode="full") # corr = np.correlate(audio.array, video.array, mode="full")
lags = np.arange(-len(audio.array) + 1, len(video.array)) # lags = np.arange(-len(audio.array) + 1, len(video.array))
lag_idx = np.argmax(np.abs(corr)) # lag_idx = np.argmax(np.abs(corr))
return lags[lag_idx] / audio.samplerate # return lags[lag_idx] / audio.samplerate
return 150
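For reference, a hedged standalone sketch of the cross-correlation offset estimate described by the commented-out lines above, assuming two 1-D signal arrays sampled at the same rate:

import numpy as np

def estimate_offset(audio_array: np.ndarray, video_array: np.ndarray, samplerate: int) -> float:
    # Full cross-correlation; the lag with the largest absolute correlation is
    # the best alignment, converted from samples to seconds.
    corr = np.correlate(audio_array, video_array, mode="full")
    lags = np.arange(-len(audio_array) + 1, len(video_array))
    lag_idx = int(np.argmax(np.abs(corr)))
    return lags[lag_idx] / samplerate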
def get_irregularities_from_audio(audio_src: AudioWave) -> list[Irregularity]: def get_irregularities_from_audio(audio_src: AudioWave) -> list[Irregularity]:
input_channels: list[AudioWave] = [] input_channels: list[AudioWave] = []
for channel in audio_src.channels:
input_channels.append(audio_src.get_channel(channel)) if audio_src.channels > 1:
for channel in range(audio_src.channels):
input_channels.append(audio_src.get_channel(channel))
else:
input_channels.append(audio_src)
channels_map = {} channels_map = {}
...@@ -52,12 +56,12 @@ def get_irregularities_from_audio(audio_src: AudioWave) -> list[Irregularity]: ...@@ -52,12 +56,12 @@ def get_irregularities_from_audio(audio_src: AudioWave) -> list[Irregularity]:
id = uuid4() id = uuid4()
irreg_list.append( irreg_list.append(
Irregularity( Irregularity(
uuid=id, irregularity_ID=id,
source=Source.AUDIO, source=Source.AUDIO,
time_label=frames_to_seconds(start, audio.samplerate) time_label= seconds_to_string(frames_to_seconds(start, audio.samplerate))
) )
) )
channels_map[id] = idx channels_map[str(id)] = idx
File(TMP_CHANNELS_MAP, FileType.JSON).write_content(channels_map) File(TMP_CHANNELS_MAP, FileType.JSON).write_content(channels_map)
...@@ -69,32 +73,57 @@ def create_irreg_file(audio_src: str, video_src: str) -> IrregularityFile: ...@@ -69,32 +73,57 @@ def create_irreg_file(audio_src: str, video_src: str) -> IrregularityFile:
audio = AudioWave.from_file(audio_src, bufferize=True) audio = AudioWave.from_file(audio_src, bufferize=True)
offset = calculate_offset(audio, video_src) offset = calculate_offset(audio, video_src)
return IrregularityFile(get_irregularities_from_audio(audio), offset=offset) irregularities = get_irregularities_from_audio(audio)
irregularities.sort(key=lambda x: time_to_seconds(x.time_label))
return IrregularityFile(irregularities=irregularities, offset=offset)
def merge_irreg_files( def merge_irreg_files(
file1: IrregularityFile, file1: IrregularityFile,
file2: IrregularityFile) -> IrregularityFile: file2: IrregularityFile
new_file = IrregularityFile( ) -> IrregularityFile:
irregularities=file1.irregularities + file2.irregularities,
offset=np.argmax([file1.offset, file2.offset])) match file1.offset, file2.offset:
case None, _:
offset=file2.offset
case _, None:
offset=file1.offset
case _, _:
offset=max(file1.offset, file2.offset)
new_file.irregularities.sort(key=lambda x: x.time_label) irregularities = file1.irregularities + file2.irregularities
irregularities.sort(key=lambda x: time_to_seconds(x.time_label))
new_file = IrregularityFile(
irregularities=irregularities, offset=offset)
return new_file return new_file
def extract_audio_irregularities( def extract_audio_irregularities(
audio: AudioWave, audio_src: str,
irreg_file: IrregularityFile, irreg_file: IrregularityFile,
path: str) -> None: path: str
) -> None:
channels_map = File(TMP_CHANNELS_MAP, FileType.JSON).get_content() channels_map = File(TMP_CHANNELS_MAP, FileType.JSON).get_content()
os.makedirs(f"{path}/AudioBlocks", exist_ok=True)
audio = AudioWave.from_file(audio_src, bufferize=True)
for irreg in irreg_file.irregularities: for irreg in irreg_file.irregularities:
if irreg.source == Source.AUDIO: if channels_map.get(str(irreg.irregularity_ID)) is None:
chunk = audio.get_channel(channels_map[irreg.irregularity_ID])[ audio[seconds_to_frames(
seconds_to_frames( time_to_seconds(irreg.time_label), audio.samplerate
irreg.time_label, audio.samplerate ):seconds_to_frames(
):seconds_to_frames( time_to_seconds(irreg.time_label), audio.samplerate)+audio.samplerate//2]\
irreg.time_label, audio.samplerate)+audio.samplerate//2] .save(f"{path}/AudioBlocks/{irreg.irregularity_ID}.wav")
chunk.save(f"{path}/AudioBlocks/{irreg.irregularity_ID}.wav") else:
audio.get_channel(channels_map[str(irreg.irregularity_ID)])[
seconds_to_frames(
time_to_seconds(irreg.time_label), audio.samplerate
):seconds_to_frames(
time_to_seconds(irreg.time_label), audio.samplerate)+audio.samplerate//2]\
.save(f"{path}/AudioBlocks/{irreg.irregularity_ID}.wav")
os.remove(TMP_CHANNELS_MAP) os.remove(TMP_CHANNELS_MAP)
import os
import tempfile
import uuid
import numpy as np
from mpai_cae_arp.audio import AudioWave
from mpai_cae_arp.types.irregularity import Irregularity, IrregularityFile, Source
import segment_finder as sf
def test_calculate_offset():
audio = AudioWave(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 24, 1, 8000)
video = AudioWave(np.array([0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 24, 1, 8000)
offset = sf.calculate_offset(audio, video)
assert offset == 0.0
def test_get_irregularities_from_audio():
audio = AudioWave(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 24, 1, 8000)
irregularities = sf.get_irregularities_from_audio(audio)
assert irregularities == []
def test_merge_irreg_files():
file1 = IrregularityFile(
irregularities=[
Irregularity(
irregularity_ID=uuid.uuid4(),
source=Source.AUDIO,
time_label="00:10:00.000"
)],
offset=0.0)
file2 = IrregularityFile(
irregularities=[
Irregularity(
irregularity_ID=uuid.uuid4(),
source=Source.AUDIO,
time_label="00:00:00.000")],
offset=1.0)
new_file = sf.merge_irreg_files(file1, file2)
assert new_file.offset == 1.0
assert len(new_file.irregularities) == 2
def test_extract_audio_irregularities():
audio = AudioWave(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 24, 1, 8000)
irregularities = sf.get_irregularities_from_audio(audio)
irreg_file = IrregularityFile(irregularities=irregularities, offset=0.0)
sf.extract_audio_irregularities(audio, irreg_file, tempfile.gettempdir())
for irreg in irreg_file.irregularities:
if irreg.source == Source.AUDIO:
assert os.path.exists(f"{tempfile.gettempdir()}/AudioBlocks/{irreg.irregularity_ID}.wav")