feat: wip support for multiple tenants

This commit is contained in:
Jonathan Kössler 2024-07-09 18:20:55 +02:00
parent de41030e69
commit b2e3ae092f
7 changed files with 807 additions and 62 deletions

249
poetry.lock generated
View File

@@ -1,5 +1,35 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "aio-pika"
version = "9.4.2"
description = "Wrapper around the aiormq for asyncio and humans"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "aio_pika-9.4.2-py3-none-any.whl", hash = "sha256:22e5fa27d10a3817dd24c031cc477953aaf7c3be5f4f25d2582a55ec229adc4c"},
{file = "aio_pika-9.4.2.tar.gz", hash = "sha256:d1217dc28d09be9dff96c06cdf2e82c92599a34f154e8932bf35373157f3424d"},
]
[package.dependencies]
aiormq = ">=6.8.0,<6.9.0"
yarl = "*"
[[package]]
name = "aiormq"
version = "6.8.0"
description = "Pure python AMQP asynchronous client library"
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "aiormq-6.8.0-py3-none-any.whl", hash = "sha256:9a16174dcae4078c957a773d2f02d3dfd6c2fcf12c909dc244333a458f2aeab0"},
{file = "aiormq-6.8.0.tar.gz", hash = "sha256:198f9c7430feb7bc491016099a06266dc45880b6b1de3925d410fde6541a66fb"},
]
[package.dependencies]
pamqp = "3.3.0"
yarl = "*"
[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -1604,6 +1634,105 @@ requests-oauthlib = ">=0.5.0"
[package.extras]
async = ["aiodns", "aiohttp (>=3.0)"]
[[package]]
name = "multidict"
version = "6.0.5"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
files = [
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
{file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
{file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
{file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
{file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
{file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
{file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
{file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
{file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
{file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
{file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
{file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
{file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
{file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
{file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
{file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
{file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
{file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
{file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
{file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
{file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -2068,6 +2197,21 @@ files = [
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
name = "pamqp"
version = "3.3.0"
description = "RabbitMQ Focused AMQP low-level library"
optional = false
python-versions = ">=3.7"
files = [
{file = "pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0"},
{file = "pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b"},
]
[package.extras]
codegen = ["lxml", "requests", "yapf"]
testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", "flake8-import-order", "flake8-print", "flake8-quotes", "flake8-rst-docstrings", "flake8-tuple", "yapf"]
[[package]]
name = "parso"
version = "0.8.4"
@@ -3320,6 +3464,109 @@ files = [
{file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
]
[[package]]
name = "yarl"
version = "1.9.4"
description = "Yet another URL library"
optional = false
python-versions = ">=3.7"
files = [
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
{file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
{file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
{file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
{file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
{file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
{file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
{file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
{file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
{file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
{file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
{file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
{file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
{file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
{file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
{file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
{file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
{file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
{file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
{file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
{file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
]
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
[[package]]
name = "zipp"
version = "3.19.2"
@@ -3338,4 +3585,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.11"
content-hash = "62515e90532a3a74d73bc0d386d226abd56ba3de84ce80e853b127bf4d8dc512"
content-hash = "f5092a8dc5540c2085559368c90f5a69efe0b1eba468f5545f29194a305b004d"

View File

@@ -1,3 +1,5 @@
import asyncio
from dynaconf import Dynaconf
from fastapi import FastAPI
from kn_utils.logging import logger
@@ -5,6 +7,7 @@ from threading import Thread
from pyinfra.config.loader import get_pyinfra_validators, validate_settings
from pyinfra.queue.callback import Callback
from pyinfra.queue.threaded_tenants import ServiceQueueManager, TenantQueueManager
from pyinfra.queue.async_tenants import AsyncQueueManager
from pyinfra.utils.opentelemetry import instrument_pika, setup_trace, instrument_app
from pyinfra.webserver.prometheus import (
add_prometheus_endpoint,
@@ -34,8 +37,8 @@ def start_standard_queue_consumer(
app = app or FastAPI()
tenant_manager = TenantQueueManager(settings)
service_manager = ServiceQueueManager(settings)
# tenant_manager = TenantQueueManager(settings)
# service_manager = ServiceQueueManager(settings)
if settings.metrics.prometheus.enabled:
logger.info("Prometheus metrics enabled.")
@@ -48,16 +51,21 @@
instrument_pika()
instrument_app(app)
app = add_health_check_endpoint(app, service_manager.is_ready)
manager = AsyncQueueManager(settings=settings, message_processor=callback)
# app = add_health_check_endpoint(app, service_manager.is_ready)
app = add_health_check_endpoint(app, manager.is_ready)
webserver_thread = create_webserver_thread_from_settings(app, settings)
webserver_thread.start()
thread_t = Thread(target=tenant_manager.start_consuming, daemon=True)
thread_s = Thread(target=service_manager.start_sequential_basic_get, args=(callback,), daemon=True)
# thread_t = Thread(target=tenant_manager.start_consuming, daemon=True)
# thread_s = Thread(target=service_manager.start_sequential_basic_get, args=(callback,), daemon=True)
thread_t.start()
thread_s.start()
# thread_t.start()
# thread_s.start()
thread_t.join()
thread_s.join()
# thread_t.join()
# thread_s.join()
asyncio.run(manager.start_processing())

View File

@ -0,0 +1,395 @@
import aiormq
import asyncio
import aio_pika
import concurrent.futures
import requests
import json
from aio_pika import Message, DeliveryMode
from aio_pika.abc import AbstractIncomingMessage
from dynaconf import Dynaconf
from typing import Callable, Union
from kn_utils.logging import logger
from pyinfra.config.loader import validate_settings
from pyinfra.config.validators import queue_manager_validators
from pyinfra.config.loader import load_settings, local_pyinfra_root_path
from pyinfra.queue.callback import make_download_process_upload_callback
MessageProcessor = Callable[[dict], dict]
class AsyncQueueManager:
    """aio-pika based RabbitMQ manager that maintains one request/response
    queue pair per tenant and reacts to tenant lifecycle events published
    on a topic exchange (routing keys ``tenant.created`` / ``tenant.deleted``).
    """

    def __init__(self, settings: Dynaconf, message_processor: Callable = None) -> None:
        validate_settings(settings, queue_manager_validators)
        self.message_processor = message_processor
        self.connection_params = self.get_connection_params(settings)
        self.connection = None
        self.channel = None
        # Seed the tenant set from the tenant server; lifecycle events keep it current.
        self.active_tenants = self.get_initial_tenant_ids(tenant_endpoint_url=settings.storage.tenant_server.endpoint)
        # tenant_id -> asyncio.Task consuming that tenant's request queue
        self.consumer_tasks = {}
        self.connection_sleep = settings.rabbitmq.connection_sleep
        self.queue_expiration_time = settings.rabbitmq.queue_expiration_time
        self.tenant_created_queue_name = self.get_tenant_created_queue_name(settings)
        self.tenant_deleted_queue_name = self.get_tenant_deleted_queue_name(settings)
        self.tenant_events_dlq_name = self.get_tenant_events_dlq_name(settings)
        self.tenant_exchange_name = settings.rabbitmq.tenant_exchange_name
        self.service_request_exchange_name = settings.rabbitmq.service_request_exchange_name
        self.service_response_exchange_name = settings.rabbitmq.service_response_exchange_name
        self.service_request_queue_prefix = settings.rabbitmq.service_request_queue_prefix
        self.service_response_queue_prefix = settings.rabbitmq.service_response_queue_prefix
        self.service_dlq_name = settings.rabbitmq.service_dlq_name

    @staticmethod
    def get_connection_params(settings: Dynaconf):
        """Map Dynaconf settings onto ``aio_pika.connect_robust`` keyword arguments."""
        return {
            "host": settings.rabbitmq.host,
            "port": settings.rabbitmq.port,
            "login": settings.rabbitmq.username,
            "password": settings.rabbitmq.password,
            "client_properties": {"heartbeat": settings.rabbitmq.heartbeat},
        }

    def get_initial_tenant_ids(self, tenant_endpoint_url: str) -> set:
        """Fetch the currently known tenant ids from the tenant server.

        Returns an empty set when the server does not answer with JSON;
        raises ``requests.HTTPError`` on a non-2xx response.
        """
        response = requests.get(tenant_endpoint_url, timeout=10)
        response.raise_for_status()  # Raise an HTTPError for bad responses
        # The header may carry a charset suffix ("application/json; charset=utf-8"),
        # so an exact string comparison would wrongly reject valid JSON responses.
        content_type = response.headers.get("content-type", "").lower()
        if "application/json" in content_type:
            return {tenant["tenantId"] for tenant in response.json()}
        return set()

    def get_tenant_created_queue_name(self, settings: Dynaconf) -> str:
        """Queue name for tenant-created events (pod-specific unless defaulted)."""
        return self.get_queue_name_with_suffix(
            suffix=settings.rabbitmq.tenant_created_event_queue_suffix, pod_name=settings.kubernetes.pod_name
        )

    def get_tenant_deleted_queue_name(self, settings: Dynaconf) -> str:
        """Queue name for tenant-deleted events (pod-specific unless defaulted)."""
        return self.get_queue_name_with_suffix(
            suffix=settings.rabbitmq.tenant_deleted_event_queue_suffix, pod_name=settings.kubernetes.pod_name
        )

    def get_tenant_events_dlq_name(self, settings: Dynaconf) -> str:
        """Dead-letter queue name for tenant events (pod-specific unless defaulted)."""
        return self.get_queue_name_with_suffix(
            suffix=settings.rabbitmq.tenant_event_dlq_suffix, pod_name=settings.kubernetes.pod_name
        )

    def get_queue_name_with_suffix(self, suffix: str, pod_name: str) -> str:
        """Build ``<pod_name><suffix>`` or fall back to the subclass default name."""
        if not self.use_default_queue_name() and pod_name:
            return f"{pod_name}{suffix}"
        return self.get_default_queue_name()

    def use_default_queue_name(self) -> bool:
        """Subclass hook; the base class always uses pod-derived names."""
        return False

    def get_default_queue_name(self):
        """Subclass hook; only called when use_default_queue_name() is True."""
        raise NotImplementedError("Queue name method not implemented")

    async def is_ready(self) -> bool:
        """Health-check hook: report whether the channel is open.

        Reuses the existing connection instead of opening a fresh one on
        every readiness probe (the original reconnected per call, leaking
        connections).
        """
        if self.channel is None or self.channel.is_closed:
            await self.connect()
        return self.channel.is_open

    async def purge_queues(self) -> None:
        """Empty every active tenant's request and response queue (best effort)."""
        await self.establish_connection()
        try:
            for tenant_id in self.active_tenants:
                service_request_queue = await self.channel.get_queue(f"{self.service_request_queue_prefix}_{tenant_id}")
                await service_request_queue.purge()
                service_response_queue = await self.channel.get_queue(f"{self.service_response_queue_prefix}_{tenant_id}")
                await service_response_queue.purge()
            logger.info("Queues purged.")
        except aio_pika.exceptions.ChannelInvalidStateError:
            # Best effort: a closed/invalid channel just means nothing to purge.
            pass

    async def connect(self):
        """Open a robust connection and a fresh channel."""
        self.connection = await aio_pika.connect_robust(**self.connection_params)
        self.channel = await self.connection.channel()
        logger.info("Connection established.")

    async def establish_connection(self):
        """Connect and (re)declare all exchanges and per-tenant queues."""
        await self.connect()
        await self.initialize_queues()
        logger.info("Queues initialized.")

    async def start_processing(self):
        """Main entry point: connect, then run tenant-event handling and
        per-tenant consumers concurrently until cancelled."""
        await self.establish_connection()
        tenant_events = asyncio.create_task(self.handle_tenant_events())
        service_events = asyncio.create_task(self.start_consumers())
        await asyncio.gather(tenant_events, service_events)

    async def initialize_queues(self):
        """Declare the request/response exchanges and one durable queue pair
        per active tenant, bound by tenant id."""
        await self.channel.set_qos(prefetch_count=1)
        service_request_exchange = await self.channel.declare_exchange(name=self.service_request_exchange_name, type=aio_pika.ExchangeType.DIRECT)
        service_response_exchange = await self.channel.declare_exchange(name=self.service_response_exchange_name, type=aio_pika.ExchangeType.DIRECT)
        for tenant_id in self.active_tenants:
            request_queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
            request_queue = await self.channel.declare_queue(
                name=request_queue_name,
                durable=True,
                arguments={
                    "x-dead-letter-exchange": "",
                    "x-dead-letter-routing-key": self.service_dlq_name,
                    "x-expires": self.queue_expiration_time,  # TODO: check if necessary
                    "x-max-priority": 2,
                },
            )
            await request_queue.bind(exchange=service_request_exchange, routing_key=tenant_id)
            response_queue_name = f"{self.service_response_queue_prefix}_{tenant_id}"
            response_queue = await self.channel.declare_queue(
                name=response_queue_name,
                durable=True,
                arguments={
                    "x-dead-letter-exchange": "",
                    "x-dead-letter-routing-key": self.service_dlq_name,
                    "x-expires": self.queue_expiration_time,  # TODO: check if necessary
                },
            )
            await response_queue.bind(exchange=service_response_exchange, routing_key=tenant_id)

    async def handle_tenant_events(self):
        """Consume tenant lifecycle events and adjust per-tenant queues/consumers."""
        # Declare the topic exchange for tenant events
        exchange = await self.channel.declare_exchange(
            self.tenant_exchange_name, aio_pika.ExchangeType.TOPIC
        )
        # Declare a queue for receiving tenant events, dead-lettering rejects.
        queue = await self.channel.declare_queue(
            "tenant_events_queue",
            arguments={
                "x-dead-letter-exchange": "",
                "x-dead-letter-routing-key": self.tenant_events_dlq_name,
            },
            durable=True,
        )
        await queue.bind(exchange, routing_key="tenant.*")
        async with queue.iterator() as queue_iter:
            async for message in queue_iter:
                # message.process() acks on clean exit and nacks on exception,
                # so no manual ack/nack is needed here (the original called
                # message.ack()/nack() without awaiting them — no-op coroutines
                # that would have double-acknowledged if awaited).
                async with message.process(reject_on_redelivered=True):
                    routing_key = message.routing_key
                    tenant_id = message.body.decode()
                    if routing_key == "tenant.created":
                        await self.handle_tenant_created(tenant_id)
                    elif routing_key == "tenant.deleted":
                        await self.handle_tenant_deleted(tenant_id)
                    else:
                        logger.warning(f"Unknown tenant event routing key '{routing_key}'.")
        await self.restart_consumers()

    async def handle_tenant_created(self, tenant_id):
        """Create the tenant's queues and restart the consumer set."""
        await self.create_tenant_queues(tenant_id)
        await self.restart_consumers()

    async def handle_tenant_deleted(self, tenant_id):
        """Delete the tenant's queues and restart the consumer set."""
        await self.delete_tenant_queues(tenant_id)
        await self.restart_consumers()

    async def create_tenant_queues(self, tenant_id):
        """Declare and bind the request queue for a new tenant and track it."""
        queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
        queue = await self.channel.declare_queue(
            name=queue_name,
            durable=True,
            arguments={
                "x-dead-letter-exchange": "",
                "x-dead-letter-routing-key": self.service_dlq_name,
                "x-expires": self.queue_expiration_time,  # TODO: check if necessary
            },
        )
        exchange = await self.channel.get_exchange(self.service_request_exchange_name)
        await queue.bind(exchange=exchange, routing_key=tenant_id)
        self.active_tenants.add(tenant_id)

    async def delete_tenant_queues(self, tenant_id):
        """Unbind and delete the tenant's request queue and untrack the tenant."""
        queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
        queue = await self.channel.get_queue(queue_name)
        exchange = await self.channel.get_exchange(self.service_request_exchange_name)
        await queue.unbind(exchange=exchange, routing_key=tenant_id)
        await self.channel.queue_delete(queue_name)
        self.active_tenants.discard(tenant_id)

    async def consume_from_request_queue(self, tenant_id):
        """Consume the tenant's request queue until cancelled.

        The callback acks/nacks messages explicitly, so messages must NOT
        also be wrapped in ``message.process()`` (the original did both,
        acknowledging every message twice).
        """
        queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
        queue = await self.channel.get_queue(queue_name)
        # The callback is loop-invariant; build it once instead of per message.
        on_message_callback = await self._make_on_message_callback(self.message_processor, tenant_id)
        async with queue.iterator() as queue_iter:
            async for message in queue_iter:
                await on_message_callback(message)

    async def publish_to_service_response_queue(self, tenant_id, result):
        """Publish a JSON result to the response exchange, routed by tenant id."""
        service_response_exchange = await self.channel.get_exchange(self.service_response_exchange_name)
        await service_response_exchange.publish(
            aio_pika.Message(body=json.dumps(result).encode()),
            routing_key=tenant_id,
        )

    async def restart_consumers(self):
        """Stop current consumers and start new ones for the active tenants."""
        await self.stop_consumers()
        await self.start_consumers()

    async def start_consumers(self):
        """Start one consumer task per active tenant and wait on them.

        Only the tracked tasks are awaited; the original additionally
        gathered a second, untracked set of consumer coroutines, so every
        request queue was consumed twice.
        """
        for tenant_id in self.active_tenants:
            if tenant_id not in self.consumer_tasks:
                self.consumer_tasks[tenant_id] = asyncio.create_task(self.consume_from_request_queue(tenant_id))
        # return_exceptions=True so a cancellation during restart_consumers()
        # does not tear down the whole gather; real failures are logged.
        results = await asyncio.gather(*self.consumer_tasks.values(), return_exceptions=True)
        for result in results:
            if isinstance(result, Exception):
                logger.warning(f"Consumer task terminated with {result!r}.")

    async def stop_consumers(self):
        """Cancel all running consumer tasks and wait for them to finish."""
        for task in self.consumer_tasks.values():
            task.cancel()
            try:
                await task
            except asyncio.CancelledError:
                pass
        self.consumer_tasks.clear()

    async def main_loop(self):
        """Convenience entry point that only establishes the connection."""
        await self.establish_connection()

    async def shutdown(self):
        """Stop all consumers and close the connection."""
        await self.stop_consumers()
        if self.connection:
            await self.connection.close()

    @staticmethod
    def _encode_message(message: Union[str, bytes, dict]) -> bytes:
        """Normalize str/dict payloads to UTF-8 bytes for publishing."""
        if isinstance(message, str):
            return message.encode("utf-8")
        if isinstance(message, dict):
            return json.dumps(message).encode("utf-8")
        return message

    async def _make_on_message_callback(self, message_processor: MessageProcessor, tenant_id: str) -> Callable:
        """Build the per-tenant callback that unpacks, processes and answers
        a single incoming request message."""

        async def process_message_body_and_await_result(unpacked_message_body):
            # Run the (blocking) processor in the default executor so the event
            # loop stays responsive (heartbeats etc.) while the message is being
            # processed.  The original submitted to a thread pool but then
            # blocked the event loop on future.result(), defeating the purpose.
            logger.info("Processing payload in separate thread.")
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(None, message_processor, unpacked_message_body)

        async def on_message_callback(message: AbstractIncomingMessage):
            logger.info(f"Received message from queue with delivery_tag {message.delivery_tag}.")
            if message.redelivered:
                logger.warning(f"Declining message with {message.delivery_tag=} due to it being redelivered.")
                await message.nack(requeue=False)
                return
            if message.body.decode("utf-8") == "STOP":
                logger.info("Received stop signal, stopping consuming...")
                await message.ack()
                await self.stop_consumers()
                return
            try:
                # Forward only extension headers (x-*) to the processor.
                filtered_message_headers = (
                    {k: v for k, v in message.properties.headers.items() if k.lower().startswith("x-")}
                    if message.properties.headers
                    else {}
                )
                logger.debug(f"Processing message with {filtered_message_headers=}.")
                # "or {}" must apply to the awaited result, not to the coroutine
                # object (which is always truthy, as in the original).
                result: dict = (
                    await process_message_body_and_await_result(
                        {**json.loads(message.body), **filtered_message_headers}
                    )
                    or {}
                )
                await self.publish_to_service_response_queue(tenant_id, result)
                logger.info(f"Published result to queue {tenant_id}.")
                await message.ack()
                logger.debug(f"Message with {message.delivery_tag=} acknowledged.")
            except FileNotFoundError as e:
                # Missing input object: drop the message to the DLQ, keep consuming.
                logger.warning(f"{e}, declining message with {message.delivery_tag=}.")
                await message.nack(requeue=False)
            except Exception as e:
                logger.warning(f"Failed to process message with {message.delivery_tag=}, declining...", exc_info=True)
                logger.warning(e)
                await message.nack(requeue=False)
                raise

        return on_message_callback

    async def publish_message_to_input_queue(
            self, tenant_id: str, message: Union[str, bytes, dict]) -> None:
        """Publish a request message to the request exchange, routed by tenant id."""
        body = self._encode_message(message)
        await self.establish_connection()
        service_request_exchange = await self.channel.get_exchange(self.service_request_exchange_name)
        await service_request_exchange.publish(message=Message(body=body, delivery_mode=DeliveryMode.NOT_PERSISTENT), routing_key=tenant_id)
        logger.info(f"Published message to queue {tenant_id}.")

    async def publish_message_to_tenant_created_queue(
            self, message: Union[str, bytes, dict]) -> None:
        """Publish a tenant-created lifecycle event on the tenant topic exchange."""
        body = self._encode_message(message)
        await self.establish_connection()
        tenant_exchange = await self.channel.get_exchange(self.tenant_exchange_name)
        await tenant_exchange.publish(message=Message(body=body, delivery_mode=DeliveryMode.NOT_PERSISTENT), routing_key="tenant.created")
        logger.info(f"Published message to queue {self.tenant_created_queue_name}.")

    async def publish_message_to_tenant_deleted_queue(
            self, message: Union[str, bytes, dict]) -> None:
        """Publish a tenant-deleted lifecycle event on the tenant topic exchange."""
        body = self._encode_message(message)
        await self.establish_connection()
        tenant_exchange = await self.channel.get_exchange(self.tenant_exchange_name)
        # Routing key must match the "tenant.deleted" branch in
        # handle_tenant_events; the original published "tenant.delete", so
        # deletion events were never dispatched.
        await tenant_exchange.publish(message=Message(body=body, delivery_mode=DeliveryMode.NOT_PERSISTENT), routing_key="tenant.deleted")
        logger.info(f"Published message to queue {self.tenant_deleted_queue_name}.")
async def main() -> None:
    """Ad-hoc smoke test: build a manager, connect, then idle forever.

    NOTE(review): ``callback = ""`` is not callable — presumably a placeholder;
    any real message would fail in the processor. Confirm before use.
    """
    settings = load_settings(local_pyinfra_root_path / "config/")
    callback = ""
    manager = AsyncQueueManager(settings=settings, message_processor=callback)
    await manager.main_loop()
    while True:
        # asyncio.sleep instead of the original blocking time.sleep: a blocking
        # sleep inside a coroutine starves the event loop (heartbeats, robust
        # reconnects would stall for 100 s at a time).
        await asyncio.sleep(100)
        print("keep idling")


if __name__ == '__main__':
    asyncio.run(main())

View File

@ -261,9 +261,9 @@ class ServiceQueueManager(BaseQueueManager):
self.channel.exchange_declare(exchange=self.service_response_exchange_name, exchange_type="direct")
for tenant_id in self.tenant_ids:
response_queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
request_queue_name = f"{self.service_request_queue_prefix}_{tenant_id}"
self.channel.queue_declare(
queue=response_queue_name,
queue=request_queue_name,
durable=True,
arguments={
"x-dead-letter-exchange": "",
@ -273,7 +273,7 @@ class ServiceQueueManager(BaseQueueManager):
},
)
self.channel.queue_bind(
queue=response_queue_name, exchange=self.service_request_exchange_name, routing_key=tenant_id
queue=request_queue_name, exchange=self.service_request_exchange_name, routing_key=tenant_id
)
response_queue_name = f"{self.service_response_queue_prefix}_{tenant_id}"

View File

@ -34,6 +34,7 @@ opentelemetry-instrumentation-requests = "^0.46b0"
opentelemetry-instrumentation-fastapi = "^0.46b0"
wcwidth = "<=0.2.12"
azure-monitor-opentelemetry = "^1.6.0"
aio-pika = "^9.4.2"
[tool.poetry.group.dev.dependencies]
pytest = "^7"

View File

@ -1,15 +1,15 @@
import asyncio
import gzip
import json
import time
from aio_pika.abc import AbstractIncomingMessage
from operator import itemgetter
from kn_utils.logging import logger
from pyinfra.config.loader import load_settings, local_pyinfra_root_path
# from pyinfra.queue.manager import QueueManager
# from pyinfra.queue.sequential_tenants import QueueManager
from pyinfra.queue.threaded_tenants import ServiceQueueManager, TenantQueueManager
from pyinfra.queue.async_tenants import AsyncQueueManager
from pyinfra.storage.storages.s3 import get_s3_storage_from_settings
from pyinfra.storage.storages.s3 import S3Storage
settings = load_settings(local_pyinfra_root_path / "config/")
@ -43,69 +43,64 @@ def tenant_event_message(tenant_id: str):
return {"tenantId": tenant_id}
def send_tenant_event(tenant_id: str, event_type: str):
queue_manager = TenantQueueManager(settings)
queue_manager.purge_queues()
def on_message_callback(storage: S3Storage):
async def on_message(message: AbstractIncomingMessage) -> None:
async with message.process():
if not message.body:
raise ValueError
response = json.loads(message.body)
logger.info(f"Received {response}")
logger.info(f"Message headers: {message.properties.headers}")
await message.ack()
tenant_id, dossier_id, file_id = itemgetter("tenantId", "dossierId", "fileId")(response)
suffix = response["responseFileExtension"]
result = storage.get_object(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}")
result = json.loads(gzip.decompress(result))
logger.info(f"Contents of result on storage: {result}")
return on_message
async def send_tenant_event(queue_manager: AsyncQueueManager, tenant_id: str, event_type: str):
await queue_manager.purge_queues()
message = tenant_event_message(tenant_id)
if event_type == "create":
queue_manager.publish_message_to_tenant_created_queue(message=message)
await queue_manager.publish_message_to_tenant_created_queue(message=message)
elif event_type == "delete":
queue_manager.publish_message_to_tenant_deleted_queue(message=message)
await queue_manager.publish_message_to_tenant_deleted_queue(message=message)
else:
logger.warning(f"Event type '{event_type}' not known.")
queue_manager.stop_consuming()
await queue_manager.stop_consumers()
def send_service_request(tenant_id: str):
queue_manager = ServiceQueueManager(settings)
queue_name = f"{settings.rabbitmq.service_response_queue_prefix}_{tenant_id}"
async def send_service_request(queue_manager: AsyncQueueManager, tenant_id: str):
request_queue_name = f"{settings.rabbitmq.service_request_queue_prefix}_{tenant_id}"
queue_manager.purge_queues()
await queue_manager.purge_queues()
message = upload_json_and_make_message_body(tenant_id)
queue_manager.publish_message_to_input_queue(tenant_id=tenant_id, message=message)
logger.info(f"Put {message} on {queue_name}.")
await queue_manager.publish_message_to_input_queue(tenant_id=tenant_id, message=message)
logger.info(f"Put {message} on {request_queue_name}.")
storage = get_s3_storage_from_settings(settings)
for method_frame, properties, body in queue_manager.channel.consume(
queue=queue_name, inactivity_timeout=15
):
if not body:
break
response = json.loads(body)
logger.info(f"Received {response}")
logger.info(f"Message headers: {properties.headers}")
queue_manager.channel.basic_ack(method_frame.delivery_tag)
tenant_id, dossier_id, file_id = itemgetter("tenantId", "dossierId", "fileId")(response)
suffix = message["responseFileExtension"]
print(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}")
result = storage.get_object(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}")
result = json.loads(gzip.decompress(result))
logger.info(f"Contents of result on storage: {result}")
break
queue_manager.stop_consuming()
response_queue_name = f"{settings.rabbitmq.service_response_queue_prefix}_{tenant_id}"
service_response_queue = await queue_manager.channel.get_queue(name=response_queue_name)
time.sleep(10)
callback = on_message_callback(storage)
await service_response_queue.consume(callback=callback)
await queue_manager.stop_consumers()
if __name__ == "__main__":
tenant_ids = ["a", "b", "c", "d"]
# with ccf.ThreadPoolExecutor() as executor:
# results = executor.map(main, tenant_ids)
# for tenant in tenant_ids:
# main(tenant)
# tenant_ids = ["a", "b", "c", "d"]
send_service_request("redaction")
queue_manager = AsyncQueueManager(settings)
# for tenant in tenant_ids:
# send_tenant_event(tenant_id=tenant, event_type="create")
# asyncio.run(send_tenant_event(queue_manager, "test", "create"))
# # time.sleep(1)
# for tenant in tenant_ids:
# send_service_request(tenant_id=tenant)
asyncio.run(send_service_request(queue_manager,"redaction"))
# # time.sleep(1)
# for tenant in tenant_ids:
# send_tenant_event(tenant_id=tenant, event_type="delete")

View File

@ -0,0 +1,99 @@
import gzip
import json
import time
from operator import itemgetter
from threading import Thread
from kn_utils.logging import logger
from pyinfra.config.loader import load_settings, local_pyinfra_root_path
from pyinfra.queue.threaded_tenants import ServiceQueueManager, TenantQueueManager
from pyinfra.storage.storages.s3 import get_s3_storage_from_settings
settings = load_settings(local_pyinfra_root_path / "config/")
def upload_json_and_make_message_body(tenant_id: str):
    """Upload a small gzip-compressed JSON fixture to S3 and return the queue
    message body that points at the uploaded object."""
    dossier_id = "dossier"
    file_id = "file"
    suffix = "json.gz"
    payload = {
        "numberOfPages": 7,
        "sectionTexts": "data",
    }
    compressed = gzip.compress(json.dumps(payload).encode("utf-8"))
    object_name = f"{tenant_id}/{dossier_id}/{file_id}.{suffix}"
    storage = get_s3_storage_from_settings(settings)
    if not storage.has_bucket():
        storage.make_bucket()
    storage.put_object(object_name, compressed)
    # The response file extension tells the service where to store its result.
    return {
        "tenantId": tenant_id,
        "dossierId": dossier_id,
        "fileId": file_id,
        "targetFileExtension": suffix,
        "responseFileExtension": f"result.{suffix}",
    }
def tenant_event_message(tenant_id: str):
    """Build the payload published for a tenant lifecycle event."""
    return dict(tenantId=tenant_id)
def send_tenant_event(tenant_id: str, event_type: str):
    """Publish a tenant lifecycle event ('create' or 'delete'); unknown event
    types are logged and ignored. Consuming is stopped afterwards either way."""
    queue_manager = TenantQueueManager(settings)
    queue_manager.purge_queues()
    message = tenant_event_message(tenant_id)
    publishers = {
        "create": queue_manager.publish_message_to_tenant_created_queue,
        "delete": queue_manager.publish_message_to_tenant_deleted_queue,
    }
    publisher = publishers.get(event_type)
    if publisher is not None:
        publisher(message=message)
    else:
        logger.warning(f"Event type '{event_type}' not known.")
    queue_manager.stop_consuming()
def send_service_request(tenant_id: str):
    # Manual end-to-end check: publish one synthetic request for `tenant_id`,
    # then block on the response queue and print the result object from S3.
    queue_manager = ServiceQueueManager(settings)
    # NOTE(review): built from the *response* prefix, yet the "Put ... on"
    # log below pairs it with a message published to the input queue — the
    # log message is misleading; confirm which queue name was intended there.
    queue_name = f"{settings.rabbitmq.service_response_queue_prefix}_{tenant_id}"
    queue_manager.purge_queues()
    message = upload_json_and_make_message_body(tenant_id)
    queue_manager.publish_message_to_input_queue(tenant_id=tenant_id, message=message)
    logger.info(f"Put {message} on {queue_name}.")
    storage = get_s3_storage_from_settings(settings)
    # Blocking consume; after 15 s of silence pika yields (None, None, None),
    # which the `if not body` check uses to give up waiting.
    for method_frame, properties, body in queue_manager.channel.consume(
        queue=queue_name, inactivity_timeout=15
    ):
        if not body:
            break
        response = json.loads(body)
        logger.info(f"Received {response}")
        logger.info(f"Message headers: {properties.headers}")
        queue_manager.channel.basic_ack(method_frame.delivery_tag)
        # Deliberately shadows the `tenant_id` parameter with the response value.
        tenant_id, dossier_id, file_id = itemgetter("tenantId", "dossierId", "fileId")(response)
        suffix = message["responseFileExtension"]
        print(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}")
        result = storage.get_object(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}")
        result = json.loads(gzip.decompress(result))
        logger.info(f"Contents of result on storage: {result}")
        # Only the first real response is inspected.
        break
    queue_manager.stop_consuming()
if __name__ == "__main__":
tenant_ids = ["a", "b", "c", "d"]
for tenant in tenant_ids:
send_tenant_event(tenant_id=tenant, event_type="create")
for tenant in tenant_ids:
send_service_request(tenant_id=tenant)
for tenant in tenant_ids:
send_tenant_event(tenant_id=tenant, event_type="delete")