diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0177c31..6cc462c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,42 +1,54 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks -exclude: ^(docs/|notebooks/|data/|src/secrets/|src/static/|src/templates/|tests) +exclude: ^(docs/|notebooks/|data/|src/configs/|tests/|.hooks/) default_language_version: python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - exclude: bamboo-specs/bamboo.yml + name: Check Gitlab CI (unsafe) + args: [--unsafe] + files: .gitlab-ci.yml + - id: check-yaml + exclude: .gitlab-ci.yml + - id: check-toml + - id: detect-private-key + - id: check-added-large-files + args: ['--maxkb=10000'] + - id: check-case-conflict + - id: mixed-line-ending - # - repo: https://github.com/pycqa/pylint - # rev: v2.16.1 - # hooks: - # - id: pylint - # args: - # ["--max-line-length=120", "--errors-only", "--ignore-imports=true", ] + - repo: https://github.com/pre-commit/mirrors-pylint + rev: v3.0.0a5 + hooks: + - id: pylint + args: + - --disable=C0111,R0903 + - --max-line-length=120 - repo: https://github.com/pre-commit/mirrors-isort rev: v5.10.1 hooks: - id: isort - args: ["--profile", "black"] + args: + - --profile=black - repo: https://github.com/psf/black - rev: 23.12.1 + rev: 24.4.2 hooks: - id: black # exclude: ^(docs/|notebooks/|data/|src/secrets/) args: - --line-length=120 -# - repo: local -# hooks: -# - id: system -# name: PyLint -# entry: poetry run pylint -# language: system -# exclude: ^alembic/ -# files: \.py$ + + - repo: https://github.com/compilerla/conventional-pre-commit + rev: v3.2.0 + hooks: + - id: conventional-pre-commit + pass_filenames: false + stages: [commit-msg] + # args: [] # optional: list of Conventional Commits types to allow e.g. 
[feat, fix, ci, chore, test] diff --git a/README.md b/README.md index 8292a35..0baee28 100755 --- a/README.md +++ b/README.md @@ -34,34 +34,47 @@ The following table shows all necessary settings. You can find a preconfigured s bitbucket. These are the complete settings, you only need all if using all features of the service as described in the [complete example](pyinfra/examples.py). -| Environment Variable | Internal / .toml Name | Description | -|--------------------------------------|------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| LOGGING__LEVEL | logging.level | Log level | -| METRICS__PROMETHEUS__ENABLED | metrics.prometheus.enabled | Enable Prometheus metrics collection | -| METRICS__PROMETHEUS__PREFIX | metrics.prometheus.prefix | Prefix for Prometheus metrics (e.g. {product}-{service}) | -| WEBSERVER__HOST | webserver.host | Host of the webserver (offering e.g. /prometheus, /ready and /health endpoints) | -| WEBSERVER__PORT | webserver.port | Port of the webserver | -| RABBITMQ__HOST | rabbitmq.host | Host of the RabbitMQ server | -| RABBITMQ__PORT | rabbitmq.port | Port of the RabbitMQ server | -| RABBITMQ__USERNAME | rabbitmq.username | Username for the RabbitMQ server | -| RABBITMQ__PASSWORD | rabbitmq.password | Password for the RabbitMQ server | -| RABBITMQ__HEARTBEAT | rabbitmq.heartbeat | Heartbeat for the RabbitMQ server | -| RABBITMQ__CONNECTION_SLEEP | rabbitmq.connection_sleep | Sleep time intervals during message processing. Has to be a divider of heartbeat, and shouldn't be too big, since only in these intervals queue interactions happen (like receiving new messages) This is also the minimum time the service needs to process a message. 
| -| RABBITMQ__INPUT_QUEUE | rabbitmq.input_queue | Name of the input queue | -| RABBITMQ__OUTPUT_QUEUE | rabbitmq.output_queue | Name of the output queue | -| RABBITMQ__DEAD_LETTER_QUEUE | rabbitmq.dead_letter_queue | Name of the dead letter queue | -| STORAGE__BACKEND | storage.backend | Storage backend to use (currently only "s3" and "azure" are supported) | -| STORAGE__S3__BUCKET | storage.s3.bucket | Name of the S3 bucket | -| STORAGE__S3__ENDPOINT | storage.s3.endpoint | Endpoint of the S3 server | -| STORAGE__S3__KEY | storage.s3.key | Access key for the S3 server | -| STORAGE__S3__SECRET | storage.s3.secret | Secret key for the S3 server | -| STORAGE__S3__REGION | storage.s3.region | Region of the S3 server | -| STORAGE__AZURE__CONTAINER | storage.azure.container_name | Name of the Azure container | -| STORAGE__AZURE__CONNECTION_STRING | storage.azure.connection_string | Connection string for the Azure server | -| STORAGE__TENANT_SERVER__PUBLIC_KEY | storage.tenant_server.public_key | Public key of the tenant server | -| STORAGE__TENANT_SERVER__ENDPOINT | storage.tenant_server.endpoint | Endpoint of the tenant server | -| TRACING__OPENTELEMETRY__ENDPOINT | tracing.opentelemetry.endpoint | Endpoint to which OpenTelemetry traces are exported | -| TRACING__OPENTELEMETRY__SERVICE_NAME | tracing.opentelemetry.service_name | Name of the service as displayed in the traces collected | +| Environment Variable | Internal / .toml Name | Description | +| ------------------------------------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| LOGGING\_\_LEVEL | logging.level | Log level | +| CONCURRENCY\_\_ENABLED | concurrency.enabled | Enable multi tenant queue mode | +| METRICS\_\_PROMETHEUS\_\_ENABLED | 
metrics.prometheus.enabled | Enable Prometheus metrics collection | +| METRICS\_\_PROMETHEUS\_\_PREFIX | metrics.prometheus.prefix | Prefix for Prometheus metrics (e.g. {product}-{service}) | +| WEBSERVER\_\_HOST | webserver.host | Host of the webserver (offering e.g. /prometheus, /ready and /health endpoints) | +| WEBSERVER\_\_PORT | webserver.port | Port of the webserver | +| RABBITMQ\_\_HOST | rabbitmq.host | Host of the RabbitMQ server | +| RABBITMQ\_\_PORT | rabbitmq.port | Port of the RabbitMQ server | +| RABBITMQ\_\_USERNAME | rabbitmq.username | Username for the RabbitMQ server | +| RABBITMQ\_\_PASSWORD | rabbitmq.password | Password for the RabbitMQ server | +| RABBITMQ\_\_HEARTBEAT | rabbitmq.heartbeat | Heartbeat for the RabbitMQ server | +| RABBITMQ\_\_CONNECTION_SLEEP | rabbitmq.connection_sleep | Sleep time intervals during message processing. Has to be a divider of heartbeat, and shouldn't be too big, since only in these intervals queue interactions happen (like receiving new messages) This is also the minimum time the service needs to process a message. 
| +| RABBITMQ\_\_INPUT_QUEUE | rabbitmq.input_queue | Name of the input queue in single queue setting | +| RABBITMQ\_\_OUTPUT_QUEUE | rabbitmq.output_queue | Name of the output queue in single queue setting | +| RABBITMQ\_\_DEAD_LETTER_QUEUE | rabbitmq.dead_letter_queue | Name of the dead letter queue in single queue setting | +| RABBITMQ\_\_TENANT_EVENT_QUEUE_SUFFIX | rabbitmq.tenant_event_queue_suffix | Suffix for the tenant event queue in multi tenant/queue setting | +| RABBITMQ\_\_TENANT_EVENT_DLQ_SUFFIX | rabbitmq.tenant_event_dlq_suffix | Suffix for the dead letter queue in multi tenant/queue setting | +| RABBITMQ\_\_TENANT_EXCHANGE_NAME | rabbitmq.tenant_exchange_name | Name of tenant exchange in multi tenant/queue setting | +| RABBITMQ\_\_QUEUE_EXPIRATION_TIME | rabbitmq.queue_expiration_time | Time until queue expiration in multi tenant/queue setting | +| RABBITMQ\_\_SERVICE_REQUEST_QUEUE_PREFIX | rabbitmq.service_request_queue_prefix | Service request queue prefix in multi tenant/queue setting | +| RABBITMQ\_\_SERVICE_REQUEST_EXCHANGE_NAME | rabbitmq.service_request_exchange_name | Service request exchange name in multi tenant/queue setting | +| RABBITMQ\_\_SERVICE_RESPONSE_EXCHANGE_NAME | rabbitmq.service_response_exchange_name | Service response exchange name in multi tenant/queue setting | +| RABBITMQ\_\_SERVICE_DLQ_NAME | rabbitmq.service_dlq_name | Service dead letter queue name in multi tenant/queue setting | +| STORAGE\_\_BACKEND | storage.backend | Storage backend to use (currently only "s3" and "azure" are supported) | +| STORAGE\_\_S3\_\_BUCKET | storage.s3.bucket | Name of the S3 bucket | +| STORAGE\_\_S3\_\_ENDPOINT | storage.s3.endpoint | Endpoint of the S3 server | +| STORAGE\_\_S3\_\_KEY | storage.s3.key | Access key for the S3 server | +| STORAGE\_\_S3\_\_SECRET | storage.s3.secret | Secret key for the S3 server | +| STORAGE\_\_S3\_\_REGION | storage.s3.region | Region of the S3 server | +| STORAGE\_\_AZURE\_\_CONTAINER | 
storage.azure.container_name            | Name of the Azure container                                                                                                                                                                                                                                | +| STORAGE\_\_AZURE\_\_CONNECTION_STRING      | storage.azure.connection_string         | Connection string for the Azure server                                                                                                                                                                                                                                    | +| STORAGE\_\_TENANT_SERVER\_\_PUBLIC_KEY     | storage.tenant_server.public_key        | Public key of the tenant server                                                                                                                                                                                                                                           | +| STORAGE\_\_TENANT_SERVER\_\_ENDPOINT       | storage.tenant_server.endpoint          | Endpoint of the tenant server                                                                                                                                                                                                                                             | +| TRACING\_\_ENABLED                         | tracing.enabled                         | Enable tracing                                                                                                                                                                                                                                                            | +| TRACING\_\_TYPE                            | tracing.type                            | Tracing mode - possible values: "opentelemetry", "azure_monitor" (Expects APPLICATIONINSIGHTS_CONNECTION_STRING environment variable.)                                                                                                                                    | +| TRACING\_\_OPENTELEMETRY\_\_ENDPOINT       | tracing.opentelemetry.endpoint          | Endpoint to which OpenTelemetry traces are exported                                                                                                                                                                                                                       | +| TRACING\_\_OPENTELEMETRY\_\_SERVICE_NAME   | tracing.opentelemetry.service_name      | Name of the service as displayed in the traces collected                                                                                                                                                                                                                  | +| TRACING\_\_OPENTELEMETRY\_\_EXPORTER       | tracing.opentelemetry.exporter          | Name of exporter                                                                                                                                                                                                                                                          | +| KUBERNETES\_\_POD_NAME                     | kubernetes.pod_name                     | Service pod name                                                                                                                                                                                                                                                          | ### OpenTelemetry diff --git a/poetry.lock b/poetry.lock index 835cc7e..ecd8abd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,145 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+[[package]] +name = "aio-pika" +version = "9.4.2" +description = "Wrapper around the aiormq for asyncio and humans" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "aio_pika-9.4.2-py3-none-any.whl", hash = "sha256:22e5fa27d10a3817dd24c031cc477953aaf7c3be5f4f25d2582a55ec229adc4c"}, + {file = "aio_pika-9.4.2.tar.gz", hash = "sha256:d1217dc28d09be9dff96c06cdf2e82c92599a34f154e8932bf35373157f3424d"}, +] + +[package.dependencies] +aiormq = ">=6.8.0,<6.9.0" +yarl = "*" + +[[package]] +name = "aiohttp" +version = "3.9.5" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = 
"aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = 
"aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = 
"aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = 
"aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = 
"aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiormq" +version = "6.8.0" +description = "Pure python AMQP asynchronous client library" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "aiormq-6.8.0-py3-none-any.whl", hash = "sha256:9a16174dcae4078c957a773d2f02d3dfd6c2fcf12c909dc244333a458f2aeab0"}, + {file = "aiormq-6.8.0.tar.gz", hash = "sha256:198f9c7430feb7bc491016099a06266dc45880b6b1de3925d410fde6541a66fb"}, +] + +[package.dependencies] +pamqp = "3.3.0" +yarl = "*" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "adlfs" version = "2024.4.1" @@ -1934,57 +2074,57 @@ full = ["numpy", "ply"] [[package]] name = "grpcio" -version = "1.64.1" +version = "1.65.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = 
"grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = 
"grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = 
"grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, + {file = "grpcio-1.65.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:66ea0ca6108fcb391444bb7b37d04eac85bfaea1cfaf16db675d3734fc74ca1b"}, + {file = "grpcio-1.65.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:45d371dc4436fdcc31677f75b3ebe6175fbf0712ced49e0e4dfc18bbaf50f5a7"}, + {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:02dbbe113ec48581da07b7ddf52bfd49f5772374c4b5e36ea25131ce00b4f4f3"}, + {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c9ee7b8f1ac82cc24f223cd7ec803c17079f90e63022d3e66c5e53fff0afb99"}, + {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da927f8a44e42837ae0027a3a063c85e2b26491d2babd4554e116f66fd46045d"}, + {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9916ea670a589f95f2453a4a5040294ace096271c126e684a1e45e61af76c988"}, + {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c46114787c5f530e845d2781f914600aade04b4f132dd012efb31bc4f76a72bb"}, + {file = "grpcio-1.65.0-cp310-cp310-win32.whl", hash = 
"sha256:1362d94ac9c05b202736180d23296840e00f495859b206261e6ed03a6d41978b"}, + {file = "grpcio-1.65.0-cp310-cp310-win_amd64.whl", hash = "sha256:00ed0828980009ce852d98230cdd2d5a22a4bcb946b5a0f6334dfd8258374cd7"}, + {file = "grpcio-1.65.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:25303f3747522252dd9cfcbacb88d828a36040f513e28fba17ee6184ebc3d330"}, + {file = "grpcio-1.65.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a2b368717dd8e0f6cb7e412d3b3bfb0012f61c04b2f76dbed669b0f5cf3fb0c"}, + {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:93c41fb74c576dc0130b190a5775197282115c6abbe1d913d42d9a2f9d98fdae"}, + {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34eb4fb9ef4d11ea741d264916d1b31a9e169d539a6f1c8300e04c493eec747e"}, + {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55c41272f9d7d3503e3e3e93f3f98589f07075eebd24e1c291a1df2e8ef40a49"}, + {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c275bac926754022c89ef03f16470f65b811e2cc25f2167d365564ad43e31001"}, + {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b02db2a59071f4d05cfc4d0c972759778d27e1d3347f22ca178b91117ad10541"}, + {file = "grpcio-1.65.0-cp311-cp311-win32.whl", hash = "sha256:ec9f41b9b0eb6407a6edb21bc22cb32e03cae76cde9c1d8bb151ed77c2c5af94"}, + {file = "grpcio-1.65.0-cp311-cp311-win_amd64.whl", hash = "sha256:3efc8b0600870f5e518dd2738188b3ba7b1bb2668244c9a2a8c4debda4ffe62b"}, + {file = "grpcio-1.65.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:d787abafafa9ed71e17220d4178c883abdb380e0484bd8965cb2e06375c7495b"}, + {file = "grpcio-1.65.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:52347f21d6ec77d7e7e4d5037f5e8ac0a0c851856d9459f9f95b009c2c740b4a"}, + {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b16e1cd9b9cb9ac942cb20b7a2b1c5d35b9e61017e2998bf242a6f7748071795"}, + {file = 
"grpcio-1.65.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89bc9c8c6743a48f115fea8f3fada76be269d1914bf636e5fdb7cec9cdf192bc"}, + {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5a2ae900e6423438c4a9a5be38e9228621340a18333371215c0419d24a254ef"}, + {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4f451091ddd28f00c655f0b1e208cca705d40e4fde56a3cf849fead61a700d10"}, + {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4e30cd885e02abb98d6b0d5beb6259a567b0ce1416c498ec815fe383adb77864"}, + {file = "grpcio-1.65.0-cp312-cp312-win32.whl", hash = "sha256:9a9a0ce10a07923ebd48c056060052ebddfbec3193cdd32207af358ef317b00a"}, + {file = "grpcio-1.65.0-cp312-cp312-win_amd64.whl", hash = "sha256:87d9350ffe1a84b7441db7c70fdb4e51269a379f7a95d696d0d133831c4f9a19"}, + {file = "grpcio-1.65.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:0c504b30fc2fba143d9254e0240243b5866df9b7523162448797f4b21b5f30d5"}, + {file = "grpcio-1.65.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:480be4d41ceb5a7f22ecfc8db1ab68aeb58cc1a2da0865a91917d3cd0438dac7"}, + {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:984a1627b50d5df4a24120302ca95adb5139ba1c40354ba258fc2913666d8ee7"}, + {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f242956c0f4985dfcc920cd251cd7a899ca168e157e98c9b74a688657e813ad6"}, + {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ea93f570b2341c69635b8a333afb99fb4d5584f26a9cc94f06e56c943648aab"}, + {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bebefd76517a43d0e77a5dcd61a8b69e9775340d856a0b35c6368ae628f7714"}, + {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:356d10a491a92a08c21aef806379f7b020f591c23580e3d29aeeb59d45908c86"}, + {file = "grpcio-1.65.0-cp38-cp38-win32.whl", hash = 
"sha256:c3294fd3ef9faa1fe14ad15d72dd7d2ee9fee6d3bd29a08c53e59a3c94de9cc9"}, + {file = "grpcio-1.65.0-cp38-cp38-win_amd64.whl", hash = "sha256:a2defc49c984550f25034e88d17a7e69dba6deb2b981d8f56f19b3aaa788ff30"}, + {file = "grpcio-1.65.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:b73022222ed4bf718d3d8527a9b88b162074a62c7530d30f4e951b56304b0f19"}, + {file = "grpcio-1.65.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16e0f789158ecc8309e0a2f16cb8c5e4753f351a7673aab75f42783c83f1e38b"}, + {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:cb0bd8bfba21fe0318317bf11687c67a3f8ce726369c0b3ccf4e6607fc5bc5f2"}, + {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1096f0fa79ec601aefd71685d3a610cdde96274c38cd8adcef972660297669a"}, + {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e576a88ce82fea70e68c548aceb5cd560c27da50091581996858bbbe01230c83"}, + {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab70bd1ccb05ef373b691a9b9985289d8b2cf63c704471f5ee132e228d351af5"}, + {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03eab632a8ce8dba00d97482d2821bf752a7c3cb4dc051be6c587ad3ca1c3e6d"}, + {file = "grpcio-1.65.0-cp39-cp39-win32.whl", hash = "sha256:f19bb85795ca82e007be427e7b6ac5e730023ffbab69d39ddeb1b84c6339df16"}, + {file = "grpcio-1.65.0-cp39-cp39-win_amd64.whl", hash = "sha256:dbd7eeafa67d8e403ac61caa31ebda2861435dcfd7bb7953c4ef05ad2ecf74bf"}, + {file = "grpcio-1.65.0.tar.gz", hash = "sha256:2c7891f66daefc80cce1bed6bc0c2802d26dac46544ba1be79c4e7d85661dd73"}, ] [package.extras] @@ -3416,6 +3556,21 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "pamqp" +version = "3.3.0" +description = "RabbitMQ Focused AMQP low-level library" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0"}, + {file = "pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b"}, +] + +[package.extras] +codegen = ["lxml", "requests", "yapf"] +testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", "flake8-import-order", "flake8-print", "flake8-quotes", "flake8-rst-docstrings", "flake8-tuple", "yapf"] + [[package]] name = "parso" version = "0.8.4" @@ -3851,6 +4006,95 @@ files = [ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + 
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -4576,6 +4820,8 @@ python-versions = ">=3.8" files = [ {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, {file = "setuptools-70.3.0.tar.gz", hash = 
"sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, + {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, + {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, ] [package.extras] @@ -4718,6 +4964,21 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "tenacity" +version = "8.5.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "tabulate" version = "0.9.0" diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 6b62f98..68abe5e 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -1,11 +1,14 @@ +import asyncio + from dynaconf import Dynaconf from fastapi import FastAPI from kn_utils.logging import logger from pyinfra.config.loader import get_pyinfra_validators, validate_settings +from pyinfra.queue.async_manager import AsyncQueueManager, RabbitMQConfig from pyinfra.queue.callback import Callback from pyinfra.queue.manager import QueueManager -from pyinfra.utils.opentelemetry import instrument_pika, setup_trace, instrument_app +from pyinfra.utils.opentelemetry import instrument_app, instrument_pika, setup_trace from pyinfra.webserver.prometheus import ( add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings, @@ -34,8 +37,6 @@ def start_standard_queue_consumer( app = app or FastAPI() - queue_manager = QueueManager(settings) - if 
settings.metrics.prometheus.enabled: logger.info("Prometheus metrics enabled.") app = add_prometheus_endpoint(app) @@ -43,13 +44,40 @@ def start_standard_queue_consumer( if settings.tracing.enabled: setup_trace(settings) - + instrument_pika() instrument_app(app) - - app = add_health_check_endpoint(app, queue_manager.is_ready) + + if settings.concurrency.enabled: + config = RabbitMQConfig( + host=settings.rabbitmq.host, + port=settings.rabbitmq.port, + username=settings.rabbitmq.username, + password=settings.rabbitmq.password, + heartbeat=settings.rabbitmq.heartbeat, + input_queue_prefix=settings.rabbitmq.service_request_queue_prefix, + tenant_event_queue_suffix=settings.rabbitmq.tenant_event_queue_suffix, + tenant_exchange_name=settings.rabbitmq.tenant_exchange_name, + service_request_exchange_name=settings.rabbitmq.service_request_exchange_name, + service_response_exchange_name=settings.rabbitmq.service_response_exchange_name, + service_dead_letter_queue_name=settings.rabbitmq.service_dlq_name, + queue_expiration_time=settings.rabbitmq.queue_expiration_time, + pod_name=settings.kubernetes.pod_name, + ) + manager = AsyncQueueManager( + config=config, tenant_service_url=settings.storage.tenant_server.endpoint, message_processor=callback + ) + else: + manager = QueueManager(settings) + + app = add_health_check_endpoint(app, manager.is_ready) webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() - queue_manager.start_consuming(callback) \ No newline at end of file + if isinstance(manager, AsyncQueueManager): + asyncio.run(manager.run()) + elif isinstance(manager, QueueManager): + manager.start_consuming(callback) + else: + logger.warning(f"Behavior for type {type(manager)} is not defined") diff --git a/pyinfra/queue/async_manager.py b/pyinfra/queue/async_manager.py new file mode 100644 index 0000000..c435ef3 --- /dev/null +++ b/pyinfra/queue/async_manager.py @@ -0,0 +1,258 @@ +import asyncio +import json +import signal +from 
dataclasses import dataclass, field +from typing import Any, Callable, Dict, Set + +import aiohttp +from aio_pika import ExchangeType, IncomingMessage, Message, connect_robust +from aio_pika.abc import ( + AbstractChannel, + AbstractConnection, + AbstractExchange, + AbstractIncomingMessage, +) +from kn_utils.logging import logger +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential_jitter, +) + + +@dataclass +class RabbitMQConfig: + host: str + port: int + username: str + password: str + heartbeat: int + input_queue_prefix: str + tenant_event_queue_suffix: str + tenant_exchange_name: str + service_request_exchange_name: str + service_response_exchange_name: str + service_dead_letter_queue_name: str + queue_expiration_time: int + pod_name: str + + connection_params: Dict[str, object] = field(init=False) + + def __post_init__(self): + self.connection_params = { + "host": self.host, + "port": self.port, + "login": self.username, + "password": self.password, + "client_properties": {"heartbeat": self.heartbeat}, + } + + +class AsyncQueueManager: + def __init__( + self, + config: RabbitMQConfig, + tenant_service_url: str, + message_processor: Callable[[Dict[str, Any]], Dict[str, Any]], + ): + self.config = config + self.tenant_service_url = tenant_service_url + self.message_processor = message_processor + + self.connection: AbstractConnection | None = None + self.channel: AbstractChannel | None = None + self.tenant_exchange: AbstractExchange | None = None + self.input_exchange: AbstractExchange | None = None + self.output_exchange: AbstractExchange | None = None + self.tenant_queues: Dict[str, AbstractChannel] = {} + + async def connect(self) -> None: + self.connection = await connect_robust(**self.config.connection_params) + self.channel = await self.connection.channel() + await self.channel.set_qos(prefetch_count=1) + + async def is_ready(self) -> bool: + await self.connect() + return await self.channel.is_open + + async 
def setup_exchanges(self) -> None: + self.tenant_exchange = await self.channel.declare_exchange( + self.config.tenant_exchange_name, ExchangeType.TOPIC, durable=True + ) + self.input_exchange = await self.channel.declare_exchange( + self.config.service_request_exchange_name, ExchangeType.DIRECT, durable=True + ) + self.output_exchange = await self.channel.declare_exchange( + self.config.service_response_exchange_name, ExchangeType.DIRECT, durable=True + ) + + async def setup_tenant_queue(self) -> None: + queue = await self.channel.declare_queue( + f"{self.config.pod_name}_{self.config.tenant_event_queue_suffix}", + durable=True, + arguments={ + "x-dead-letter-exchange": "", + "x-dead-letter-routing-key": self.config.service_dead_letter_queue_name, + "x-expires": self.config.queue_expiration_time, + "x-max-priority": 2, + }, + ) + await queue.bind(self.tenant_exchange, routing_key="tenant.*") + await queue.consume(self.process_tenant_message) + + async def process_tenant_message(self, message: AbstractIncomingMessage) -> None: + async with message.process(): + message_body = json.loads(message.body.decode()) + logger.debug(f"Tenant message received: {message_body}") + tenant_id = message_body["tenantId"] + routing_key = message.routing_key + + if routing_key == "tenant.created": + await self.create_tenant_queues(tenant_id) + elif routing_key == "tenant.delete": + await self.delete_tenant_queues(tenant_id) + + async def create_tenant_queues(self, tenant_id: str) -> None: + queue_name = f"{self.config.input_queue_prefix}_{tenant_id}" + logger.info(f"Declaring queue: {queue_name}") + input_queue = await self.channel.declare_queue( + queue_name, + durable=True, + arguments={ + "x-dead-letter-exchange": "", + "x-dead-letter-routing-key": self.config.service_dead_letter_queue_name, + "x-expires": self.config.queue_expiration_time, + "x-max-priority": 2, + }, + ) + await input_queue.bind(self.input_exchange, routing_key=tenant_id) + await 
input_queue.consume(self.process_input_message) + + self.tenant_queues[tenant_id] = input_queue + logger.info(f"Created queues for tenant {tenant_id}") + + async def delete_tenant_queues(self, tenant_id: str) -> None: + if tenant_id in self.tenant_queues: + # somehow queue.delete() does not work here + await self.channel.queue_delete(f"{self.config.input_queue_prefix}_{tenant_id}") + del self.tenant_queues[tenant_id] + logger.info(f"Deleted queues for tenant {tenant_id}") + + async def process_input_message(self, message: IncomingMessage) -> None: + async def process_message_body_and_await_result(unpacked_message_body): + return self.message_processor(unpacked_message_body) + + async with message.process(ignore_processed=True): + if message.redelivered: + logger.warning(f"Declining message with {message.delivery_tag=} due to it being redelivered.") + await message.nack(requeue=False) + return + + if message.body.decode("utf-8") == "STOP": + logger.info("Received stop signal, stopping consumption...") + await message.ack() + # TODO: shutdown is probably not the right call here - align w/ Dev what should happen on stop signal + await self.shutdown() + return + + try: + tenant_id = message.routing_key + + filtered_message_headers = ( + {k: v for k, v in message.headers.items() if k.lower().startswith("x-")} if message.headers else {} + ) + + logger.debug(f"Processing message with {filtered_message_headers=}.") + + result: dict = await ( + process_message_body_and_await_result({**json.loads(message.body), **filtered_message_headers}) + or {} + ) + + if result: + await self.publish_to_output_exchange(tenant_id, result, filtered_message_headers) + await message.ack() + logger.debug(f"Message with {message.delivery_tag=} acknowledged.") + else: + raise ValueError(f"Could not process message with {message.body=}.") + + except json.JSONDecodeError: + await message.nack(requeue=False) + logger.error(f"Invalid JSON in input message: {message.body}") + except FileNotFoundError 
as e: + logger.warning(f"{e}, declining message with {message.delivery_tag=}.") + await message.nack(requeue=False) + except Exception as e: + await message.nack(requeue=False) + logger.error(f"Error processing input message: {e}", exc_info=True) + raise + + async def publish_to_output_exchange(self, tenant_id: str, result: Dict[str, Any], headers: Dict[str, Any]) -> None: + await self.output_exchange.publish( + Message(body=json.dumps(result).encode(), headers=headers), + routing_key=tenant_id, + ) + logger.info(f"Published result to queue {tenant_id}.") + + @retry( + stop=stop_after_attempt(5), + wait=wait_exponential_jitter(initial=1, max=10), + retry=retry_if_exception_type(aiohttp.ClientResponseError), + reraise=True, + ) + async def fetch_active_tenants(self) -> Set[str]: + async with aiohttp.ClientSession() as session: + async with session.get(self.tenant_service_url) as response: + response.raise_for_status() + if response.headers["content-type"].lower() == "application/json": + data = await response.json() + return {tenant["tenantId"] for tenant in data} + else: + logger.error( + f"Failed to fetch active tenants. Content type is not JSON: {response.headers['content-type'].lower()}" + ) + return set() + + async def initialize_tenant_queues(self) -> None: + try: + active_tenants = await self.fetch_active_tenants() + except aiohttp.ClientResponseError: + logger.warning("API calls to tenant server failed. 
No tenant queues initialized.") + active_tenants = set() + for tenant_id in active_tenants: + await self.create_tenant_queues(tenant_id) + + async def run(self) -> None: + stop = asyncio.Event() + + def signal_handler(*_): + logger.info("Signal received, shutting down...") + stop.set() + + loop = asyncio.get_running_loop() + for sig in (signal.SIGINT, signal.SIGTERM): + loop.add_signal_handler(sig, signal_handler) + + try: + await self.connect() + await self.setup_exchanges() + await self.initialize_tenant_queues() + await self.setup_tenant_queue() + + logger.info("RabbitMQ handler is running. Press CTRL+C to exit.") + await stop.wait() # Run until stop signal received + except asyncio.CancelledError: + logger.warning("Operation cancelled.") + except Exception as e: + logger.error(f"An error occurred: {e}", exc_info=True) + finally: + await self.shutdown() + + async def shutdown(self) -> None: + logger.info("Shutting down RabbitMQ handler...") + if self.channel: + await self.channel.close() + if self.connection: + await self.connection.close() + logger.info("RabbitMQ handler shut down successfully.") diff --git a/pyinfra/storage/utils.py b/pyinfra/storage/utils.py index f2fc5e4..efc9fbb 100644 --- a/pyinfra/storage/utils.py +++ b/pyinfra/storage/utils.py @@ -20,6 +20,17 @@ class DossierIdFileIdDownloadPayload(BaseModel): return f"{self.dossierId}/{self.fileId}.{self.targetFileExtension}" +class TenantIdDossierIdFileIdDownloadPayload(BaseModel): + tenantId: str + dossierId: str + fileId: str + targetFileExtension: str + + @property + def targetFilePath(self): + return f"{self.tenantId}/{self.dossierId}/{self.fileId}.{self.targetFileExtension}" + + class DossierIdFileIdUploadPayload(BaseModel): dossierId: str fileId: str @@ -28,6 +39,17 @@ class DossierIdFileIdUploadPayload(BaseModel): @property def responseFilePath(self): return f"{self.dossierId}/{self.fileId}.{self.responseFileExtension}" + + +class TenantIdDossierIdFileIdUploadPayload(BaseModel): + tenantId: str 
+ dossierId: str + fileId: str + responseFileExtension: str + + @property + def responseFilePath(self): + return f"{self.tenantId}/{self.dossierId}/{self.fileId}.{self.responseFileExtension}" class TargetResponseFilePathDownloadPayload(BaseModel): @@ -56,7 +78,9 @@ def download_data_as_specified_in_message(storage: Storage, raw_payload: dict) - """ try: - if "dossierId" in raw_payload: + if "tenantId" in raw_payload and "dossierId" in raw_payload: + payload = TenantIdDossierIdFileIdDownloadPayload(**raw_payload) + elif "tenantId" not in raw_payload and "dossierId" in raw_payload: payload = DossierIdFileIdDownloadPayload(**raw_payload) else: payload = TargetResponseFilePathDownloadPayload(**raw_payload) @@ -114,7 +138,9 @@ def upload_data_as_specified_in_message(storage: Storage, raw_payload: dict, dat """ try: - if "dossierId" in raw_payload: + if "tenantId" in raw_payload and "dossierId" in raw_payload: + payload = TenantIdDossierIdFileIdUploadPayload(**raw_payload) + elif "tenantId" not in raw_payload and "dossierId" in raw_payload: payload = DossierIdFileIdUploadPayload(**raw_payload) else: payload = TargetResponseFilePathUploadPayload(**raw_payload) diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index 7a05233..66611ae 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -39,13 +39,16 @@ def setup_trace(settings: Dynaconf, service_name: str = None, exporter: SpanExpo if tracing_type == "azure_monitor": # Configure OpenTelemetry to use Azure Monitor with the # APPLICATIONINSIGHTS_CONNECTION_STRING environment variable. 
- logger.info("Azure Monitor tracing enabled.") - configure_azure_monitor() + try: + configure_azure_monitor() + logger.info("Azure Monitor tracing enabled.") + except Exception as exception: + logger.warning(f"Azure Monitor tracing could not be enabled: {exception}") elif tracing_type == "opentelemetry": - logger.info("OpenTelemetry tracing enabled.") configure_opentelemtry_tracing(settings, service_name, exporter) + logger.info("OpenTelemetry tracing enabled.") else: - raise Exception(f"Unknown tracing type: {tracing_type}") + logger.warning(f"Unknown tracing type: {tracing_type}. Tracing could not be enabled.") def configure_opentelemtry_tracing(settings: Dynaconf, service_name: str = None, exporter: SpanExporter = None): diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index 8ca4e1d..9a2a438 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -1,3 +1,4 @@ +import inspect import logging import threading from typing import Callable @@ -8,15 +9,10 @@ from fastapi import FastAPI from pyinfra.config.loader import validate_settings from pyinfra.config.validators import webserver_validators -from pyinfra.utils.opentelemetry import instrument_app, setup_trace def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: validate_settings(settings, validators=webserver_validators) - - if settings.tracing.enabled: - return create_webserver_thread_with_tracing(app, settings) - return create_webserver_thread(app=app, port=settings.webserver.port, host=settings.webserver.host) @@ -29,18 +25,6 @@ def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thr return thread -def create_webserver_thread_with_tracing(app: FastAPI, settings: Dynaconf) -> threading.Thread: - def inner(): - setup_trace(settings) - instrument_app(app) - uvicorn.run(app, port=settings.webserver.port, host=settings.webserver.host, log_level=logging.WARNING) - - thread = threading.Thread(target=inner) - 
thread.daemon = True - - return thread - - HealthFunction = Callable[[], bool] @@ -48,13 +32,23 @@ def add_health_check_endpoint(app: FastAPI, health_function: HealthFunction) -> """Add a health check endpoint to the app. The health function should return True if the service is healthy, and False otherwise. The health function is called when the endpoint is hit. """ + if inspect.iscoroutinefunction(health_function): - @app.get("/health") - @app.get("/ready") - def check_health(): - if health_function(): - return {"status": "OK"}, 200 - else: + @app.get("/health") + @app.get("/ready") + async def async_check_health(): + alive = await health_function() + if alive: + return {"status": "OK"}, 200 + return {"status": "Service Unavailable"}, 503 + + else: + + @app.get("/health") + @app.get("/ready") + def check_health(): + if health_function(): + return {"status": "OK"}, 200 return {"status": "Service Unavailable"}, 503 return app diff --git a/pyproject.toml b/pyproject.toml index 436617d..937c269 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pyinfra" -version = "2.3.0" +version = "3.0.0" description = "" authors = ["Team Research "] license = "All rights reseverd" @@ -35,6 +35,9 @@ opentelemetry-instrumentation-fastapi = "^0.46b0" wcwidth = "<=0.2.12" azure-monitor-opentelemetry = "^1.6.0" protobuf = "^4.25.3" # FIXME: update to ^5.27.2 after opentelemetry is updated (see README.md/Protobuf) +aio-pika = "^9.4.2" +aiohttp = "^3.9.5" +tenacity = "^8.5.0" [tool.poetry.group.dev.dependencies] pytest = "^7" diff --git a/scripts/send_async_request.py b/scripts/send_async_request.py new file mode 100644 index 0000000..931b2b8 --- /dev/null +++ b/scripts/send_async_request.py @@ -0,0 +1,150 @@ +import asyncio +import gzip +import json +from operator import itemgetter +from typing import Any, Dict + +from aio_pika import Message +from aio_pika.abc import AbstractIncomingMessage +from kn_utils.logging import logger + +from 
pyinfra.config.loader import load_settings, local_pyinfra_root_path +from pyinfra.queue.async_manager import AsyncQueueManager, RabbitMQConfig +from pyinfra.storage.storages.s3 import S3Storage, get_s3_storage_from_settings + +settings = load_settings(local_pyinfra_root_path / "config/") + + +async def dummy_message_processor(message: Dict[str, Any]) -> Dict[str, Any]: + logger.info(f"Processing message: {message}") + # await asyncio.sleep(1) # Simulate processing time + + storage = get_s3_storage_from_settings(settings) + tenant_id, dossier_id, file_id = itemgetter("tenantId", "dossierId", "fileId")(message) + suffix = message["responseFileExtension"] + + object_name = f"{tenant_id}/{dossier_id}/{file_id}.{message['targetFileExtension']}" + original_content = json.loads(gzip.decompress(storage.get_object(object_name))) + processed_content = { + "processedPages": original_content["numberOfPages"], + "processedSectionTexts": f"Processed: {original_content['sectionTexts']}", + } + + processed_object_name = f"{tenant_id}/{dossier_id}/{file_id}.{suffix}" + processed_data = gzip.compress(json.dumps(processed_content).encode("utf-8")) + storage.put_object(processed_object_name, processed_data) + + processed_message = message.copy() + processed_message["processed"] = True + processed_message["processor_message"] = "This message was processed by the dummy processor" + + logger.info(f"Finished processing message. 
Result: {processed_message}") + return processed_message + + +async def on_response_message_callback(storage: S3Storage): + async def on_message(message: AbstractIncomingMessage) -> None: + async with message.process(ignore_processed=True): + if not message.body: + raise ValueError + response = json.loads(message.body) + logger.info(f"Received {response}") + logger.info(f"Message headers: {message.properties.headers}") + await message.ack() + tenant_id, dossier_id, file_id = itemgetter("tenantId", "dossierId", "fileId")(response) + suffix = response["responseFileExtension"] + result = storage.get_object(f"{tenant_id}/{dossier_id}/{file_id}.{suffix}") + result = json.loads(gzip.decompress(result)) + logger.info(f"Contents of result on storage: {result}") + + return on_message + + +def upload_json_and_make_message_body(tenant_id: str): + dossier_id, file_id, suffix = "dossier", "file", "json.gz" + content = { + "numberOfPages": 7, + "sectionTexts": "data", + } + + object_name = f"{tenant_id}/{dossier_id}/{file_id}.{suffix}" + data = gzip.compress(json.dumps(content).encode("utf-8")) + + storage = get_s3_storage_from_settings(settings) + if not storage.has_bucket(): + storage.make_bucket() + storage.put_object(object_name, data) + + message_body = { + "tenantId": tenant_id, + "dossierId": dossier_id, + "fileId": file_id, + "targetFileExtension": suffix, + "responseFileExtension": f"result.{suffix}", + } + return message_body, storage + + +async def test_rabbitmq_handler() -> None: + tenant_service_url = settings.storage.tenant_server.endpoint + + config = RabbitMQConfig( + host=settings.rabbitmq.host, + port=settings.rabbitmq.port, + username=settings.rabbitmq.username, + password=settings.rabbitmq.password, + heartbeat=settings.rabbitmq.heartbeat, + input_queue_prefix=settings.rabbitmq.service_request_queue_prefix, + tenant_event_queue_suffix=settings.rabbitmq.tenant_event_queue_suffix, + tenant_exchange_name=settings.rabbitmq.tenant_exchange_name, + 
service_request_exchange_name=settings.rabbitmq.service_request_exchange_name, + service_response_exchange_name=settings.rabbitmq.service_response_exchange_name, + service_dead_letter_queue_name=settings.rabbitmq.service_dlq_name, + queue_expiration_time=settings.rabbitmq.queue_expiration_time, + pod_name=settings.kubernetes.pod_name, + ) + + handler = AsyncQueueManager(config, tenant_service_url, dummy_message_processor) + + await handler.connect() + await handler.setup_exchanges() + + tenant_id = "test_tenant" + + # Test tenant creation + create_message = {"tenantId": tenant_id} + await handler.tenant_exchange.publish( + Message(body=json.dumps(create_message).encode()), routing_key="tenant.created" + ) + logger.info(f"Sent create tenant message for {tenant_id}") + await asyncio.sleep(0.5) # Wait for queue creation + + # Prepare service request + service_request, storage = upload_json_and_make_message_body(tenant_id) + + # Test service request + await handler.input_exchange.publish(Message(body=json.dumps(service_request).encode()), routing_key=tenant_id) + logger.info(f"Sent service request for {tenant_id}") + await asyncio.sleep(5) # Wait for message processing + + # Consume service request + response_queue = await handler.channel.declare_queue(name=f"response_queue_{tenant_id}") + await response_queue.bind(exchange=handler.output_exchange, routing_key=tenant_id) + callback = await on_response_message_callback(storage) + await response_queue.consume(callback=callback) + + await asyncio.sleep(5) # Wait for message processing + + # Test tenant deletion + delete_message = {"tenantId": tenant_id} + await handler.tenant_exchange.publish( + Message(body=json.dumps(delete_message).encode()), routing_key="tenant.delete" + ) + logger.info(f"Sent delete tenant message for {tenant_id}") + await asyncio.sleep(0.5) # Wait for queue deletion + + await handler.connection.close() + + +if __name__ == "__main__": + asyncio.run(test_rabbitmq_handler()) diff --git 
a/tests/docker-compose.yml b/tests/docker-compose.yml index c53537c..108a437 100644 --- a/tests/docker-compose.yml +++ b/tests/docker-compose.yml @@ -1,31 +1,41 @@ -version: '2' +version: '3.8' services: minio: - image: minio/minio:RELEASE.2022-06-11T19-55-32Z + image: minio/minio:latest + container_name: minio ports: - "9000:9000" environment: - MINIO_ROOT_PASSWORD=password - MINIO_ROOT_USER=root volumes: - - /tmp/minio_store:/data + - /tmp/data/minio_store:/data command: server /data - network_mode: "bridge" + network_mode: "bridge" + extra_hosts: + - "host.docker.internal:host-gateway" rabbitmq: - image: docker.io/bitnami/rabbitmq:3.9.8 + image: docker.io/bitnami/rabbitmq:latest + container_name: rabbitmq ports: - - '4369:4369' - - '5551:5551' - - '5552:5552' + # - '4369:4369' + # - '5551:5551' + # - '5552:5552' - '5672:5672' - - '25672:25672' - '15672:15672' + # - '25672:25672' environment: - RABBITMQ_SECURE_PASSWORD=yes - RABBITMQ_VM_MEMORY_HIGH_WATERMARK=100% - RABBITMQ_DISK_FREE_ABSOLUTE_LIMIT=20Gi + - RABBITMQ_MANAGEMENT_ALLOW_WEB_ACCESS=true network_mode: "bridge" volumes: - - /opt/bitnami/rabbitmq/.rabbitmq/:/data/bitnami -volumes: - mdata: \ No newline at end of file + - /tmp/bitnami/rabbitmq/.rabbitmq/:/data/bitnami + healthcheck: + test: [ "CMD", "curl", "-f", "http://localhost:15672" ] + interval: 30s + timeout: 10s + retries: 5 + extra_hosts: + - "host.docker.internal:host-gateway"