From 48d74b430779c8778d259ae2cde71f3b5c3eb2aa Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 18 Aug 2023 11:15:47 +0200 Subject: [PATCH 1/7] Add support for absolute file paths Introduces new payload parsing logic to be able to process absolute file paths. The queue message is expected to contain the keys "targetFilePath" and "responseFilePath". To ensure backward-compatibility, the legacy "dossierId", "fileId" messages are still supported. --- pyinfra/config.py | 2 + pyinfra/payload_processing/payload.py | 143 +++++++++++++++++------- pyinfra/payload_processing/processor.py | 27 ++--- tests/payload_parsing_test.py | 10 +- tests/payload_processor_test.py | 4 +- 5 files changed, 124 insertions(+), 62 deletions(-) diff --git a/pyinfra/config.py b/pyinfra/config.py index fe2d72e..396e20d 100644 --- a/pyinfra/config.py +++ b/pyinfra/config.py @@ -92,6 +92,8 @@ class Config: self.allowed_file_types = ["json", "pdf"] self.allowed_compression_types = ["gz"] + self.allowed_processing_parameters = ["operation"] + # config for x-tenant-endpoint to receive storage connection information per tenant self.tenant_decryption_public_key = read_from_environment("TENANT_PUBLIC_KEY", "redaction") self.tenant_endpoint = read_from_environment("TENANT_ENDPOINT", "http://tenant-user-management:8081/internal-api/tenants") diff --git a/pyinfra/payload_processing/payload.py b/pyinfra/payload_processing/payload.py index ef5f394..5557d43 100644 --- a/pyinfra/payload_processing/payload.py +++ b/pyinfra/payload_processing/payload.py @@ -1,59 +1,97 @@ from dataclasses import dataclass +from functools import singledispatch +from funcy import project from itertools import chain from operator import itemgetter from typing import Union, Sized -from funcy import project - +from pyinfra import logger from pyinfra.config import Config from pyinfra.utils.file_extension_parsing import make_file_extension_parser @dataclass class QueueMessagePayload: - dossier_id: str - file_id: str - 
x_tenant_id: Union[str, None] + """Default one-to-one payload, where the message contains the absolute file paths for the target and response files, + that have to be acquired from the storage.""" - target_file_extension: str - response_file_extension: str + target_file_path: str + response_file_path: str target_file_type: Union[str, None] target_compression_type: Union[str, None] response_file_type: Union[str, None] response_compression_type: Union[str, None] - target_file_name: str - response_file_name: str + x_tenant_id: Union[str, None] processing_kwargs: dict +@dataclass +class LegacyQueueMessagePayload(QueueMessagePayload): + """Legacy one-to-one payload, where the message contains the dossier and file ids, and the file extensions that have + to be used to construct the absolute file paths for the target and response files, that have to be acquired from the + storage.""" + + dossier_id: str + file_id: str + + target_file_extension: str + response_file_extension: str + + class QueueMessagePayloadParser: - def __init__(self, file_extension_parser, allowed_processing_args=("operation",)): + def __init__(self, file_extension_parser, allowed_processing_parameters): self.parse_file_extensions = file_extension_parser - self.allowed_args = allowed_processing_args + self.allowed_processing_params = allowed_processing_parameters def __call__(self, payload: dict) -> QueueMessagePayload: - """Translate the queue message payload to the internal QueueMessagePayload object.""" - return self._parse_queue_message_payload(payload) + if maybe_legacy_payload(payload): + logger.debug("Legacy payload detected.") + return self._parse_legacy_queue_message_payload(payload) + else: + return self._parse_queue_message_payload(payload) def _parse_queue_message_payload(self, payload: dict) -> QueueMessagePayload: + target_file_path, response_file_path = itemgetter("targetFilePath", "responseFilePath")(payload) + + target_file_type, target_compression_type, response_file_type, 
response_compression_type = chain.from_iterable( + map(self.parse_file_extensions, [target_file_path, response_file_path]) + ) + + x_tenant_id = payload.get("X-TENANT-ID") + + processing_kwargs = project(payload, self.allowed_processing_params) + + return QueueMessagePayload( + target_file_path=target_file_path, + response_file_path=response_file_path, + target_file_type=target_file_type, + target_compression_type=target_compression_type, + response_file_type=response_file_type, + response_compression_type=response_compression_type, + x_tenant_id=x_tenant_id, + processing_kwargs=processing_kwargs, + ) + + def _parse_legacy_queue_message_payload(self, payload: dict) -> LegacyQueueMessagePayload: dossier_id, file_id, target_file_extension, response_file_extension = itemgetter( "dossierId", "fileId", "targetFileExtension", "responseFileExtension" )(payload) - x_tenant_id = payload.get("X-TENANT-ID") + + target_file_path = f"{dossier_id}/{file_id}.{target_file_extension}" + response_file_path = f"{dossier_id}/{file_id}.{response_file_extension}" target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( map(self.parse_file_extensions, [target_file_extension, response_file_extension]) ) - target_file_name = f"{dossier_id}/{file_id}.{target_file_extension}" - response_file_name = f"{dossier_id}/{file_id}.{response_file_extension}" + x_tenant_id = payload.get("X-TENANT-ID") - processing_kwargs = project(payload, self.allowed_args) + processing_kwargs = project(payload, self.allowed_processing_params) - return QueueMessagePayload( + return LegacyQueueMessagePayload( dossier_id=dossier_id, file_id=file_id, x_tenant_id=x_tenant_id, @@ -63,36 +101,59 @@ class QueueMessagePayloadParser: target_compression_type=target_compression_type, response_file_type=response_file_type, response_compression_type=response_compression_type, - target_file_name=target_file_name, - response_file_name=response_file_name, + 
target_file_path=target_file_path, + response_file_path=response_file_path, processing_kwargs=processing_kwargs, ) +def maybe_legacy_payload(payload: dict) -> bool: + return {"dossierId", "fileId", "targetFileExtension", "responseFileExtension"}.issubset(payload.keys()) + + def get_queue_message_payload_parser(config: Config) -> QueueMessagePayloadParser: file_extension_parser = make_file_extension_parser(config.allowed_file_types, config.allowed_compression_types) - return QueueMessagePayloadParser(file_extension_parser) + return QueueMessagePayloadParser(file_extension_parser, config.allowed_processing_parameters) -class QueueMessagePayloadFormatter: - @staticmethod - def format_service_processing_result_for_storage( - queue_message_payload: QueueMessagePayload, service_processing_result: Sized - ) -> dict: - """Format the results of a processing function with the QueueMessagePayload for the storage upload.""" - return { - "dossierId": queue_message_payload.dossier_id, - "fileId": queue_message_payload.file_id, - "targetFileExtension": queue_message_payload.target_file_extension, - "responseFileExtension": queue_message_payload.response_file_extension, - "data": service_processing_result, - } - - @staticmethod - def format_to_queue_message_response_body(queue_message_payload: QueueMessagePayload) -> dict: - """Format QueueMessagePayload for the AMPQ response after processing.""" - return {"dossierId": queue_message_payload.dossier_id, "fileId": queue_message_payload.file_id} +@singledispatch +def format_service_processing_result_for_storage(payload: QueueMessagePayload, result: Sized) -> dict: + raise NotImplementedError("Unsupported payload type") -def get_queue_message_payload_formatter() -> QueueMessagePayloadFormatter: - return QueueMessagePayloadFormatter() +@format_service_processing_result_for_storage.register(LegacyQueueMessagePayload) +def _(payload: LegacyQueueMessagePayload, result: Sized) -> dict: + return { + "dossierId": payload.dossier_id, + 
"fileId": payload.file_id, + "targetFileExtension": payload.target_file_extension, + "responseFileExtension": payload.response_file_extension, + "data": result, + } + + +@format_service_processing_result_for_storage.register(QueueMessagePayload) +def _(payload: QueueMessagePayload, result: Sized) -> dict: + return { + "targetFilePath": payload.target_file_path, + "responseFilePath": payload.response_file_path, + "data": result, + } + + +@singledispatch +def format_to_queue_message_response_body(queue_message_payload: QueueMessagePayload) -> dict: + raise NotImplementedError("Unsupported payload type") + + +@format_to_queue_message_response_body.register(LegacyQueueMessagePayload) +def _(queue_message_payload: LegacyQueueMessagePayload) -> dict: + return {"dossierId": queue_message_payload.dossier_id, "fileId": queue_message_payload.file_id} + + +@format_to_queue_message_response_body.register(QueueMessagePayload) +def _(queue_message_payload: QueueMessagePayload) -> dict: + return { + "targetFilePath": queue_message_payload.target_file_path, + "responseFilePath": queue_message_payload.response_file_path, + } diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py index e4359f9..5f98d22 100644 --- a/pyinfra/payload_processing/processor.py +++ b/pyinfra/payload_processing/processor.py @@ -8,8 +8,9 @@ from pyinfra.payload_processing.monitor import get_monitor_from_config from pyinfra.payload_processing.payload import ( QueueMessagePayloadParser, get_queue_message_payload_parser, - QueueMessagePayloadFormatter, - get_queue_message_payload_formatter, + format_service_processing_result_for_storage, + format_to_queue_message_response_body, + QueueMessagePayload, ) from pyinfra.storage.storage import make_downloader, make_uploader from pyinfra.storage.storage_info import ( @@ -29,7 +30,6 @@ class PayloadProcessor: default_storage_info: StorageInfo, get_storage_info_from_tenant_id, payload_parser: QueueMessagePayloadParser, - 
payload_formatter: QueueMessagePayloadFormatter, data_processor: Callable, ): """Wraps an analysis function specified by a service (e.g. NER service) in pre- and post-processing steps. @@ -39,14 +39,11 @@ class PayloadProcessor: x_tenant_id is not provided in the queue payload. get_storage_info_from_tenant_id: Callable to acquire storage info from a given tenant id. payload_parser: Parser that translates the queue message payload to the required QueueMessagePayload object - payload_formatter: Formatter for the storage upload result and the queue message response body data_processor: The analysis function to be called with the downloaded file NOTE: The result of the analysis function has to be an instance of `Sized`, e.g. a dict or a list to be able to upload it and to be able to monitor the processing time. """ self.parse_payload = payload_parser - self.format_result_for_storage = payload_formatter.format_service_processing_result_for_storage - self.format_to_queue_message_response_body = payload_formatter.format_to_queue_message_response_body self.process_data = data_processor self.get_storage_info_from_tenant_id = get_storage_info_from_tenant_id @@ -71,8 +68,11 @@ class PayloadProcessor: return self._process(queue_message_payload) def _process(self, queue_message_payload: dict) -> dict: - payload = self.parse_payload(queue_message_payload) - logger.info(f"Processing {asdict(payload)} ...") + logger.info(f"Processing Payload ...") + + payload: QueueMessagePayload = self.parse_payload(queue_message_payload) + + logger.debug(f"Payload: {asdict(payload)} ...") storage_info = self._get_storage_info(payload.x_tenant_id) storage = get_storage_from_storage_info(storage_info) @@ -84,15 +84,14 @@ class PayloadProcessor: upload_processing_result = make_uploader( storage, bucket, payload.response_file_type, payload.response_compression_type ) - format_result_for_storage = partial(self.format_result_for_storage, payload) - data = 
download_file_to_process(payload.target_file_name) + data = download_file_to_process(payload.target_file_path) result: List[dict] = self.process_data(data, **payload.processing_kwargs) - formatted_result = format_result_for_storage(result) + formatted_result = format_service_processing_result_for_storage(payload, result) - upload_processing_result(payload.response_file_name, formatted_result) + upload_processing_result(payload.response_file_path, formatted_result) - return self.format_to_queue_message_response_body(payload) + return format_to_queue_message_response_body(payload) def _get_storage_info(self, x_tenant_id=None): if x_tenant_id: @@ -118,7 +117,6 @@ def make_payload_processor(data_processor: Callable, config: Config = None) -> P ) monitor = get_monitor_from_config(config) payload_parser: QueueMessagePayloadParser = get_queue_message_payload_parser(config) - payload_formatter: QueueMessagePayloadFormatter = get_queue_message_payload_formatter() data_processor = monitor(data_processor) @@ -126,6 +124,5 @@ def make_payload_processor(data_processor: Callable, config: Config = None) -> P default_storage_info, get_storage_info_from_tenant_id, payload_parser, - payload_formatter, data_processor, ) diff --git a/tests/payload_parsing_test.py b/tests/payload_parsing_test.py index 303f301..a66fb61 100644 --- a/tests/payload_parsing_test.py +++ b/tests/payload_parsing_test.py @@ -1,15 +1,15 @@ import pytest from pyinfra.payload_processing.payload import ( - QueueMessagePayload, QueueMessagePayloadParser, + LegacyQueueMessagePayload, ) from pyinfra.utils.file_extension_parsing import make_file_extension_parser @pytest.fixture def expected_parsed_payload(x_tenant_id): - return QueueMessagePayload( + return LegacyQueueMessagePayload( dossier_id="test", file_id="test", x_tenant_id=x_tenant_id, @@ -19,8 +19,8 @@ def expected_parsed_payload(x_tenant_id): target_compression_type="gz", response_file_type="json", response_compression_type="gz", - 
target_file_name="test/test.json.gz", - response_file_name="test/test.json.gz", + target_file_path="test/test.json.gz", + response_file_path="test/test.json.gz", processing_kwargs={}, ) @@ -32,7 +32,7 @@ def file_extension_parser(allowed_file_types, allowed_compression_types): @pytest.fixture def payload_parser(file_extension_parser): - return QueueMessagePayloadParser(file_extension_parser) + return QueueMessagePayloadParser(file_extension_parser, allowed_processing_parameters=["operation"]) @pytest.mark.parametrize("allowed_file_types,allowed_compression_types", [(["json", "pdf"], ["gz"])]) diff --git a/tests/payload_processor_test.py b/tests/payload_processor_test.py index 0c48ac4..b7a8b23 100644 --- a/tests/payload_processor_test.py +++ b/tests/payload_processor_test.py @@ -66,7 +66,9 @@ class TestPayloadProcessor: with pytest.raises(Exception): payload_processor(payload) - def test_prometheus_endpoint_is_available(self, test_storage_config, monitoring_enabled, storage_backend, x_tenant_id): + def test_prometheus_endpoint_is_available( + self, test_storage_config, monitoring_enabled, storage_backend, x_tenant_id + ): if monitoring_enabled: resp = requests.get( f"http://{test_storage_config.prometheus_host}:{test_storage_config.prometheus_port}/prometheus" From ef916ee790a953388e08b46e80f6daa1a00cd537 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 18 Aug 2023 12:39:11 +0200 Subject: [PATCH 2/7] Refactor payload processing logic Streamlines payload processor class by encapsulating closely dependent logic, to improve readability and maintainability. 
--- pyinfra/payload_processing/processor.py | 62 +++++++------------------ pyinfra/storage/storage_manager.py | 41 ++++++++++++++++ 2 files changed, 58 insertions(+), 45 deletions(-) create mode 100644 pyinfra/storage/storage_manager.py diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py index 5f98d22..19e1823 100644 --- a/pyinfra/payload_processing/processor.py +++ b/pyinfra/payload_processing/processor.py @@ -1,6 +1,5 @@ import logging from dataclasses import asdict -from functools import partial from typing import Callable, List from pyinfra.config import get_config, Config @@ -13,12 +12,7 @@ from pyinfra.payload_processing.payload import ( QueueMessagePayload, ) from pyinfra.storage.storage import make_downloader, make_uploader -from pyinfra.storage.storage_info import ( - get_storage_info_from_config, - get_storage_info_from_endpoint, - StorageInfo, - get_storage_from_storage_info, -) +from pyinfra.storage.storage_manager import StorageManager logger = logging.getLogger() logger.setLevel(get_config().logging_level_root) @@ -27,28 +21,23 @@ logger.setLevel(get_config().logging_level_root) class PayloadProcessor: def __init__( self, - default_storage_info: StorageInfo, - get_storage_info_from_tenant_id, + storage_manager: StorageManager, payload_parser: QueueMessagePayloadParser, data_processor: Callable, ): """Wraps an analysis function specified by a service (e.g. NER service) in pre- and post-processing steps. Args: - default_storage_info: The default storage info used to create the storage connection. This is only used if - x_tenant_id is not provided in the queue payload. - get_storage_info_from_tenant_id: Callable to acquire storage info from a given tenant id. 
+ storage_manager: Storage manager that connects to the storage, using the tenant id if provided payload_parser: Parser that translates the queue message payload to the required QueueMessagePayload object data_processor: The analysis function to be called with the downloaded file NOTE: The result of the analysis function has to be an instance of `Sized`, e.g. a dict or a list to be able to upload it and to be able to monitor the processing time. """ self.parse_payload = payload_parser + self.connect_storage = storage_manager self.process_data = data_processor - self.get_storage_info_from_tenant_id = get_storage_info_from_tenant_id - self.default_storage_info = default_storage_info - def __call__(self, queue_message_payload: dict) -> dict: """Processes a queue message payload. @@ -60,29 +49,29 @@ class PayloadProcessor: Args: queue_message_payload: The payload of a queue message. The payload is expected to be a dict with the - following keys: dossierId, fileId, targetFileExtension, responseFileExtension + following keys: + targetFilePath, responseFilePath + OR + dossierId, fileId, targetFileExtension, responseFileExtension Returns: - The payload for a response queue message. The payload is a dict with the following keys: dossierId, fileId + The payload for a response queue message, containing only the request payload. 
""" return self._process(queue_message_payload) def _process(self, queue_message_payload: dict) -> dict: - logger.info(f"Processing Payload ...") - payload: QueueMessagePayload = self.parse_payload(queue_message_payload) - logger.debug(f"Payload: {asdict(payload)} ...") + logger.info(f"Processing {payload.__class__.__name__} ...") + logger.debug(f"Payload contents: {asdict(payload)} ...") - storage_info = self._get_storage_info(payload.x_tenant_id) - storage = get_storage_from_storage_info(storage_info) - bucket = storage_info.bucket_name + storage, storage_info = self.connect_storage(payload.x_tenant_id) download_file_to_process = make_downloader( - storage, bucket, payload.target_file_type, payload.target_compression_type + storage, storage_info.bucket, payload.target_file_type, payload.target_compression_type ) upload_processing_result = make_uploader( - storage, bucket, payload.response_file_type, payload.response_compression_type + storage, storage_info.bucket, payload.response_file_type, payload.response_compression_type ) data = download_file_to_process(payload.target_file_path) @@ -93,36 +82,19 @@ class PayloadProcessor: return format_to_queue_message_response_body(payload) - def _get_storage_info(self, x_tenant_id=None): - if x_tenant_id: - storage_info = self.get_storage_info_from_tenant_id(x_tenant_id) - logger.info(f"Received {storage_info.__class__.__name__} for {x_tenant_id} from endpoint.") - logger.debug(f"{asdict(storage_info)}") - else: - storage_info = self.default_storage_info - logger.info(f"Using local default {storage_info.__class__.__name__} for {x_tenant_id}.") - logger.debug(f"{asdict(storage_info)}") - return storage_info - def make_payload_processor(data_processor: Callable, config: Config = None) -> PayloadProcessor: - """Produces payload processor for queue manager.""" + """Creates a payload processor.""" config = config or get_config() - default_storage_info: StorageInfo = get_storage_info_from_config(config) - 
get_storage_info_from_tenant_id = partial( - get_storage_info_from_endpoint, - config.tenant_decryption_public_key, - config.tenant_endpoint, - ) + storage_manager = StorageManager(config) monitor = get_monitor_from_config(config) payload_parser: QueueMessagePayloadParser = get_queue_message_payload_parser(config) data_processor = monitor(data_processor) return PayloadProcessor( - default_storage_info, - get_storage_info_from_tenant_id, + storage_manager, payload_parser, data_processor, ) diff --git a/pyinfra/storage/storage_manager.py b/pyinfra/storage/storage_manager.py new file mode 100644 index 0000000..71d2519 --- /dev/null +++ b/pyinfra/storage/storage_manager.py @@ -0,0 +1,41 @@ +from dataclasses import asdict +from functools import partial, lru_cache +from typing import Tuple + +from pyinfra import logger +from pyinfra.config import Config +from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo +from pyinfra.storage.storages.interface import Storage + + +class StorageManager: + def __init__(self, config: Config): + self.config = config + self.default_storage_info: StorageInfo = get_storage_info_from_config(config) + + self.get_storage_info_from_tenant_id = partial( + get_storage_info_from_endpoint, + config.tenant_decryption_public_key, + config.tenant_endpoint, + ) + + def __call__(self, *args, **kwargs): + return self.connect(*args, **kwargs) + + @lru_cache(maxsize=32) + def connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]: + storage_info = self._get_storage_info(x_tenant_id) + storage_connection = storage_info.get_storage() + return storage_connection, storage_info + + def _get_storage_info(self, x_tenant_id=None): + if x_tenant_id: + storage_info = self.get_storage_info_from_tenant_id(x_tenant_id) + logger.debug(f"Received {storage_info.__class__.__name__} for {x_tenant_id} from endpoint.") + logger.trace(f"{asdict(storage_info)}") + else: + storage_info = 
self.default_storage_info + logger.debug(f"Using local default {storage_info.__class__.__name__} for {x_tenant_id}.") + logger.trace(f"{asdict(storage_info)}") + + return storage_info From 7187f0ec0cd99cfd740c40e706644dc5637f329b Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 18 Aug 2023 14:33:42 +0200 Subject: [PATCH 3/7] RES-343 Update logging to knutils logger --- poetry.lock | 601 ++++++++++++++---------- pyinfra/__init__.py | 18 - pyinfra/k8s_probes/__init__.py | 3 - pyinfra/k8s_probes/startup.py | 8 +- pyinfra/payload_processing/__init__.py | 3 - pyinfra/payload_processing/monitor.py | 9 +- pyinfra/payload_processing/payload.py | 2 +- pyinfra/payload_processing/processor.py | 4 +- pyinfra/queue/__init__.py | 3 - pyinfra/queue/queue_manager.py | 3 +- pyinfra/storage/__init__.py | 3 - pyinfra/storage/storage_manager.py | 2 +- pyinfra/storage/storages/azure.py | 11 +- pyinfra/storage/storages/s3.py | 10 +- pyproject.toml | 14 +- 15 files changed, 380 insertions(+), 314 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7abcb80..b3c189a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,19 +49,19 @@ test = ["astroid", "pytest"] [[package]] name = "azure-core" -version = "1.28.0" +version = "1.29.2" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.28.0.zip", hash = "sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd"}, - {file = "azure_core-1.28.0-py3-none-any.whl", hash = "sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9"}, + {file = "azure-core-1.29.2.zip", hash = "sha256:beb0fe88d1043d8457318e8fb841d9caa648211092eda213c16b376401f3710d"}, + {file = "azure_core-1.29.2-py3-none-any.whl", hash = "sha256:8e6602f322dc1070caf7e17754beb53b69ffa09df0f4786009a3107e9a00c793"}, ] [package.dependencies] requests = ">=2.18.4" six = ">=1.11.0" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" [package.extras] aio = 
["aiohttp (>=3.0)"] @@ -145,13 +145,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] @@ -316,13 +316,13 @@ files = [ [[package]] name = "click" -version = "8.1.5" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, - {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -341,17 +341,17 @@ files = [ [[package]] name = "comm" -version = "0.1.3" +version = "0.1.4" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.6" files = [ - {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, - {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, + {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, + {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, ] [package.dependencies] -traitlets = ">=5.3" +traitlets = ">=4" [package.extras] lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] @@ -360,71 +360,63 @@ typing = ["mypy (>=0.990)"] [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = 
"coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = 
"sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = 
"coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - 
{file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, + {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, + {file = 
"coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, + {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, + {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, + {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, + {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, + {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, + {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, + 
{file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, + {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, + {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, + {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, + {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, ] [package.extras] @@ -432,34 +424,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.2" +version = "41.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, - {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, - {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, - {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, + {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, + {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", 
hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, + {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, ] [package.dependencies] @@ -477,29 +469,29 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "debugpy" -version = "1.6.7" +version = "1.6.7.post1" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.7" files = [ - {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, - {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, - {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, - {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, - {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, - {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, - {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, - {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, - {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, - {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, - {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, - {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, - {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, - {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, - {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, - {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, - {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, - {file = "debugpy-1.6.7.zip", hash = 
"sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:903bd61d5eb433b6c25b48eae5e23821d4c1a19e25c9610205f5aeaccae64e32"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16882030860081e7dd5aa619f30dec3c2f9a421e69861125f83cc372c94e57d"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-win32.whl", hash = "sha256:eea8d8cfb9965ac41b99a61f8e755a8f50e9a20330938ad8271530210f54e09c"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:85969d864c45f70c3996067cfa76a319bae749b04171f2cdeceebe4add316155"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:890f7ab9a683886a0f185786ffbda3b46495c4b929dab083b8c79d6825832a52"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ac7a4dba28801d184b7fc0e024da2635ca87d8b0a825c6087bb5168e3c0d28"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-win32.whl", hash = "sha256:3370ef1b9951d15799ef7af41f8174194f3482ee689988379763ef61a5456426"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:65b28435a17cba4c09e739621173ff90c515f7b9e8ea469b92e3c28ef8e5cdfb"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92b6dae8bfbd497c90596bbb69089acf7954164aea3228a99d7e43e5267f5b36"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f5d2ecead8125cf669e62784ef1e6300f4067b0f14d9f95ee00ae06fc7c4f7"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-win32.whl", hash = "sha256:f0851403030f3975d6e2eaa4abf73232ab90b98f041e3c09ba33be2beda43fcf"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3de5d0f97c425dc49bce4293df6a04494309eedadd2b52c22e58d95107e178d9"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:38651c3639a4e8bbf0ca7e52d799f6abd07d622a193c406be375da4d510d968d"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038c51268367c9c935905a90b1c2d2dbfe304037c27ba9d19fe7409f8cdc710c"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-win32.whl", hash = "sha256:4b9eba71c290852f959d2cf8a03af28afd3ca639ad374d393d53d367f7f685b2"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-win_amd64.whl", hash = "sha256:973a97ed3b434eab0f792719a484566c35328196540676685c975651266fccf9"}, + {file = "debugpy-1.6.7.post1-py2.py3-none-any.whl", hash = "sha256:1093a5c541af079c13ac8c70ab8b24d1d35c8cacb676306cf11e57f699c02926"}, + {file = "debugpy-1.6.7.post1.zip", hash = "sha256:fe87ec0182ef624855d05e6ed7e0b7cb1359d2ffa2a925f8ec2d22e98b75d0ca"}, ] [[package]] @@ -529,13 +521,13 @@ packaging = "*" [[package]] name = "dill" -version = "0.3.6" -description = "serialize all of python" +version = "0.3.7" +description = "serialize all of Python" optional = false python-versions = ">=3.7" files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, ] [package.extras] @@ -562,15 +554,36 @@ websocket-client = ">=0.32.0" [package.extras] ssh = ["paramiko (>=2.4.3)"] +[[package]] +name = "dynaconf" +version = "3.2.1" +description = "The dynamic configurator for your Python Project" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dynaconf-3.2.1-py2.py3-none-any.whl", hash = "sha256:a4af12524f1fc527c6c0cdd4bb38cf83992d5155ad516baa98d9d01b7a731d09"}, + {file = "dynaconf-3.2.1.tar.gz", hash = 
"sha256:00dbd7541ca0f99bcb207cfc8aee0ac8f7d6b32bbb372e5b2865f0cb829b06c3"}, +] + +[package.extras] +all = ["configobj", "hvac", "redis", "ruamel.yaml"] +configobj = ["configobj"] +ini = ["configobj"] +redis = ["redis"] +test = ["configobj", "django", "flake8", "flake8-debugger", "flake8-print", "flake8-todo", "flask (>=0.12)", "hvac", "pep8-naming", "pytest", "pytest-cov", "pytest-mock", "pytest-xdist", "python-dotenv", "radon", "redis", "toml"] +toml = ["toml"] +vault = ["hvac"] +yaml = ["ruamel.yaml"] + [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -644,13 +657,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.24.0" +version = "6.25.1" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, - {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, + {file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"}, + {file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"}, ] [package.dependencies] @@ -747,21 +760,21 @@ 
requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" -version = "0.18.2" +version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, + {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, + {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, ] [package.dependencies] -parso = ">=0.8.0,<0.9.0" +parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] @@ -807,6 +820,28 @@ traitlets = ">=5.3" docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "kn-utils" +version = "0.1.4" +description = "Shared code related to 
logging for research & development." +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "kn_utils-0.1.4-py3-none-any.whl", hash = "sha256:7be365a24e509e72d12202c800701787dc1df616e15dab855e0aedb2bd075e5e"}, + {file = "kn_utils-0.1.4.tar.gz", hash = "sha256:a9f64a4afe4773fb70d222a286faaa9f9585d744fd175febbdd42acbe6d00381"}, +] + +[package.dependencies] +dynaconf = ">=3.1.12,<4.0.0" +funcy = ">=1.17,<2.0" +loguru = ">=0.6,<0.7" +pytest-loguru = ">=0.2.0,<0.3.0" + +[package.source] +type = "legacy" +url = "https://gitlab.knecon.com/api/v4/groups/19/-/packages/pypi/simple" +reference = "gitlab-research" + [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -852,6 +887,24 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"] + [[package]] name = "matplotlib-inline" version = "0.1.6" @@ -879,13 +932,13 @@ files = [ [[package]] name = "minio" -version = "7.1.15" +version = "7.1.16" description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" optional = false python-versions = "*" files = [ - 
{file = "minio-7.1.15-py3-none-any.whl", hash = "sha256:1afdf01c1bc8b57ddd12d438e3e168d625465b56f4d1c2af7576744c688e84c6"}, - {file = "minio-7.1.15.tar.gz", hash = "sha256:fcf8ac2cef310d5ddff2bef2c42f4e5a8bb546b87bca5bf8832135db054ca4e1"}, + {file = "minio-7.1.16-py3-none-any.whl", hash = "sha256:8073bed2b4b1853f3d69ab2f01a0de86264071083032985921201cfbb0950b15"}, + {file = "minio-7.1.16.tar.gz", hash = "sha256:56ecb1e7e0103d2dc212fb460fdb70ab2abb7fa5685db378429325d96d95587a"}, ] [package.dependencies] @@ -905,13 +958,13 @@ files = [ [[package]] name = "nest-asyncio" -version = "1.5.6" +version = "1.5.7" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, + {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, + {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, ] [[package]] @@ -942,13 +995,13 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -994,18 +1047,18 @@ twisted = ["twisted"] [[package]] name = "platformdirs" -version = "3.9.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" @@ -1166,13 +1219,13 @@ files = [ [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = 
"Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -1180,17 +1233,17 @@ plugins = ["importlib-metadata"] [[package]] name = "pylint" -version = "2.17.4" +version = "2.17.5" description = "python code static checker" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, - {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, + {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, + {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, ] [package.dependencies] -astroid = ">=2.15.4,<=2.17.0-dev0" +astroid = ">=2.15.6,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1229,6 +1282,20 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-loguru" +version = "0.2.0" +description = "Pytest Loguru" +optional = false +python-versions = ">=3.6" +files = [ + {file = 
"pytest-loguru-0.2.0.tar.gz", hash = "sha256:6588efbc5d4ab87b05a9a37fdd0be6464d290dc985bc2fb0e5b8425fca7fb93a"}, +] + +[package.dependencies] +loguru = "*" +pytest = "*" + [[package]] name = "python-dateutil" version = "2.8.2" @@ -1268,88 +1335,104 @@ files = [ [[package]] name = "pyzmq" -version = "25.1.0" +version = "25.1.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, - {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, - {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, - {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, - {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, - {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, - {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, - {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, - {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, - {file = 
"pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, - {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, - {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, - {file = 
"pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, - {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = 
"pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = 
"pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = 
"pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, ] [package.dependencies] @@ -1467,33 +1550,33 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.8" +version = "0.12.1" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] [[package]] name = "tornado" -version = "6.3.2" +version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, - {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, - {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, - {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, ] [[package]] @@ -1524,13 +1607,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.3" +version = "2.0.4" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, - {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, ] [package.extras] @@ -1566,6 +1649,20 @@ docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + [[package]] name = "wrapt" version = "1.15.0" @@ -1668,4 +1765,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "c20df2427734eb268e9af0aa354880a83f2e72eab38963136931b73a83ace940" +content-hash = "870c5035049f28a5271499ce077e36fbf36a6dadf14df315240fc35fe4a7fe3b" diff --git a/pyinfra/__init__.py b/pyinfra/__init__.py index a9605b6..8b13789 100644 --- a/pyinfra/__init__.py +++ b/pyinfra/__init__.py @@ -1,19 +1 @@ -from pyinfra import config, k8s_probes, queue, storage -__all__ = ["k8s_probes", "queue", "storage", "config"] - -CONFIG = config.get_config() - -import logging -import sys - -# log config -LOG_FORMAT = "%(asctime)s [%(levelname)s] - [%(filename)s -> 
%(funcName)s() -> %(lineno)s] : %(message)s" -DATE_FORMAT = "%Y-%m-%d %H:%M:%S" -stream_handler = logging.StreamHandler(sys.stdout) -stream_handler_format = logging.Formatter(LOG_FORMAT, datefmt=DATE_FORMAT) -stream_handler.setFormatter(stream_handler_format) - -logger = logging.getLogger() -logger.setLevel(CONFIG.logging_level_root) -logger.addHandler(stream_handler) diff --git a/pyinfra/k8s_probes/__init__.py b/pyinfra/k8s_probes/__init__.py index 8db2d74..e69de29 100644 --- a/pyinfra/k8s_probes/__init__.py +++ b/pyinfra/k8s_probes/__init__.py @@ -1,3 +0,0 @@ -from pyinfra.k8s_probes import startup - -__all__ = ["startup"] diff --git a/pyinfra/k8s_probes/startup.py b/pyinfra/k8s_probes/startup.py index 986bbe8..9a8a183 100644 --- a/pyinfra/k8s_probes/startup.py +++ b/pyinfra/k8s_probes/startup.py @@ -1,5 +1,5 @@ -import logging import sys +from kn_utils.logging import logger from pathlib import Path from pyinfra.queue.queue_manager import token_file_name @@ -22,10 +22,10 @@ def check_token_file(): contents = token_file.read().strip() return contents != "" - # We're intentionally do not handle exception here, since we're only using this in a short script. + # We intentionally do not handle exception here, since we're only using this in a short script. 
# Take care to expand this if the intended use changes - except Exception: - logging.getLogger(__file__).info("Caught exception when reading from token file", exc_info=True) + except Exception as err: + logger.warning(f"{err}: Caught exception when reading from token file", exc_info=True) return False diff --git a/pyinfra/payload_processing/__init__.py b/pyinfra/payload_processing/__init__.py index c350215..e69de29 100644 --- a/pyinfra/payload_processing/__init__.py +++ b/pyinfra/payload_processing/__init__.py @@ -1,3 +0,0 @@ -__all__ = ["make_payload_processor"] - -from pyinfra.payload_processing.processor import make_payload_processor diff --git a/pyinfra/payload_processing/monitor.py b/pyinfra/payload_processing/monitor.py index 1a97eb5..5ea2d94 100644 --- a/pyinfra/payload_processing/monitor.py +++ b/pyinfra/payload_processing/monitor.py @@ -1,15 +1,11 @@ -import logging +from funcy import identity from operator import attrgetter +from prometheus_client import Summary, start_http_server, CollectorRegistry from time import time from typing import Callable, Any, Sized -from funcy import identity -from prometheus_client import Summary, start_http_server, CollectorRegistry - from pyinfra.config import Config -logger = logging.getLogger() - class PrometheusMonitor: def __init__(self, prefix: str, host: str, port: int): @@ -36,7 +32,6 @@ class PrometheusMonitor: def _add_result_monitoring(self, process_fn: Callable): def inner(data: Any, **kwargs): - start = time() result: Sized = process_fn(data, **kwargs) diff --git a/pyinfra/payload_processing/payload.py b/pyinfra/payload_processing/payload.py index 5557d43..4a54b15 100644 --- a/pyinfra/payload_processing/payload.py +++ b/pyinfra/payload_processing/payload.py @@ -5,7 +5,7 @@ from itertools import chain from operator import itemgetter from typing import Union, Sized -from pyinfra import logger +from kn_utils.logging import logger from pyinfra.config import Config from pyinfra.utils.file_extension_parsing import 
make_file_extension_parser diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py index 19e1823..c125857 100644 --- a/pyinfra/payload_processing/processor.py +++ b/pyinfra/payload_processing/processor.py @@ -1,4 +1,4 @@ -import logging +from kn_utils.logging import getLogger from dataclasses import asdict from typing import Callable, List @@ -14,7 +14,7 @@ from pyinfra.payload_processing.payload import ( from pyinfra.storage.storage import make_downloader, make_uploader from pyinfra.storage.storage_manager import StorageManager -logger = logging.getLogger() +logger = getLogger() logger.setLevel(get_config().logging_level_root) diff --git a/pyinfra/queue/__init__.py b/pyinfra/queue/__init__.py index f269935..e69de29 100644 --- a/pyinfra/queue/__init__.py +++ b/pyinfra/queue/__init__.py @@ -1,3 +0,0 @@ -from pyinfra.queue import queue_manager - -__all__ = ["queue_manager"] diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/queue_manager.py index f5ffb98..2d0d608 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/queue_manager.py @@ -3,6 +3,7 @@ import concurrent.futures import json import logging import signal +from kn_utils.logging import getLogger from pathlib import Path import pika @@ -58,7 +59,7 @@ class QueueManager: """Handle RabbitMQ message reception & delivery""" def __init__(self, config: Config): - self.logger = logging.getLogger(__name__) + self.logger = getLogger(__name__) self.logger.setLevel(config.logging_level_root) self._input_queue = config.request_queue diff --git a/pyinfra/storage/__init__.py b/pyinfra/storage/__init__.py index dccdcda..e69de29 100644 --- a/pyinfra/storage/__init__.py +++ b/pyinfra/storage/__init__.py @@ -1,3 +0,0 @@ -from pyinfra.storage.storage import get_storage_from_config - -__all__ = ["get_storage_from_config"] diff --git a/pyinfra/storage/storage_manager.py b/pyinfra/storage/storage_manager.py index 71d2519..b0e2338 100644 --- 
a/pyinfra/storage/storage_manager.py +++ b/pyinfra/storage/storage_manager.py @@ -1,8 +1,8 @@ from dataclasses import asdict from functools import partial, lru_cache +from kn_utils.logging import logger from typing import Tuple -from pyinfra import logger from pyinfra.config import Config from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo from pyinfra.storage.storages.interface import Storage diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index f6091a4..4602da9 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ -1,16 +1,13 @@ import logging -from itertools import repeat -from operator import attrgetter - from azure.storage.blob import BlobServiceClient, ContainerClient +from itertools import repeat +from kn_utils.logging import logger +from operator import attrgetter from retry import retry -from pyinfra.config import Config, get_config +from pyinfra.config import Config from pyinfra.storage.storages.interface import Storage -CONFIG = get_config() -logger = logging.getLogger(CONFIG.logging_level_root) - logging.getLogger("azure").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index 3eafeff..cdf35af 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -1,17 +1,13 @@ import io -import logging from itertools import repeat -from operator import attrgetter - +from kn_utils.logging import logger from minio import Minio +from operator import attrgetter from retry import retry -from pyinfra.config import Config, get_config +from pyinfra.config import Config from pyinfra.storage.storages.interface import Storage -CONFIG = get_config() -logger = logging.getLogger(CONFIG.logging_level_root) - class S3Storage(Storage): def __init__(self, client: Minio): diff --git a/pyproject.toml b/pyproject.toml 
index 1b14e42..389b6d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pyinfra" -version = "1.5.10" +version = "1.6.0" description = "" authors = ["Team Research "] license = "All rights reseverd" @@ -16,6 +16,7 @@ azure-storage-blob = "^12.9.0" funcy = "^1.17" prometheus-client = "^0.16.0" pycryptodome = "^3.17" +kn-utils = { version = "0.1.4", source = "gitlab-research" } [tool.poetry.group.dev.dependencies] pytest = "^7.1.3" @@ -33,6 +34,15 @@ testpaths = ["tests", "integration"] log_cli = 1 log_cli_level = "DEBUG" +[[tool.poetry.source]] +name = "PyPI" +priority = "primary" + +[[tool.poetry.source]] +name = "gitlab-research" +url = "https://gitlab.knecon.com/api/v4/groups/19/-/packages/pypi/simple" +priority = "explicit" + [build-system] requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +build-backend = "poetry.core.masonry.api" \ No newline at end of file From 294688ea6687e197c603cf3c88f7853b018ab50d Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Mon, 21 Aug 2023 10:24:54 +0200 Subject: [PATCH 4/7] RED-7002 Forward exceptions from thread context PyInfra now reports exceptions that happen inside the processing callback. Also refactors queue manager logging to fit new logger by changing "%s", var logic to f string, since this syntax is not supported with knutlis logging. 
--- pyinfra/queue/queue_manager.py | 58 ++++++++++++++-------------------- 1 file changed, 23 insertions(+), 35 deletions(-) diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/queue_manager.py index 2d0d608..7301506 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/queue_manager.py @@ -2,12 +2,11 @@ import atexit import concurrent.futures import json import logging -import signal -from kn_utils.logging import getLogger -from pathlib import Path - import pika import pika.exceptions +import signal +from kn_utils.logging import logger +from pathlib import Path from pika.adapters.blocking_connection import BlockingChannel from pyinfra.config import Config @@ -59,9 +58,6 @@ class QueueManager: """Handle RabbitMQ message reception & delivery""" def __init__(self, config: Config): - self.logger = getLogger(__name__) - self.logger.setLevel(config.logging_level_root) - self._input_queue = config.request_queue self._output_queue = config.response_queue self._dead_letter_queue = config.dead_letter_queue @@ -123,16 +119,16 @@ class QueueManager: """ callback = self._create_queue_callback(process_payload) self._set_consumer_token(None) - self.logger.info("Consuming from queue") + logger.info("Consuming from queue ...") try: self._open_channel() self._set_consumer_token(self._channel.basic_consume(self._input_queue, callback)) - self.logger.info("Registered with consumer-tag: %s", self._consumer_token) + logger.info(f"Registered with consumer-tag: {self._consumer_token}") self._channel.start_consuming() except Exception: - self.logger.error( + logger.error( "An unexpected exception occurred while consuming messages. 
Consuming will stop.", exc_info=True ) raise @@ -143,75 +139,67 @@ class QueueManager: def stop_consuming(self): if self._consumer_token and self._connection: - self.logger.info("Cancelling subscription for consumer-tag %s", self._consumer_token) + logger.info(f"Cancelling subscription for consumer-tag {self._consumer_token}") self._channel.stop_consuming(self._consumer_token) self._set_consumer_token(None) def _handle_stop_signal(self, signal_number, _stack_frame, *args, **kwargs): - self.logger.info("Received signal %s", signal_number) + logger.info(f"Received signal {signal_number}") self.stop_consuming() def _create_queue_callback(self, process_payload: PayloadProcessor): def process_message_body_and_await_result(unpacked_message_body): with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: - self.logger.debug("Processing payload in separate thread") + logger.debug("Processing payload in separate thread.") future = thread_pool_executor.submit(process_payload, unpacked_message_body) while future.running(): - self.logger.debug("Waiting for payload processing to finish") + logger.debug("Waiting for payload processing to finish...") self._connection.sleep(float(self._connection_sleep)) try: return future.result() except Exception as err: - raise ProcessingFailure("QueueMessagePayload processing failed") from err + raise ProcessingFailure(f"QueueMessagePayload processing failed: {repr(err)}") from err def acknowledge_message_and_publish_response(frame, headers, response_body): response_properties = pika.BasicProperties(headers=headers) if headers else None self._channel.basic_publish("", self._output_queue, json.dumps(response_body).encode(), response_properties) - self.logger.info( - "Result published, acknowledging incoming message with delivery_tag %s", - frame.delivery_tag, - ) + logger.info(f"Result published, acknowledging incoming message with delivery_tag {frame.delivery_tag}.") self._channel.basic_ack(frame.delivery_tag) def 
callback(_channel, frame, properties, body): - - self.logger.info("Received message from queue with delivery_tag %s", frame.delivery_tag) - self.logger.debug("Message headers: %s", properties.headers) + logger.info(f"Received message from queue with delivery_tag {frame.delivery_tag}.") + logger.debug(f"Message headers: {properties.headers}") # Only try to process each message once. Re-queueing will be handled by the dead-letter-exchange. This # prevents endless retries on messages that are impossible to process. if frame.redelivered: - self.logger.info( - "Aborting message processing for delivery_tag %s due to it being redelivered", - frame.delivery_tag, + logger.info( + f"Aborting message processing for delivery_tag {frame.delivery_tag} due to it being redelivered.", ) self._channel.basic_nack(frame.delivery_tag, requeue=False) return try: - self.logger.debug("Processing (%s, %s, %s)", frame, properties, body) + logger.debug(f"Processing {frame}, {properties}, {body}") filtered_message_headers = safe_project(properties.headers, ["X-TENANT-ID"]) # TODO: parametrize key? message_body = {**json.loads(body), **filtered_message_headers} processing_result = process_message_body_and_await_result(message_body) - self.logger.info( - "Processed message with delivery_tag %s, publishing result to result-queue", - frame.delivery_tag, + logger.info( + f"Processed message with delivery_tag {frame.delivery_tag}, publishing result to result-queue." 
) acknowledge_message_and_publish_response(frame, filtered_message_headers, processing_result) - except ProcessingFailure: - self.logger.info( - "Processing message with delivery_tag %s failed, declining", - frame.delivery_tag, - ) + except ProcessingFailure as err: + logger.info(f"Processing message with delivery_tag {frame.delivery_tag} failed, declining.") + logger.debug(f"ProcessingFailure: {err}") self._channel.basic_nack(frame.delivery_tag, requeue=False) except Exception: n_attempts = _get_n_previous_attempts(properties) + 1 - self.logger.warning("Failed to process message, %s attempts", n_attempts, exc_info=True) + logger.warning(f"Failed to process message, {n_attempts}", exc_info=True) self._channel.basic_nack(frame.delivery_tag, requeue=False) raise From e580a66347b7f85b910059d03c1e572526300ac1 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Mon, 21 Aug 2023 15:11:08 +0200 Subject: [PATCH 5/7] Refactor storage provider & payload parser Applies strategy pattern to payload parsing logic to improve maintainability and testability. Renames storage manager to storage provider. 
--- pyinfra/payload_processing/payload.py | 166 ++++++++++-------- pyinfra/payload_processing/processor.py | 18 +- ...storage_manager.py => storage_provider.py} | 7 +- 3 files changed, 109 insertions(+), 82 deletions(-) rename pyinfra/storage/{storage_manager.py => storage_provider.py} (89%) diff --git a/pyinfra/payload_processing/payload.py b/pyinfra/payload_processing/payload.py index 4a54b15..178effb 100644 --- a/pyinfra/payload_processing/payload.py +++ b/pyinfra/payload_processing/payload.py @@ -1,11 +1,10 @@ from dataclasses import dataclass -from functools import singledispatch -from funcy import project +from functools import singledispatch, partial +from funcy import project, complement from itertools import chain from operator import itemgetter -from typing import Union, Sized +from typing import Union, Sized, Callable, List -from kn_utils.logging import logger from pyinfra.config import Config from pyinfra.utils.file_extension_parsing import make_file_extension_parser @@ -42,78 +41,105 @@ class LegacyQueueMessagePayload(QueueMessagePayload): class QueueMessagePayloadParser: - def __init__(self, file_extension_parser, allowed_processing_parameters): - self.parse_file_extensions = file_extension_parser - self.allowed_processing_params = allowed_processing_parameters + def __init__(self, payload_matcher2parse_strategy: dict): + self.payload_matcher2parse_strategy = payload_matcher2parse_strategy def __call__(self, payload: dict) -> QueueMessagePayload: - if maybe_legacy_payload(payload): - logger.debug("Legacy payload detected.") - return self._parse_legacy_queue_message_payload(payload) - else: - return self._parse_queue_message_payload(payload) - - def _parse_queue_message_payload(self, payload: dict) -> QueueMessagePayload: - target_file_path, response_file_path = itemgetter("targetFilePath", "responseFilePath")(payload) - - target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( - 
map(self.parse_file_extensions, [target_file_path, response_file_path]) - ) - - x_tenant_id = payload.get("X-TENANT-ID") - - processing_kwargs = project(payload, self.allowed_processing_params) - - return QueueMessagePayload( - target_file_path=target_file_path, - response_file_path=response_file_path, - target_file_type=target_file_type, - target_compression_type=target_compression_type, - response_file_type=response_file_type, - response_compression_type=response_compression_type, - x_tenant_id=x_tenant_id, - processing_kwargs=processing_kwargs, - ) - - def _parse_legacy_queue_message_payload(self, payload: dict) -> LegacyQueueMessagePayload: - dossier_id, file_id, target_file_extension, response_file_extension = itemgetter( - "dossierId", "fileId", "targetFileExtension", "responseFileExtension" - )(payload) - - target_file_path = f"{dossier_id}/{file_id}.{target_file_extension}" - response_file_path = f"{dossier_id}/{file_id}.{response_file_extension}" - - target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( - map(self.parse_file_extensions, [target_file_extension, response_file_extension]) - ) - - x_tenant_id = payload.get("X-TENANT-ID") - - processing_kwargs = project(payload, self.allowed_processing_params) - - return LegacyQueueMessagePayload( - dossier_id=dossier_id, - file_id=file_id, - x_tenant_id=x_tenant_id, - target_file_extension=target_file_extension, - response_file_extension=response_file_extension, - target_file_type=target_file_type, - target_compression_type=target_compression_type, - response_file_type=response_file_type, - response_compression_type=response_compression_type, - target_file_path=target_file_path, - response_file_path=response_file_path, - processing_kwargs=processing_kwargs, - ) - - -def maybe_legacy_payload(payload: dict) -> bool: - return {"dossierId", "fileId", "targetFileExtension", "responseFileExtension"}.issubset(payload.keys()) + for payload_matcher, 
parse_strategy in self.payload_matcher2parse_strategy.items(): + if payload_matcher(payload): + return parse_strategy(payload) def get_queue_message_payload_parser(config: Config) -> QueueMessagePayloadParser: file_extension_parser = make_file_extension_parser(config.allowed_file_types, config.allowed_compression_types) - return QueueMessagePayloadParser(file_extension_parser, config.allowed_processing_parameters) + + payload_matcher2parse_strategy = get_payload_matcher2parse_strategy( + file_extension_parser, config.allowed_processing_parameters + ) + + return QueueMessagePayloadParser(payload_matcher2parse_strategy) + + +def get_payload_matcher2parse_strategy(parse_file_extensions: Callable, allowed_processing_parameters: List[str]): + return { + is_legacy_payload: partial( + parse_legacy_queue_message_payload, + parse_file_extensions=parse_file_extensions, + allowed_processing_parameters=allowed_processing_parameters, + ), + complement(is_legacy_payload): partial( + parse_queue_message_payload, + parse_file_extensions=parse_file_extensions, + allowed_processing_parameters=allowed_processing_parameters, + ), + } + + +def is_legacy_payload(payload: dict) -> bool: + return {"dossierId", "fileId", "targetFileExtension", "responseFileExtension"}.issubset(payload.keys()) + + +def parse_queue_message_payload( + payload: dict, + parse_file_extensions: Callable, + allowed_processing_parameters: List[str], +) -> QueueMessagePayload: + target_file_path, response_file_path = itemgetter("targetFilePath", "responseFilePath")(payload) + + target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( + map(parse_file_extensions, [target_file_path, response_file_path]) + ) + + x_tenant_id = payload.get("X-TENANT-ID") + + processing_kwargs = project(payload, allowed_processing_parameters) + + return QueueMessagePayload( + target_file_path=target_file_path, + response_file_path=response_file_path, + 
target_file_type=target_file_type, + target_compression_type=target_compression_type, + response_file_type=response_file_type, + response_compression_type=response_compression_type, + x_tenant_id=x_tenant_id, + processing_kwargs=processing_kwargs, + ) + + +def parse_legacy_queue_message_payload( + payload: dict, + parse_file_extensions: Callable, + allowed_processing_parameters: List[str], +) -> LegacyQueueMessagePayload: + dossier_id, file_id, target_file_extension, response_file_extension = itemgetter( + "dossierId", "fileId", "targetFileExtension", "responseFileExtension" + )(payload) + + target_file_path = f"{dossier_id}/{file_id}.{target_file_extension}" + response_file_path = f"{dossier_id}/{file_id}.{response_file_extension}" + + target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( + map(parse_file_extensions, [target_file_extension, response_file_extension]) + ) + + x_tenant_id = payload.get("X-TENANT-ID") + + processing_kwargs = project(payload, allowed_processing_parameters) + + return LegacyQueueMessagePayload( + dossier_id=dossier_id, + file_id=file_id, + x_tenant_id=x_tenant_id, + target_file_extension=target_file_extension, + response_file_extension=response_file_extension, + target_file_type=target_file_type, + target_compression_type=target_compression_type, + response_file_type=response_file_type, + response_compression_type=response_compression_type, + target_file_path=target_file_path, + response_file_path=response_file_path, + processing_kwargs=processing_kwargs, + ) @singledispatch diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py index c125857..2670af8 100644 --- a/pyinfra/payload_processing/processor.py +++ b/pyinfra/payload_processing/processor.py @@ -12,7 +12,7 @@ from pyinfra.payload_processing.payload import ( QueueMessagePayload, ) from pyinfra.storage.storage import make_downloader, make_uploader -from pyinfra.storage.storage_manager 
import StorageManager +from pyinfra.storage.storage_provider import StorageProvider logger = getLogger() logger.setLevel(get_config().logging_level_root) @@ -21,21 +21,21 @@ logger.setLevel(get_config().logging_level_root) class PayloadProcessor: def __init__( self, - storage_manager: StorageManager, + storage_provider: StorageProvider, payload_parser: QueueMessagePayloadParser, data_processor: Callable, ): """Wraps an analysis function specified by a service (e.g. NER service) in pre- and post-processing steps. Args: - storage_manager: Storage manager that connects to the storage, using the tenant id if provided + storage_provider: Storage manager that connects to the storage, using the tenant id if provided payload_parser: Parser that translates the queue message payload to the required QueueMessagePayload object data_processor: The analysis function to be called with the downloaded file NOTE: The result of the analysis function has to be an instance of `Sized`, e.g. a dict or a list to be able to upload it and to be able to monitor the processing time. 
""" self.parse_payload = payload_parser - self.connect_storage = storage_manager + self.provide_storage = storage_provider self.process_data = data_processor def __call__(self, queue_message_payload: dict) -> dict: @@ -65,13 +65,13 @@ class PayloadProcessor: logger.info(f"Processing {payload.__class__.__name__} ...") logger.debug(f"Payload contents: {asdict(payload)} ...") - storage, storage_info = self.connect_storage(payload.x_tenant_id) + storage, storage_info = self.provide_storage(payload.x_tenant_id) download_file_to_process = make_downloader( - storage, storage_info.bucket, payload.target_file_type, payload.target_compression_type + storage, storage_info.bucket_name, payload.target_file_type, payload.target_compression_type ) upload_processing_result = make_uploader( - storage, storage_info.bucket, payload.response_file_type, payload.response_compression_type + storage, storage_info.bucket_name, payload.response_file_type, payload.response_compression_type ) data = download_file_to_process(payload.target_file_path) @@ -87,14 +87,14 @@ def make_payload_processor(data_processor: Callable, config: Config = None) -> P """Creates a payload processor.""" config = config or get_config() - storage_manager = StorageManager(config) + storage_provider = StorageProvider(config) monitor = get_monitor_from_config(config) payload_parser: QueueMessagePayloadParser = get_queue_message_payload_parser(config) data_processor = monitor(data_processor) return PayloadProcessor( - storage_manager, + storage_provider, payload_parser, data_processor, ) diff --git a/pyinfra/storage/storage_manager.py b/pyinfra/storage/storage_provider.py similarity index 89% rename from pyinfra/storage/storage_manager.py rename to pyinfra/storage/storage_provider.py index b0e2338..e5e6900 100644 --- a/pyinfra/storage/storage_manager.py +++ b/pyinfra/storage/storage_provider.py @@ -4,11 +4,12 @@ from kn_utils.logging import logger from typing import Tuple from pyinfra.config import Config -from 
pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo +from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo, \ + get_storage_from_storage_info from pyinfra.storage.storages.interface import Storage -class StorageManager: +class StorageProvider: def __init__(self, config: Config): self.config = config self.default_storage_info: StorageInfo = get_storage_info_from_config(config) @@ -25,7 +26,7 @@ class StorageManager: @lru_cache(maxsize=32) def connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]: storage_info = self._get_storage_info(x_tenant_id) - storage_connection = storage_info.get_storage() + storage_connection = get_storage_from_storage_info(storage_info) return storage_connection, storage_info def _get_storage_info(self, x_tenant_id=None): From c09476cfaea4037930a3c9cb7d5dea310ac34606 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 22 Aug 2023 10:45:54 +0200 Subject: [PATCH 6/7] Update tests All components from payload processing downwards are tested. Tests that depend on docker compose have been disabled by default because they take too long to use during development. Furthermore, the queue manager tests are not stable, a refactoring with inversion of control is urgently needed to make the components properly testable. The storage tests are stable and should be run once before releasing, this should be implemented via the CI script. Also adds, if present, tenant Id and operation kwargs to storage and queue response. 
--- pyinfra/payload_processing/payload.py | 24 ++- pyinfra/storage/storage_provider.py | 21 +- pyinfra/storage/storages/mock.py | 36 ++++ pytest.ini | 5 + tests/conftest.py | 187 ++++++++++++------ tests/lru_test.py | 48 ----- tests/payload_parsing_test.py | 54 ----- tests/payload_processor_test.py | 76 ------- .../docker-compose.yml | 0 .../queue_manager_test.py | 0 .../storage_test.py | 0 tests/{ => unit_tests}/cipher_test.py | 0 .../unit_tests/file_extension_parsing_test.py | 32 +++ tests/{ => unit_tests}/monitor_test.py | 0 tests/unit_tests/payload_test.py | 48 +++++ tests/unit_tests/processor_test.py | 81 ++++++++ 16 files changed, 361 insertions(+), 251 deletions(-) create mode 100644 pyinfra/storage/storages/mock.py create mode 100644 pytest.ini delete mode 100644 tests/lru_test.py delete mode 100644 tests/payload_parsing_test.py delete mode 100644 tests/payload_processor_test.py rename tests/{ => tests_with_docker_compose}/docker-compose.yml (100%) rename tests/{ => tests_with_docker_compose}/queue_manager_test.py (100%) rename tests/{ => tests_with_docker_compose}/storage_test.py (100%) rename tests/{ => unit_tests}/cipher_test.py (100%) create mode 100644 tests/unit_tests/file_extension_parsing_test.py rename tests/{ => unit_tests}/monitor_test.py (100%) create mode 100644 tests/unit_tests/payload_test.py create mode 100644 tests/unit_tests/processor_test.py diff --git a/pyinfra/payload_processing/payload.py b/pyinfra/payload_processing/payload.py index 178effb..debf69f 100644 --- a/pyinfra/payload_processing/payload.py +++ b/pyinfra/payload_processing/payload.py @@ -149,20 +149,28 @@ def format_service_processing_result_for_storage(payload: QueueMessagePayload, r @format_service_processing_result_for_storage.register(LegacyQueueMessagePayload) def _(payload: LegacyQueueMessagePayload, result: Sized) -> dict: + processing_kwargs = payload.processing_kwargs or {} + x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} return { 
"dossierId": payload.dossier_id, "fileId": payload.file_id, "targetFileExtension": payload.target_file_extension, "responseFileExtension": payload.response_file_extension, + **x_tenant_id, + **processing_kwargs, "data": result, } @format_service_processing_result_for_storage.register(QueueMessagePayload) def _(payload: QueueMessagePayload, result: Sized) -> dict: + processing_kwargs = payload.processing_kwargs or {} + x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} return { "targetFilePath": payload.target_file_path, "responseFilePath": payload.response_file_path, + **x_tenant_id, + **processing_kwargs, "data": result, } @@ -173,13 +181,19 @@ def format_to_queue_message_response_body(queue_message_payload: QueueMessagePay @format_to_queue_message_response_body.register(LegacyQueueMessagePayload) -def _(queue_message_payload: LegacyQueueMessagePayload) -> dict: - return {"dossierId": queue_message_payload.dossier_id, "fileId": queue_message_payload.file_id} +def _(payload: LegacyQueueMessagePayload) -> dict: + processing_kwargs = payload.processing_kwargs or {} + x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} + return {"dossierId": payload.dossier_id, "fileId": payload.file_id, **x_tenant_id, **processing_kwargs} @format_to_queue_message_response_body.register(QueueMessagePayload) -def _(queue_message_payload: QueueMessagePayload) -> dict: +def _(payload: QueueMessagePayload) -> dict: + processing_kwargs = payload.processing_kwargs or {} + x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} return { - "targetFilePath": queue_message_payload.target_file_path, - "responseFilePath": queue_message_payload.response_file_path, + "targetFilePath": payload.target_file_path, + "responseFilePath": payload.response_file_path, + **x_tenant_id, + **processing_kwargs, } diff --git a/pyinfra/storage/storage_provider.py b/pyinfra/storage/storage_provider.py index e5e6900..345a096 
100644 --- a/pyinfra/storage/storage_provider.py +++ b/pyinfra/storage/storage_provider.py @@ -4,8 +4,12 @@ from kn_utils.logging import logger from typing import Tuple from pyinfra.config import Config -from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo, \ - get_storage_from_storage_info +from pyinfra.storage.storage_info import ( + get_storage_info_from_config, + get_storage_info_from_endpoint, + StorageInfo, + get_storage_from_storage_info, +) from pyinfra.storage.storages.interface import Storage @@ -21,10 +25,10 @@ class StorageProvider: ) def __call__(self, *args, **kwargs): - return self.connect(*args, **kwargs) + return self._connect(*args, **kwargs) @lru_cache(maxsize=32) - def connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]: + def _connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]: storage_info = self._get_storage_info(x_tenant_id) storage_connection = get_storage_from_storage_info(storage_info) return storage_connection, storage_info @@ -40,3 +44,12 @@ class StorageProvider: logger.trace(f"{asdict(storage_info)}") return storage_info + + +class StorageProviderMock(StorageProvider): + def __init__(self, storage, storage_info): + self.storage = storage + self.storage_info = storage_info + + def __call__(self, *args, **kwargs): + return self.storage, self.storage_info diff --git a/pyinfra/storage/storages/mock.py b/pyinfra/storage/storages/mock.py new file mode 100644 index 0000000..b209399 --- /dev/null +++ b/pyinfra/storage/storages/mock.py @@ -0,0 +1,36 @@ +from pyinfra.storage.storages.interface import Storage + + +class StorageMock(Storage): + def __init__(self, data: bytes = None, file_name: str = None, bucket: str = None): + self.data = data + self.file_name = file_name + self.bucket = bucket + + def make_bucket(self, bucket_name): + self.bucket = bucket_name + + def has_bucket(self, bucket_name): + return self.bucket == bucket_name + + def put_object(self, 
bucket_name, object_name, data): + self.bucket = bucket_name + self.file_name = object_name + self.data = data + + def exists(self, bucket_name, object_name): + return self.bucket == bucket_name and self.file_name == object_name + + def get_object(self, bucket_name, object_name): + return self.data + + def get_all_objects(self, bucket_name): + raise NotImplementedError + + def clear_bucket(self, bucket_name): + self.bucket = None + self.file_name = None + self.data = None + + def get_all_object_names(self, bucket_name): + raise NotImplementedError diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..37020a3 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +norecursedirs = tests/tests_with_docker_compose +; storage tests and queue manager tests are disabled, since docker compose takes too long to start for regular tests. +; They should be run though once before a release. +; The queue manager tests are also disabled because they are not stable. \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 053d592..0cd40d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,83 +1,142 @@ -import logging -import time -from pathlib import Path - +import gzip +import json import pytest -import testcontainers.compose -from pyinfra.config import get_config -from pyinfra.storage import get_storage_from_config - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -TESTS_DIR = Path(__file__).resolve().parents[0] - - -@pytest.fixture(scope="session", autouse=True) -def docker_compose(sleep_seconds=30): - """Note: `autouse` can be set to `False` while working on the code to speed up the testing. In that case, run - `docker-compose up` in the tests directory manually before running the tests. 
- """ - logger.info(f"Starting docker containers with {TESTS_DIR}/docker-compose.yml...") - compose = testcontainers.compose.DockerCompose(TESTS_DIR, compose_file_name="docker-compose.yml") - compose.start() - logger.info(f"Sleeping for {sleep_seconds} seconds to wait for containers to finish startup... ") - time.sleep(sleep_seconds) - yield compose - compose.stop() - - -@pytest.fixture(scope="session") -def test_storage_config(storage_backend, bucket_name, monitoring_enabled): - config = get_config() - config.storage_backend = storage_backend - config.storage_bucket = bucket_name - config.storage_azureconnectionstring = "DefaultEndpointsProtocol=https;AccountName=iqserdevelopment;AccountKey=4imAbV9PYXaztSOMpIyAClg88bAZCXuXMGJG0GA1eIBpdh2PlnFGoRBnKqLy2YZUSTmZ3wJfC7tzfHtuC6FEhQ==;EndpointSuffix=core.windows.net" - config.monitoring_enabled = monitoring_enabled - config.prometheus_metric_prefix = "test" - config.prometheus_port = 8080 - config.prometheus_host = "0.0.0.0" - return config - - -@pytest.fixture(scope="session") -def test_queue_config(): - config = get_config() - config.rabbitmq_connection_sleep = 2 - config.rabbitmq_heartbeat = 4 - return config +from pyinfra.payload_processing.payload import LegacyQueueMessagePayload, QueueMessagePayload @pytest.fixture -def payload(x_tenant_id): +def legacy_payload(x_tenant_id, optional_processing_kwargs): x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} return { "dossierId": "test", "fileId": "test", - "targetFileExtension": "json.gz", - "responseFileExtension": "json.gz", + "targetFileExtension": "target.json.gz", + "responseFileExtension": "response.json.gz", **x_tenant_entry, + **optional_processing_kwargs, } -@pytest.fixture(scope="session") -def response_payload(): +@pytest.fixture +def target_file_path(): + return "test/test.target.json.gz" + + +@pytest.fixture +def response_file_path(): + return "test/test.response.json.gz" + + 
+@pytest.fixture +def payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path): + x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} + return { + "targetFilePath": target_file_path, + "responseFilePath": response_file_path, + **x_tenant_entry, + **optional_processing_kwargs, + } + + +@pytest.fixture +def legacy_queue_response_payload(x_tenant_id, optional_processing_kwargs): + x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} return { "dossierId": "test", "fileId": "test", + **x_tenant_entry, + **optional_processing_kwargs, } -@pytest.fixture(scope="session") -def storage(test_storage_config): - logger.debug("Setup for storage") - storage = get_storage_from_config(test_storage_config) - storage.make_bucket(test_storage_config.storage_bucket) - storage.clear_bucket(test_storage_config.storage_bucket) - yield storage - logger.debug("Teardown for storage") - try: - storage.clear_bucket(test_storage_config.storage_bucket) - except: - pass +@pytest.fixture +def queue_response_payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path): + x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} + return { + "targetFilePath": target_file_path, + "responseFilePath": response_file_path, + **x_tenant_entry, + **optional_processing_kwargs, + } + + +@pytest.fixture +def legacy_storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json): + x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} + return { + "dossierId": "test", + "fileId": "test", + "targetFileExtension": "target.json.gz", + "responseFileExtension": "response.json.gz", + **x_tenant_entry, + **optional_processing_kwargs, + 
"data": processing_result_json, + } + + +@pytest.fixture +def storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json, target_file_path, response_file_path): + x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} + optional_processing_kwargs = optional_processing_kwargs or {} + return { + "targetFilePath": target_file_path, + "responseFilePath": response_file_path, + **x_tenant_entry, + **optional_processing_kwargs, + "data": processing_result_json, + } + + +@pytest.fixture +def legacy_parsed_payload( + x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path +) -> LegacyQueueMessagePayload: + return LegacyQueueMessagePayload( + dossier_id="test", + file_id="test", + x_tenant_id=x_tenant_id, + target_file_extension="target.json.gz", + response_file_extension="response.json.gz", + target_file_type="json", + target_compression_type="gz", + response_file_type="json", + response_compression_type="gz", + target_file_path=target_file_path, + response_file_path=response_file_path, + processing_kwargs=optional_processing_kwargs or {}, + ) + + +@pytest.fixture +def parsed_payload( + x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path +) -> QueueMessagePayload: + return QueueMessagePayload( + x_tenant_id=x_tenant_id, + target_file_type="json", + target_compression_type="gz", + response_file_type="json", + response_compression_type="gz", + target_file_path=target_file_path, + response_file_path=response_file_path, + processing_kwargs=optional_processing_kwargs or {}, + ) + + +@pytest.fixture +def target_json_file() -> bytes: + data = {"target": "test"} + enc_data = json.dumps(data).encode("utf-8") + compr_data = gzip.compress(enc_data) + return compr_data + + +@pytest.fixture +def processing_result_json() -> dict: + return {"response": "test"} diff --git a/tests/lru_test.py b/tests/lru_test.py deleted file mode 100644 index 9ab574f..0000000 --- a/tests/lru_test.py +++ /dev/null @@ -1,48 
+0,0 @@ -from functools import lru_cache - -import pytest - - -def func(callback): - return callback() - - -@pytest.fixture() -def fn(maxsize): - return lru_cache(maxsize)(func) - - -@pytest.fixture(params=[1, 2, 5]) -def maxsize(request): - return request.param - - -class Callback: - def __init__(self, x): - self.initial_x = x - self.x = x - - def __call__(self, *args, **kwargs): - self.x += 1 - return self.x - - def __hash__(self): - return hash(self.initial_x) - - -def test_adding_to_cache_within_maxsize_does_not_overwrite(fn, maxsize): - c = Callback(0) - for i in range(maxsize): - assert fn(c) == 1 - assert fn(c) == 1 - - -def test_adding_to_cache_more_than_maxsize_does_overwrite(fn, maxsize): - - callbacks = [Callback(i) for i in range(maxsize)] - - for i in range(maxsize): - assert fn(callbacks[i]) == i + 1 - - assert fn(Callback(maxsize)) == maxsize + 1 - assert fn(callbacks[0]) == 2 diff --git a/tests/payload_parsing_test.py b/tests/payload_parsing_test.py deleted file mode 100644 index a66fb61..0000000 --- a/tests/payload_parsing_test.py +++ /dev/null @@ -1,54 +0,0 @@ -import pytest - -from pyinfra.payload_processing.payload import ( - QueueMessagePayloadParser, - LegacyQueueMessagePayload, -) -from pyinfra.utils.file_extension_parsing import make_file_extension_parser - - -@pytest.fixture -def expected_parsed_payload(x_tenant_id): - return LegacyQueueMessagePayload( - dossier_id="test", - file_id="test", - x_tenant_id=x_tenant_id, - target_file_extension="json.gz", - response_file_extension="json.gz", - target_file_type="json", - target_compression_type="gz", - response_file_type="json", - response_compression_type="gz", - target_file_path="test/test.json.gz", - response_file_path="test/test.json.gz", - processing_kwargs={}, - ) - - -@pytest.fixture -def file_extension_parser(allowed_file_types, allowed_compression_types): - return make_file_extension_parser(allowed_file_types, allowed_compression_types) - - -@pytest.fixture -def 
payload_parser(file_extension_parser): - return QueueMessagePayloadParser(file_extension_parser, allowed_processing_parameters=["operation"]) - - -@pytest.mark.parametrize("allowed_file_types,allowed_compression_types", [(["json", "pdf"], ["gz"])]) -class TestPayload: - @pytest.mark.parametrize("x_tenant_id", [None, "klaus"]) - def test_payload_is_parsed_correctly(self, payload_parser, payload, expected_parsed_payload): - payload = payload_parser(payload) - assert payload == expected_parsed_payload - - @pytest.mark.parametrize( - "extension,expected", - [ - ("json.gz", ("json", "gz")), - ("json", ("json", None)), - ("prefix.json.gz", ("json", "gz")), - ], - ) - def test_parse_file_extension(self, file_extension_parser, extension, expected): - assert file_extension_parser(extension) == expected diff --git a/tests/payload_processor_test.py b/tests/payload_processor_test.py deleted file mode 100644 index b7a8b23..0000000 --- a/tests/payload_processor_test.py +++ /dev/null @@ -1,76 +0,0 @@ -import gzip -import json -from operator import itemgetter - -import pytest -import requests - -from pyinfra.payload_processing.processor import make_payload_processor - - -@pytest.fixture -def target_file(): - contents = {"numberOfPages": 10, "content1": "value1", "content2": "value2"} - return gzip.compress(json.dumps(contents).encode("utf-8")) - - -@pytest.fixture -def file_names(payload): - dossier_id, file_id, target_suffix, response_suffix = itemgetter( - "dossierId", - "fileId", - "targetFileExtension", - "responseFileExtension", - )(payload) - return f"{dossier_id}/{file_id}.{target_suffix}", f"{dossier_id}/{file_id}.{response_suffix}" - - -@pytest.fixture(scope="session") -def payload_processor(test_storage_config): - def file_processor_mock(json_file: dict): - return [json_file] - - yield make_payload_processor(file_processor_mock, test_storage_config) - - -@pytest.mark.parametrize("storage_backend", ["s3"], scope="session") -@pytest.mark.parametrize("bucket_name", 
["testbucket"], scope="session") -@pytest.mark.parametrize("monitoring_enabled", [True, False], scope="session") -@pytest.mark.parametrize("x_tenant_id", [None]) -class TestPayloadProcessor: - def test_payload_processor_yields_correct_response_and_uploads_result( - self, - payload_processor, - storage, - bucket_name, - payload, - response_payload, - target_file, - file_names, - ): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, file_names[0], target_file) - response = payload_processor(payload) - - assert response == response_payload - - data_received = storage.get_object(bucket_name, file_names[1]) - - assert json.loads((gzip.decompress(data_received)).decode("utf-8")) == { - **payload, - "data": [json.loads(gzip.decompress(target_file).decode("utf-8"))], - } - - def test_catching_of_processing_failure(self, payload_processor, storage, bucket_name, payload): - storage.clear_bucket(bucket_name) - with pytest.raises(Exception): - payload_processor(payload) - - def test_prometheus_endpoint_is_available( - self, test_storage_config, monitoring_enabled, storage_backend, x_tenant_id - ): - if monitoring_enabled: - resp = requests.get( - f"http://{test_storage_config.prometheus_host}:{test_storage_config.prometheus_port}/prometheus" - ) - assert resp.status_code == 200 diff --git a/tests/docker-compose.yml b/tests/tests_with_docker_compose/docker-compose.yml similarity index 100% rename from tests/docker-compose.yml rename to tests/tests_with_docker_compose/docker-compose.yml diff --git a/tests/queue_manager_test.py b/tests/tests_with_docker_compose/queue_manager_test.py similarity index 100% rename from tests/queue_manager_test.py rename to tests/tests_with_docker_compose/queue_manager_test.py diff --git a/tests/storage_test.py b/tests/tests_with_docker_compose/storage_test.py similarity index 100% rename from tests/storage_test.py rename to tests/tests_with_docker_compose/storage_test.py diff --git a/tests/cipher_test.py 
b/tests/unit_tests/cipher_test.py similarity index 100% rename from tests/cipher_test.py rename to tests/unit_tests/cipher_test.py diff --git a/tests/unit_tests/file_extension_parsing_test.py b/tests/unit_tests/file_extension_parsing_test.py new file mode 100644 index 0000000..ef4741f --- /dev/null +++ b/tests/unit_tests/file_extension_parsing_test.py @@ -0,0 +1,32 @@ +import pytest + +from pyinfra.utils.file_extension_parsing import make_file_extension_parser + + +@pytest.fixture +def file_extension_parser(file_types, compression_types): + return make_file_extension_parser(file_types, compression_types) + + +@pytest.mark.parametrize( + "file_path,file_types,compression_types,expected_file_extension,expected_compression_extension", + [ + ("test.txt", ["txt"], ["gz"], "txt", None), + ("test.txt.gz", ["txt"], ["gz"], "txt", "gz"), + ("test.txt.gz", [], [], None, None), + ("test.txt.gz", ["txt"], [], "txt", None), + ("test.txt.gz", [], ["gz"], None, "gz"), + ("test", ["txt"], ["gz"], None, None), + ], +) +def test_file_extension_parsing( + file_extension_parser, + file_path, + file_types, + compression_types, + expected_file_extension, + expected_compression_extension, +): + file_extension, compression_extension = file_extension_parser(file_path) + assert file_extension == expected_file_extension + assert compression_extension == expected_compression_extension diff --git a/tests/monitor_test.py b/tests/unit_tests/monitor_test.py similarity index 100% rename from tests/monitor_test.py rename to tests/unit_tests/monitor_test.py diff --git a/tests/unit_tests/payload_test.py b/tests/unit_tests/payload_test.py new file mode 100644 index 0000000..037a53a --- /dev/null +++ b/tests/unit_tests/payload_test.py @@ -0,0 +1,48 @@ +import pytest + +from pyinfra.config import get_config +from pyinfra.payload_processing.payload import ( + get_queue_message_payload_parser, + format_to_queue_message_response_body, + format_service_processing_result_for_storage, +) + + +@pytest.fixture 
+def payload_parser():
+    config = get_config()
+    return get_queue_message_payload_parser(config)
+
+
+@pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
+@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}])
+class TestPayloadParsing:
+    def test_legacy_payload_parsing(self, payload_parser, legacy_payload, legacy_parsed_payload):
+        parsed_payload = payload_parser(legacy_payload)
+        assert parsed_payload == legacy_parsed_payload
+
+    def test_payload_parsing(self, payload_parser, payload, parsed_payload):
+        result = payload_parser(payload)
+        assert result == parsed_payload
+
+
+@pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
+@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}])
+class TestPayloadFormatting:
+    def test_legacy_payload_formatting_for_response(self, legacy_parsed_payload, legacy_queue_response_payload):
+        formatted_payload = format_to_queue_message_response_body(legacy_parsed_payload)
+        assert formatted_payload == legacy_queue_response_payload
+
+    def test_payload_formatting_for_response(self, parsed_payload, queue_response_payload):
+        formatted_payload = format_to_queue_message_response_body(parsed_payload)
+        assert formatted_payload == queue_response_payload
+
+    def test_legacy_payload_formatting_for_storage(
+        self, legacy_parsed_payload, processing_result_json, legacy_storage_payload
+    ):
+        formatted_payload = format_service_processing_result_for_storage(legacy_parsed_payload, processing_result_json)
+        assert formatted_payload == legacy_storage_payload
+
+    def test_payload_formatting_for_storage(self, parsed_payload, processing_result_json, storage_payload):
+        formatted_payload = format_service_processing_result_for_storage(parsed_payload, processing_result_json)
+        assert formatted_payload == storage_payload
diff --git a/tests/unit_tests/processor_test.py b/tests/unit_tests/processor_test.py
new file mode 100644
index 0000000..cf9a7df
--- /dev/null
+++ 
b/tests/unit_tests/processor_test.py @@ -0,0 +1,81 @@ +import gzip +import json +import pytest + +from pyinfra.config import get_config +from pyinfra.payload_processing.payload import get_queue_message_payload_parser +from pyinfra.payload_processing.processor import PayloadProcessor +from pyinfra.storage.storage_info import StorageInfo +from pyinfra.storage.storage_provider import StorageProviderMock +from pyinfra.storage.storages.mock import StorageMock + + +@pytest.fixture +def bucket_name(): + return "test_bucket" + + +@pytest.fixture +def storage_mock(target_json_file, target_file_path, bucket_name): + storage = StorageMock(target_json_file, target_file_path, bucket_name) + return storage + + +@pytest.fixture +def storage_info_mock(bucket_name): + return StorageInfo(bucket_name) + + +@pytest.fixture +def data_processor_mock(processing_result_json): + def inner(data, **kwargs): + return processing_result_json + + return inner + + +@pytest.fixture +def payload_processor(storage_mock, storage_info_mock, data_processor_mock): + storage_provider = StorageProviderMock(storage_mock, storage_info_mock) + payload_parser = get_queue_message_payload_parser(get_config()) + return PayloadProcessor(storage_provider, payload_parser, data_processor_mock) + + +@pytest.mark.parametrize("x_tenant_id", [None, "klaus"]) +@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}]) +class TestPayloadProcessor: + def test_payload_processor_yields_correct_response_and_uploads_result_for_legacy_message( + self, + payload_processor, + storage_mock, + bucket_name, + response_file_path, + legacy_payload, + legacy_queue_response_payload, + legacy_storage_payload, + ): + response = payload_processor(legacy_payload) + + assert response == legacy_queue_response_payload + + data_stored = storage_mock.get_object(bucket_name, response_file_path) + + assert json.loads(gzip.decompress(data_stored).decode()) == legacy_storage_payload + + def 
test_payload_processor_yields_correct_response_and_uploads_result( + self, + payload_processor, + storage_mock, + bucket_name, + response_file_path, + payload, + queue_response_payload, + storage_payload, + ): + response = payload_processor(payload) + + assert response == queue_response_payload + + data_stored = storage_mock.get_object(bucket_name, response_file_path) + + assert json.loads(gzip.decompress(data_stored).decode()) == storage_payload From 72547201f31fd25b4a0d1b314b521948aef2e960 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 23 Aug 2023 09:45:02 +0200 Subject: [PATCH 7/7] Adjust log levels to reduce log clutter Also updates readme and adds pytest execution to CI script. --- .gitlab-ci.yml | 2 +- README.md | 66 ++++++++++++++++++------- pyinfra/payload_processing/processor.py | 5 +- pyinfra/queue/queue_manager.py | 5 +- 4 files changed, 52 insertions(+), 26 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 5959c15..2134f00 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -5,4 +5,4 @@ include: run-tests: script: - - echo "skipping tests" + - pytest . diff --git a/README.md b/README.md index f274497..51b4049 100755 --- a/README.md +++ b/README.md @@ -45,6 +45,19 @@ A configuration is located in `/config.yaml`. All relevant variables can be conf ### Expected AMQP input message: +Either use the legacy format with dossierId and fileId as strings or the new format where absolute paths are used. +A tenant ID can be optionally provided in the message header (key: "X-TENANT-ID") + + +```json +{ + "targetFilePath": "", + "responseFilePath": "" +} +``` + +or + ```json { "dossierId": "", @@ -58,6 +71,16 @@ Optionally, the input message can contain a field with the key `"operations"`. ### AMQP output message: + +```json +{ + "targetFilePath": "", + "responseFilePath": "" +} +``` + +or + ```json { "dossierId": "", @@ -69,31 +92,37 @@ Optionally, the input message can contain a field with the key `"operations"`. 
### Setup -Install project dependencies +Add the respective version of the pyinfra package to your pyproject.toml file. Make sure to add our gitlab registry as a source. +For now, all internal packages used by pyinfra also have to be added to the pyproject.toml file. +Execute `poetry lock` and `poetry install` to install the packages. -```bash - make poetry -``` +```toml +[tool.poetry.dependencies] +pyinfra = { version = "1.6.0", source = "gitlab-research" } +kn-utils = { version = "0.1.4", source = "gitlab-research" } -You don't have to install it independently in the project repo, just `import pyinfra` in any `.py`-file - -or install form another project - -```bash -poetry add git+ssh://git@git.iqser.com:2222/rr/pyinfra.git#TAG-NUMBER +[[tool.poetry.source]] +name = "gitlab-research" +url = "https://gitlab.knecon.com/api/v4/groups/19/-/packages/pypi/simple" +priority = "explicit" ``` ### API ```python -from pyinfra.config import get_config +from pyinfra import config from pyinfra.payload_processing.processor import make_payload_processor from pyinfra.queue.queue_manager import QueueManager -queue_manager = QueueManager(get_config()) -queue_manager.start_consuming(make_payload_processor(data_processor)) +pyinfra_config = config.get_config() + +process_payload = make_payload_processor(process_data, config=pyinfra_config) + +queue_manager = QueueManager(pyinfra_config) +queue_manager.start_consuming(process_payload) ``` -The data_processor should expect a dict or bytes (pdf) as input and should return a list of results. + +`process_data` should expect a dict (json) or bytes (pdf) as input and should return a list of results. 
 ## Scripts
 
@@ -111,11 +140,12 @@ $ python scripts/start_pyinfra.py
 **Shell 3**: Upload dummy content on storage and publish message
 
 ```bash
-$ python scripts/mock_process_request.py
+$ python scripts/send_request.py
 ```
 
 ## Tests
 
-The tests take a bit longer than you are probably used to, because among other things the required startup times are
-quite high. The test runtime can be accelerated by setting 'autouse' to 'False'. In that case, run 'docker-compose up'
-in the tests dir manually before running the tests.
\ No newline at end of file
+Running all tests takes a bit longer than you are probably used to, because among other things the required startup times are
+quite high for docker-compose-dependent tests. This is why the tests are split into two parts. The first part contains all
+tests that do not require docker-compose and the second part contains all tests that require docker-compose.
+By default, only the first part is executed, but when releasing a new version, all tests should be executed.
\ No newline at end of file diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py index 2670af8..4fcea1d 100644 --- a/pyinfra/payload_processing/processor.py +++ b/pyinfra/payload_processing/processor.py @@ -1,4 +1,4 @@ -from kn_utils.logging import getLogger +from kn_utils.logging import logger from dataclasses import asdict from typing import Callable, List @@ -14,9 +14,6 @@ from pyinfra.payload_processing.payload import ( from pyinfra.storage.storage import make_downloader, make_uploader from pyinfra.storage.storage_provider import StorageProvider -logger = getLogger() -logger.setLevel(get_config().logging_level_root) - class PayloadProcessor: def __init__( diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/queue_manager.py index 7301506..9a8e8b8 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/queue_manager.py @@ -119,7 +119,6 @@ class QueueManager: """ callback = self._create_queue_callback(process_payload) self._set_consumer_token(None) - logger.info("Consuming from queue ...") try: self._open_channel() @@ -165,7 +164,7 @@ class QueueManager: def acknowledge_message_and_publish_response(frame, headers, response_body): response_properties = pika.BasicProperties(headers=headers) if headers else None self._channel.basic_publish("", self._output_queue, json.dumps(response_body).encode(), response_properties) - logger.info(f"Result published, acknowledging incoming message with delivery_tag {frame.delivery_tag}.") + logger.debug(f"Result published, acknowledging incoming message with delivery_tag {frame.delivery_tag}.") self._channel.basic_ack(frame.delivery_tag) def callback(_channel, frame, properties, body): @@ -183,7 +182,7 @@ class QueueManager: try: logger.debug(f"Processing {frame}, {properties}, {body}") - filtered_message_headers = safe_project(properties.headers, ["X-TENANT-ID"]) # TODO: parametrize key? 
+ filtered_message_headers = safe_project(properties.headers, ["X-TENANT-ID"]) message_body = {**json.loads(body), **filtered_message_headers} processing_result = process_message_body_and_await_result(message_body)