From 1f482f24765939dd692d649858548f3c42d6aea0 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 9 Jan 2024 16:07:48 +0100 Subject: [PATCH 01/39] fix: storage test --- tests/conftest.py | 6 +++++ .../tests_with_docker_compose/storage_test.py | 25 +++++++++++++------ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0cd40d6..35f389c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,9 +2,15 @@ import gzip import json import pytest +from pyinfra.config import get_config from pyinfra.payload_processing.payload import LegacyQueueMessagePayload, QueueMessagePayload +@pytest.fixture(scope="session") +def settings(): + return get_config() + + @pytest.fixture def legacy_payload(x_tenant_id, optional_processing_kwargs): x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} diff --git a/tests/tests_with_docker_compose/storage_test.py b/tests/tests_with_docker_compose/storage_test.py index 80ab4b0..2a8c0c7 100644 --- a/tests/tests_with_docker_compose/storage_test.py +++ b/tests/tests_with_docker_compose/storage_test.py @@ -1,14 +1,25 @@ -import logging - import pytest -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) +from pyinfra.storage.storage import get_storage_from_config -@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session") -@pytest.mark.parametrize("bucket_name", ["testbucket"], scope="session") -@pytest.mark.parametrize("monitoring_enabled", [False], scope="session") +@pytest.fixture(scope="session") +def storage(storage_backend, bucket_name, settings): + settings.storage_backend = storage_backend + settings.storage_bucket = bucket_name + + storage = get_storage_from_config(settings) + storage.make_bucket(bucket_name) + + yield storage + storage.clear_bucket(bucket_name) + + +# @pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session") +# FIXME: Azure storage test needs the secret azure connection string +# when the 
config is refactored as file, add this and provide file via bitwarden +@pytest.mark.parametrize("storage_backend", ["s3"], scope="session") +@pytest.mark.parametrize("bucket_name", ["bucket"], scope="session") class TestStorage: def test_clearing_bucket_yields_empty_bucket(self, storage, bucket_name): storage.clear_bucket(bucket_name) From 64871bbb6280423a306ffba7ddebf28850053c4c Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Mon, 15 Jan 2024 10:30:07 +0100 Subject: [PATCH 02/39] refactor: add basic queue manager test --- tests/tests_with_docker_compose/queue_test.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/tests_with_docker_compose/queue_test.py diff --git a/tests/tests_with_docker_compose/queue_test.py b/tests/tests_with_docker_compose/queue_test.py new file mode 100644 index 0000000..d293eef --- /dev/null +++ b/tests/tests_with_docker_compose/queue_test.py @@ -0,0 +1,38 @@ +import gzip +import json +from multiprocessing import Process +from time import sleep + +from kn_utils.logging import logger + +from pyinfra.queue.development_queue_manager import DevelopmentQueueManager +from pyinfra.queue.queue_manager import QueueManager + + +class TestQueueManager: + def test_basic_functionality(self, settings): + settings.rabbitmq_heartbeat = 7200 + development_queue_manager = DevelopmentQueueManager(settings) + + message = { + "targetFilePath": "test/target.json.gz", + "responseFilePath": "test/response.json.gz", + } + + development_queue_manager.publish_request(message) + + queue_manager = QueueManager(settings) + + consume = lambda: queue_manager.start_consuming(lambda x: x) + p = Process(target=consume) + p.start() + + wait_time = 1 + logger.info(f"Waiting {wait_time} seconds for the consumer to process the message...") + sleep(wait_time) + + p.kill() + + response = development_queue_manager.get_response() + + print(response) From b49645cce467e6e38bafc3cdfc86c2893e09a793 Mon Sep 17 00:00:00 2001 From: Julius 
Unverfehrt Date: Mon, 15 Jan 2024 16:46:33 +0100 Subject: [PATCH 03/39] refactor: queue manager and config logic WIP --- config/settings.toml | 40 +++++ pyinfra/queue/queue_manager.py | 142 +++++++++++++++++- tests/tests_with_docker_compose/queue_test.py | 30 ++-- 3 files changed, 199 insertions(+), 13 deletions(-) create mode 100644 config/settings.toml diff --git a/config/settings.toml b/config/settings.toml new file mode 100644 index 0000000..3760d2d --- /dev/null +++ b/config/settings.toml @@ -0,0 +1,40 @@ +[logging] +level = "DEBUG" + +[metrics.prometheus] +enabled = true +prefix = "redactmanager_research_service_parameter" # convention: '{product_name}_{service_name}_{parameter}' +host = "0.0.0.0" +port = 8080 + +[rabbitmq] +host = "localhost" +port = "5672" +username = "user" +password = "bitnami" +heartbeat = 5 +connection_sleep = 5 +write_consumer_token = false +input_queue = "request_queue" +output_queue = "response_queue" +dead_letter_queue = "dead_letter_queue" + +[storage] +type = "s3" + +[storage.s3] +bucket = "redaction" +endpoint = "http://127.0.0.1:9000" +key = "root" +secret = "password" +region = "eu-central-1" + +[storage.azure] +container = "redaction" +connection_string = "DefaultEndpointsProtocol=..." 
+ +[multi_tenancy.server] +public_key = "redaction" +endpoint = "http://tenant-user-management:8081/internal-api/tenants" + + diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/queue_manager.py index b004c4d..c06ac7f 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/queue_manager.py @@ -2,14 +2,23 @@ import atexit import concurrent.futures import json import logging +import sys +import threading +import time +from functools import partial +from typing import Union, Callable + import pika import pika.exceptions import signal + +from dynaconf import Dynaconf from kn_utils.logging import logger from pathlib import Path -from pika.adapters.blocking_connection import BlockingChannel +from pika.adapters.blocking_connection import BlockingChannel, BlockingConnection +from retry import retry -from pyinfra.config import Config +from pyinfra.config import Config, load_settings from pyinfra.exception import ProcessingFailure from pyinfra.payload_processing.processor import PayloadProcessor from pyinfra.utils.dict import safe_project @@ -203,3 +212,132 @@ class QueueManager: raise return callback + + +class QueueManagerV2: + def __init__(self, settings: Dynaconf = load_settings()): + self.input_queue = settings.rabbitmq.input_queue + self.output_queue = settings.rabbitmq.output_queue + self.dead_letter_queue = settings.rabbitmq.dead_letter_queue + + self.connection_parameters = self.create_connection_parameters(settings) + + self.connection: Union[BlockingConnection, None] = None + self.channel: Union[BlockingChannel, None] = None + + self.consumer_thread: Union[threading.Thread, None] = None + self.worker_threads: list[threading.Thread] = [] + + atexit.register(self.stop_consuming) + signal.signal(signal.SIGTERM, self._handle_stop_signal) + signal.signal(signal.SIGINT, self._handle_stop_signal) + + @staticmethod + def create_connection_parameters(settings: Dynaconf): + credentials = pika.PlainCredentials(username=settings.rabbitmq.username, 
password=settings.rabbitmq.password) + pika_connection_params = { + "host": settings.rabbitmq.host, + "port": settings.rabbitmq.port, + "credentials": credentials, + "heartbeat": settings.rabbitmq.heartbeat, + } + + return pika.ConnectionParameters(**pika_connection_params) + + @retry(tries=5, delay=5, jitter=(1, 3)) + def establish_connection(self): + # TODO: set sensible retry parameters + if self.connection and self.connection.is_open: + logger.debug("Connection to RabbitMQ already established.") + return + + logger.info("Establishing connection to RabbitMQ...") + self.connection = pika.BlockingConnection(parameters=self.connection_parameters) + self.channel = self.connection.channel() + self.channel.basic_qos(prefetch_count=1) + + args = { + "x-dead-letter-exchange": "", + "x-dead-letter-routing-key": self.dead_letter_queue, + } + + self.channel.queue_declare(self.input_queue, arguments=args, auto_delete=False, durable=True) + self.channel.queue_declare(self.output_queue, arguments=args, auto_delete=False, durable=True) + logger.info("Connection to RabbitMQ established.") + + def publish_message(self, message: dict, properties: pika.BasicProperties = None): + self.establish_connection() + message_encoded = json.dumps(message).encode("utf-8") + self.channel.basic_publish( + "", + self.input_queue, + properties=properties, + body=message_encoded, + ) + logger.info(f"Published message to queue {self.input_queue}.") + + def get_message(self): + self.establish_connection() + return self.channel.basic_get(self.output_queue) + + def create_on_message_callback(self, callback: Callable): + + def process_message_body_and_await_result(unpacked_message_body): + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: + logger.debug("Processing payload in separate thread.") + future = thread_pool_executor.submit(callback, unpacked_message_body) + + while future.running(): + logger.debug("Waiting for payload processing to finish...") + 
self.connection.process_data_events() + self.connection.sleep(5) + + return future.result() + + + def cb(ch, method, properties, body): + logger.info(f"Received message from queue with delivery_tag {method.delivery_tag}.") + result = process_message_body_and_await_result(body) + logger.info(f"Processed message with delivery_tag {method.delivery_tag}, publishing result to result-queue.") + ch.basic_publish( + "", + self.output_queue, + result, + ) + + ch.basic_ack(delivery_tag=method.delivery_tag) + logger.info(f"Message with delivery tag {method.delivery_tag} acknowledged.") + + return cb + + def start_consuming(self, message_processor: Callable): + on_message_callback = self.create_on_message_callback(message_processor) + self.establish_connection() + self.channel.basic_consume(self.input_queue, on_message_callback) + try: + self.channel.start_consuming() + except KeyboardInterrupt: + self.stop_consuming() + + def stop_consuming(self): + if self.channel and self.channel.is_open: + logger.info("Stopping consuming...") + self.channel.stop_consuming() + logger.info("Closing channel...") + self.channel.close() + + if self.connection and self.connection.is_open: + logger.info("Closing connection to RabbitMQ...") + self.connection.close() + + logger.info("Waiting for worker threads to finish...") + + for thread in self.worker_threads: + logger.info(f"Stopping worker thread {thread.name}...") + thread.join() + logger.info(f"Worker thread {thread.name} stopped.") + + def _handle_stop_signal(self, signum, *args, **kwargs): + logger.info(f"Received signal {signum}, stopping consuming...") + self.stop_consuming() + sys.exit(0) \ No newline at end of file diff --git a/tests/tests_with_docker_compose/queue_test.py b/tests/tests_with_docker_compose/queue_test.py index d293eef..e48c15f 100644 --- a/tests/tests_with_docker_compose/queue_test.py +++ b/tests/tests_with_docker_compose/queue_test.py @@ -1,38 +1,46 @@ -import gzip import json from multiprocessing import Process from 
time import sleep from kn_utils.logging import logger +from pyinfra.config import get_config from pyinfra.queue.development_queue_manager import DevelopmentQueueManager -from pyinfra.queue.queue_manager import QueueManager +from pyinfra.queue.queue_manager import QueueManager, QueueManagerV2 +def callback(x): + sleep(4) + response = json.dumps({"status": "success"}).encode("utf-8") + return response + class TestQueueManager: def test_basic_functionality(self, settings): - settings.rabbitmq_heartbeat = 7200 - development_queue_manager = DevelopmentQueueManager(settings) - message = { "targetFilePath": "test/target.json.gz", "responseFilePath": "test/response.json.gz", } - development_queue_manager.publish_request(message) + queue_manager = QueueManagerV2() + # queue_manager_old = QueueManager(get_config()) - queue_manager = QueueManager(settings) + queue_manager.publish_message(message) + queue_manager.publish_message(message) + queue_manager.publish_message(message) + logger.info("Published message") - consume = lambda: queue_manager.start_consuming(lambda x: x) + # consume = lambda: queue_manager.start_consuming(callback) + consume = lambda: queue_manager.start_consuming(callback) p = Process(target=consume) p.start() - wait_time = 1 - logger.info(f"Waiting {wait_time} seconds for the consumer to process the message...") + wait_time = 20 + # logger.info(f"Waiting {wait_time} seconds for the consumer to process the message...") sleep(wait_time) p.kill() - response = development_queue_manager.get_response() + response = queue_manager.get_message() + logger.info(f"Response: {response}") print(response) From ebc519ee0d9a1b62b2f86010a72838079a4e54b0 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 16 Jan 2024 14:16:27 +0100 Subject: [PATCH 04/39] refactor: finnish queue manager, queue manager tests, also add validation logic, integrate new settings --- config/settings.toml | 40 -- pyinfra/config.py | 29 +- pyinfra/queue/development_queue_manager.py | 40 -- 
pyinfra/queue/queue_manager.py | 355 +++++------------- pyinfra/utils/config_validation.py | 30 ++ tests/conftest.py | 4 +- tests/tests_with_docker_compose/queue_test.py | 114 ++++-- 7 files changed, 244 insertions(+), 368 deletions(-) delete mode 100644 config/settings.toml delete mode 100644 pyinfra/queue/development_queue_manager.py create mode 100644 pyinfra/utils/config_validation.py diff --git a/config/settings.toml b/config/settings.toml deleted file mode 100644 index 3760d2d..0000000 --- a/config/settings.toml +++ /dev/null @@ -1,40 +0,0 @@ -[logging] -level = "DEBUG" - -[metrics.prometheus] -enabled = true -prefix = "redactmanager_research_service_parameter" # convention: '{product_name}_{service_name}_{parameter}' -host = "0.0.0.0" -port = 8080 - -[rabbitmq] -host = "localhost" -port = "5672" -username = "user" -password = "bitnami" -heartbeat = 5 -connection_sleep = 5 -write_consumer_token = false -input_queue = "request_queue" -output_queue = "response_queue" -dead_letter_queue = "dead_letter_queue" - -[storage] -type = "s3" - -[storage.s3] -bucket = "redaction" -endpoint = "http://127.0.0.1:9000" -key = "root" -secret = "password" -region = "eu-central-1" - -[storage.azure] -container = "redaction" -connection_string = "DefaultEndpointsProtocol=..." 
- -[multi_tenancy.server] -public_key = "redaction" -endpoint = "http://tenant-user-management:8081/internal-api/tenants" - - diff --git a/pyinfra/config.py b/pyinfra/config.py index 396e20d..c6cc9c9 100644 --- a/pyinfra/config.py +++ b/pyinfra/config.py @@ -1,6 +1,10 @@ +import os from os import environ +from pathlib import Path from typing import Union +from dynaconf import Dynaconf + from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint @@ -45,7 +49,7 @@ class Config: self.rabbitmq_password = read_from_environment("RABBITMQ_PASSWORD", "bitnami") # Controls AMQP heartbeat timeout in seconds - self.rabbitmq_heartbeat = int(read_from_environment("RABBITMQ_HEARTBEAT", 60)) + self.rabbitmq_heartbeat = int(read_from_environment("RABBITMQ_HEARTBEAT", 1)) # Controls AMQP connection sleep timer in seconds # important for heartbeat to come through while main function runs on other thread @@ -96,7 +100,9 @@ class Config: # config for x-tenant-endpoint to receive storage connection information per tenant self.tenant_decryption_public_key = read_from_environment("TENANT_PUBLIC_KEY", "redaction") - self.tenant_endpoint = read_from_environment("TENANT_ENDPOINT", "http://tenant-user-management:8081/internal-api/tenants") + self.tenant_endpoint = read_from_environment( + "TENANT_ENDPOINT", "http://tenant-user-management:8081/internal-api/tenants" + ) # Value to see if we should write a consumer token to a file self.write_consumer_token = read_from_environment("WRITE_CONSUMER_TOKEN", "False") @@ -104,3 +110,22 @@ class Config: def get_config() -> Config: return Config() + + +def load_settings(): + # TODO: Make dynamic, so that the settings.toml file can be loaded from any location + # TODO: add validation + root_path = Path(__file__).resolve().parents[0] # this is pyinfra/ + repo_root_path = root_path.parents[0] # this is the root of the repo + os.environ["ROOT_PATH"] = str(root_path) + os.environ["REPO_ROOT_PATH"] = str(repo_root_path) + + settings = Dynaconf( + 
load_dotenv=True, + envvar_prefix=False, + settings_files=[ + repo_root_path / "config" / "settings.toml", + ], + ) + + return settings diff --git a/pyinfra/queue/development_queue_manager.py b/pyinfra/queue/development_queue_manager.py deleted file mode 100644 index a14df63..0000000 --- a/pyinfra/queue/development_queue_manager.py +++ /dev/null @@ -1,40 +0,0 @@ -import json - -import pika -import pika.exceptions - -from pyinfra.config import Config -from pyinfra.queue.queue_manager import QueueManager - - -class DevelopmentQueueManager(QueueManager): - """Extends the queue manger with additional functionality that is needed for tests and scripts, - but not in production, such as publishing messages. - """ - - def __init__(self, config: Config): - super().__init__(config) - self._open_channel() - - def publish_request(self, message: dict, properties: pika.BasicProperties = None): - message_encoded = json.dumps(message).encode("utf-8") - self._channel.basic_publish( - "", - self._input_queue, - properties=properties, - body=message_encoded, - ) - - def get_response(self): - return self._channel.basic_get(self._output_queue) - - def clear_queues(self): - """purge input & output queues""" - try: - self._channel.queue_purge(self._input_queue) - self._channel.queue_purge(self._output_queue) - except pika.exceptions.ChannelWrongStateError: - pass - - def close_channel(self): - self._channel.close() diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/queue_manager.py index c06ac7f..ecf6242 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/queue_manager.py @@ -2,220 +2,27 @@ import atexit import concurrent.futures import json import logging +import signal import sys -import threading -import time -from functools import partial from typing import Union, Callable import pika import pika.exceptions -import signal - from dynaconf import Dynaconf from kn_utils.logging import logger -from pathlib import Path from pika.adapters.blocking_connection import 
BlockingChannel, BlockingConnection from retry import retry -from pyinfra.config import Config, load_settings -from pyinfra.exception import ProcessingFailure -from pyinfra.payload_processing.processor import PayloadProcessor -from pyinfra.utils.dict import safe_project - -CONFIG = Config() +from pyinfra.utils.config_validation import validate_settings, queue_manager_validators pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter -def get_connection_params(config: Config) -> pika.ConnectionParameters: - """creates pika connection params from pyinfra.Config class - - Args: - config (pyinfra.Config): standard pyinfra config class - - Returns: - pika.ConnectionParameters: standard pika connection param object - """ - credentials = pika.PlainCredentials(username=config.rabbitmq_username, password=config.rabbitmq_password) - pika_connection_params = { - "host": config.rabbitmq_host, - "port": config.rabbitmq_port, - "credentials": credentials, - "heartbeat": config.rabbitmq_heartbeat, - } - - return pika.ConnectionParameters(**pika_connection_params) - - -def _get_n_previous_attempts(props): - return 0 if props.headers is None else props.headers.get("x-retry-count", 0) - - -def token_file_name(): - """create filepath - - Returns: - joblib.Path: filepath - """ - token_file_path = Path("/tmp") / "consumer_token.txt" - return token_file_path - - class QueueManager: - """Handle RabbitMQ message reception & delivery""" + def __init__(self, settings: Dynaconf): + validate_settings(settings, queue_manager_validators) - def __init__(self, config: Config): - self._input_queue = config.request_queue - self._output_queue = config.response_queue - self._dead_letter_queue = config.dead_letter_queue - - # controls how often we send out a life signal - self._heartbeat = config.rabbitmq_heartbeat - - # controls for how long we only process data events (e.g. 
heartbeats), - # while the queue is blocked and we process the given callback function - self._connection_sleep = config.rabbitmq_connection_sleep - - self._write_token = config.write_consumer_token == "True" - self._set_consumer_token(None) - - self._connection_params = get_connection_params(config) - self._connection = pika.BlockingConnection(parameters=self._connection_params) - self._channel: BlockingChannel - - # necessary to pods can be terminated/restarted in K8s/docker - atexit.register(self.stop_consuming) - signal.signal(signal.SIGTERM, self._handle_stop_signal) - signal.signal(signal.SIGINT, self._handle_stop_signal) - - def _set_consumer_token(self, token_value): - self._consumer_token = token_value - - if self._write_token: - token_file_path = token_file_name() - - with token_file_path.open(mode="w", encoding="utf8") as token_file: - text = token_value if token_value is not None else "" - token_file.write(text) - - def _open_channel(self): - self._channel = self._connection.channel() - self._channel.basic_qos(prefetch_count=1) - - args = { - "x-dead-letter-exchange": "", - "x-dead-letter-routing-key": self._dead_letter_queue, - } - - self._channel.queue_declare(self._input_queue, arguments=args, auto_delete=False, durable=True) - self._channel.queue_declare(self._output_queue, arguments=args, auto_delete=False, durable=True) - - def start_consuming(self, process_payload: PayloadProcessor): - """consumption handling - - standard callback handling is enforced through wrapping process_message_callback in _create_queue_callback - (implements threading to support heartbeats) - - initially sets consumer token to None - - tries to - - open channels - - set consumer token to basic_consume, passing in the standard callback and input queue name - - calls pika start_consuming method on the channels - - catches all Exceptions & stops consuming + closes channels - - Args: - process_payload (Callable): function passed to the queue manager, configured by implementing 
service - """ - callback = self._create_queue_callback(process_payload) - self._set_consumer_token(None) - - try: - self._open_channel() - self._set_consumer_token(self._channel.basic_consume(self._input_queue, callback)) - logger.info(f"Registered with consumer-tag: {self._consumer_token}") - self._channel.start_consuming() - - except Exception: - logger.error( - "An unexpected exception occurred while consuming messages. Consuming will stop.", exc_info=True - ) - raise - - finally: - self.stop_consuming() - self._connection.close() - - def stop_consuming(self): - if self._consumer_token and self._connection: - logger.info(f"Cancelling subscription for consumer-tag {self._consumer_token}") - self._channel.stop_consuming(self._consumer_token) - self._set_consumer_token(None) - - def _handle_stop_signal(self, signal_number, _stack_frame, *args, **kwargs): - logger.info(f"Received signal {signal_number}") - self.stop_consuming() - - def _create_queue_callback(self, process_payload: PayloadProcessor): - def process_message_body_and_await_result(unpacked_message_body): - with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: - logger.debug("Processing payload in separate thread.") - future = thread_pool_executor.submit(process_payload, unpacked_message_body) - - while future.running(): - logger.debug("Waiting for payload processing to finish...") - self._connection.sleep(float(self._connection_sleep)) - - try: - return future.result() - except Exception as err: - raise ProcessingFailure(f"QueueMessagePayload processing failed: {repr(err)}") from err - - def acknowledge_message_and_publish_response(frame, headers, response_body): - response_properties = pika.BasicProperties(headers=headers) if headers else None - self._channel.basic_publish("", self._output_queue, json.dumps(response_body).encode(), response_properties) - logger.debug(f"Result published, acknowledging incoming message with delivery_tag {frame.delivery_tag}.") - 
self._channel.basic_ack(frame.delivery_tag) - - def callback(_channel, frame, properties, body): - logger.info(f"Received message from queue with delivery_tag {frame.delivery_tag}.") - logger.debug(f"Message headers: {properties.headers}") - - # Only try to process each message once. Re-queueing will be handled by the dead-letter-exchange. This - # prevents endless retries on messages that are impossible to process. - if frame.redelivered: - logger.info( - f"Aborting message processing for delivery_tag {frame.delivery_tag} due to it being redelivered.", - ) - self._channel.basic_nack(frame.delivery_tag, requeue=False) - return - - try: - logger.debug(f"Processing {frame}, {properties}, {body}") - filtered_message_headers = safe_project(properties.headers, ["X-TENANT-ID"]) - message_body = {**json.loads(body), **filtered_message_headers} - - processing_result = process_message_body_and_await_result(message_body) - logger.info( - f"Processed message with delivery_tag {frame.delivery_tag}, publishing result to result-queue." 
- ) - acknowledge_message_and_publish_response(frame, filtered_message_headers, processing_result) - - except ProcessingFailure as err: - logger.info(f"Processing message with delivery_tag {frame.delivery_tag} failed, declining.") - logger.exception(err) - self._channel.basic_nack(frame.delivery_tag, requeue=False) - - except Exception: - n_attempts = _get_n_previous_attempts(properties) + 1 - logger.warning(f"Failed to process message, {n_attempts}", exc_info=True) - self._channel.basic_nack(frame.delivery_tag, requeue=False) - raise - - return callback - - -class QueueManagerV2: - def __init__(self, settings: Dynaconf = load_settings()): self.input_queue = settings.rabbitmq.input_queue self.output_queue = settings.rabbitmq.output_queue self.dead_letter_queue = settings.rabbitmq.dead_letter_queue @@ -224,9 +31,7 @@ class QueueManagerV2: self.connection: Union[BlockingConnection, None] = None self.channel: Union[BlockingChannel, None] = None - - self.consumer_thread: Union[threading.Thread, None] = None - self.worker_threads: list[threading.Thread] = [] + self.connection_sleep = settings.rabbitmq.connection_sleep atexit.register(self.stop_consuming) signal.signal(signal.SIGTERM, self._handle_stop_signal) @@ -244,7 +49,7 @@ class QueueManagerV2: return pika.ConnectionParameters(**pika_connection_params) - @retry(tries=5, delay=5, jitter=(1, 3)) + @retry(tries=3, delay=5, jitter=(1, 3), logger=logger) def establish_connection(self): # TODO: set sensible retry parameters if self.connection and self.connection.is_open: @@ -253,6 +58,8 @@ class QueueManagerV2: logger.info("Establishing connection to RabbitMQ...") self.connection = pika.BlockingConnection(parameters=self.connection_parameters) + + logger.debug("Opening channel...") self.channel = self.connection.channel() self.channel.basic_qos(prefetch_count=1) @@ -263,60 +70,24 @@ class QueueManagerV2: self.channel.queue_declare(self.input_queue, arguments=args, auto_delete=False, durable=True) 
self.channel.queue_declare(self.output_queue, arguments=args, auto_delete=False, durable=True) - logger.info("Connection to RabbitMQ established.") - def publish_message(self, message: dict, properties: pika.BasicProperties = None): + logger.info("Connection to RabbitMQ established, channel open.") + + def is_ready(self): self.establish_connection() - message_encoded = json.dumps(message).encode("utf-8") - self.channel.basic_publish( - "", - self.input_queue, - properties=properties, - body=message_encoded, - ) - logger.info(f"Published message to queue {self.input_queue}.") - - def get_message(self): - self.establish_connection() - return self.channel.basic_get(self.output_queue) - - def create_on_message_callback(self, callback: Callable): - - def process_message_body_and_await_result(unpacked_message_body): - with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: - logger.debug("Processing payload in separate thread.") - future = thread_pool_executor.submit(callback, unpacked_message_body) - - while future.running(): - logger.debug("Waiting for payload processing to finish...") - self.connection.process_data_events() - self.connection.sleep(5) - - return future.result() - - - def cb(ch, method, properties, body): - logger.info(f"Received message from queue with delivery_tag {method.delivery_tag}.") - result = process_message_body_and_await_result(body) - logger.info(f"Processed message with delivery_tag {method.delivery_tag}, publishing result to result-queue.") - ch.basic_publish( - "", - self.output_queue, - result, - ) - - ch.basic_ack(delivery_tag=method.delivery_tag) - logger.info(f"Message with delivery tag {method.delivery_tag} acknowledged.") - - return cb + return self.channel.is_open def start_consuming(self, message_processor: Callable): - on_message_callback = self.create_on_message_callback(message_processor) - self.establish_connection() - self.channel.basic_consume(self.input_queue, on_message_callback) + 
on_message_callback = self._make_on_message_callback(message_processor) + try: + self.establish_connection() + self.channel.basic_consume(self.input_queue, on_message_callback) self.channel.start_consuming() - except KeyboardInterrupt: + except Exception: + logger.error("An unexpected error occurred while consuming messages. Consuming will stop.", exc_info=True) + raise + finally: self.stop_consuming() def stop_consuming(self): @@ -330,14 +101,88 @@ class QueueManagerV2: logger.info("Closing connection to RabbitMQ...") self.connection.close() - logger.info("Waiting for worker threads to finish...") + def publish_message_to_input_queue(self, message: Union[str, bytes, dict], properties: pika.BasicProperties = None): + if isinstance(message, str): + message = message.encode("utf-8") + elif isinstance(message, dict): + message = json.dumps(message).encode("utf-8") - for thread in self.worker_threads: - logger.info(f"Stopping worker thread {thread.name}...") - thread.join() - logger.info(f"Worker thread {thread.name} stopped.") + self.establish_connection() + self.channel.basic_publish( + "", + self.input_queue, + properties=properties, + body=message, + ) + logger.info(f"Published message to queue {self.input_queue}.") + + def purge_queues(self): + self.establish_connection() + try: + self.channel.queue_purge(self.input_queue) + self.channel.queue_purge(self.output_queue) + logger.info("Queues purged.") + except pika.exceptions.ChannelWrongStateError: + pass + + def get_message_from_output_queue(self): + self.establish_connection() + return self.channel.basic_get(self.output_queue, auto_ack=True) + + def _make_on_message_callback(self, message_processor: Callable): + def process_message_body_and_await_result(unpacked_message_body): + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: + logger.debug("Processing payload in separate thread.") + future = thread_pool_executor.submit(message_processor, unpacked_message_body) + + while 
future.running(): + logger.debug("Waiting for payload processing to finish...") + self.connection.process_data_events() + self.connection.sleep(self.connection_sleep) + + return future.result() + + def on_message_callback(channel, method, properties, body): + logger.info(f"Received message from queue with delivery_tag {method.delivery_tag}.") + + if method.redelivered: + logger.warning(f"Declining message with {method.delivery_tag=} due to it being redelivered.") + channel.basic_nack(method.delivery_tag, requeue=False) + return + + if body.decode("utf-8") == "STOP": + logger.info(f"Received stop signal, stopping consuming...") + channel.basic_ack(delivery_tag=method.delivery_tag) + self.stop_consuming() + return + + try: + filtered_message_headers = ( + {k: v for k, v in properties.headers.items() if k.lower().startswith("x-")} + if properties.headers + else {} + ) + logger.debug(f"Processing message with {filtered_message_headers=}.") + result = process_message_body_and_await_result({**json.loads(body), **filtered_message_headers}) + + channel.basic_publish( + "", + self.output_queue, + result, + properties=pika.BasicProperties(headers=filtered_message_headers), + ) + logger.info(f"Published result to queue {self.output_queue}.") + + channel.basic_ack(delivery_tag=method.delivery_tag) + logger.debug(f"Message with {method.delivery_tag=} acknowledged.") + except Exception: + logger.warning(f"Failed to process message with {method.delivery_tag=}, declining...", exc_info=True) + channel.basic_nack(method.delivery_tag, requeue=False) + raise + + return on_message_callback def _handle_stop_signal(self, signum, *args, **kwargs): logger.info(f"Received signal {signum}, stopping consuming...") self.stop_consuming() - sys.exit(0) \ No newline at end of file + sys.exit(0) diff --git a/pyinfra/utils/config_validation.py b/pyinfra/utils/config_validation.py new file mode 100644 index 0000000..d257277 --- /dev/null +++ b/pyinfra/utils/config_validation.py @@ -0,0 +1,30 @@ 
+from dynaconf import Validator, Dynaconf, ValidationError +from kn_utils.logging import logger + +queue_manager_validators = [ + Validator("rabbitmq.host", must_exist=True), + Validator("rabbitmq.port", must_exist=True), + Validator("rabbitmq.username", must_exist=True), + Validator("rabbitmq.password", must_exist=True), + Validator("rabbitmq.heartbeat", must_exist=True), + Validator("rabbitmq.connection_sleep", must_exist=True), + Validator("rabbitmq.input_queue", must_exist=True), + Validator("rabbitmq.output_queue", must_exist=True), + Validator("rabbitmq.dead_letter_queue", must_exist=True), +] + + +def validate_settings(settings: Dynaconf, validators): + settings_valid = True + + for validator in validators: + try: + validator.validate(settings) + except ValidationError as e: + settings_valid = False + logger.warning(e) + + if not settings_valid: + raise ValidationError("Settings validation failed.") + + logger.info("Settings validated.") diff --git a/tests/conftest.py b/tests/conftest.py index 35f389c..6b16f8c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,13 +2,13 @@ import gzip import json import pytest -from pyinfra.config import get_config +from pyinfra.config import get_config, load_settings from pyinfra.payload_processing.payload import LegacyQueueMessagePayload, QueueMessagePayload @pytest.fixture(scope="session") def settings(): - return get_config() + return load_settings() @pytest.fixture diff --git a/tests/tests_with_docker_compose/queue_test.py b/tests/tests_with_docker_compose/queue_test.py index e48c15f..43d6096 100644 --- a/tests/tests_with_docker_compose/queue_test.py +++ b/tests/tests_with_docker_compose/queue_test.py @@ -1,46 +1,102 @@ import json -from multiprocessing import Process +from sys import stdout from time import sleep +import pika +import pytest from kn_utils.logging import logger -from pyinfra.config import get_config -from pyinfra.queue.development_queue_manager import DevelopmentQueueManager -from 
pyinfra.queue.queue_manager import QueueManager, QueueManagerV2 +from pyinfra.queue.queue_manager import QueueManager + +logger.remove() +logger.add(sink=stdout, level="DEBUG") -def callback(x): - sleep(4) - response = json.dumps({"status": "success"}).encode("utf-8") - return response +def make_callback(process_time): + def callback(x): + sleep(process_time) + return json.dumps({"status": "success"}).encode("utf-8") + + return callback + + +@pytest.fixture(scope="session") +def queue_manager(settings): + settings.rabbitmq_heartbeat = 10 + settings.connection_sleep = 5 + queue_manager = QueueManager(settings) + yield queue_manager + + +@pytest.fixture +def input_message(): + return json.dumps({ + "targetFilePath": "test/target.json.gz", + "responseFilePath": "test/response.json.gz", + }) + + +@pytest.fixture +def stop_message(): + return "STOP" + class TestQueueManager: - def test_basic_functionality(self, settings): - message = { - "targetFilePath": "test/target.json.gz", - "responseFilePath": "test/response.json.gz", - } + def test_processing_of_several_messages(self, queue_manager, input_message, stop_message): + queue_manager.purge_queues() - queue_manager = QueueManagerV2() - # queue_manager_old = QueueManager(get_config()) + for _ in range(2): + queue_manager.publish_message_to_input_queue(input_message) - queue_manager.publish_message(message) - queue_manager.publish_message(message) - queue_manager.publish_message(message) - logger.info("Published message") + queue_manager.publish_message_to_input_queue(stop_message) - # consume = lambda: queue_manager.start_consuming(callback) - consume = lambda: queue_manager.start_consuming(callback) - p = Process(target=consume) - p.start() + callback = make_callback(1) + queue_manager.start_consuming(callback) - wait_time = 20 - # logger.info(f"Waiting {wait_time} seconds for the consumer to process the message...") - sleep(wait_time) + for _ in range(2): + response = queue_manager.get_message_from_output_queue() + 
assert response is not None + assert response[2] == b'{"status": "success"}' + print(response) - p.kill() + def test_all_headers_beginning_with_x_are_forwarded(self, queue_manager, input_message, stop_message): + queue_manager.purge_queues() - response = queue_manager.get_message() + properties = pika.BasicProperties( + headers={ + "X-TENANT-ID": "redaction", + "X-OTHER-HEADER": "other-header-value", + "x-tenant_id": "tenant-id-value", + "x_should_not_be_forwarded": "should-not-be-forwarded-value", + } + ) - logger.info(f"Response: {response}") + queue_manager.publish_message_to_input_queue(input_message, properties=properties) + queue_manager.publish_message_to_input_queue(stop_message) + + callback = make_callback(0.2) + queue_manager.start_consuming(callback) + + response = queue_manager.get_message_from_output_queue() print(response) + + assert response[2] == b'{"status": "success"}' + + assert response[1].headers["X-TENANT-ID"] == "redaction" + assert response[1].headers["X-OTHER-HEADER"] == "other-header-value" + assert response[1].headers["x-tenant_id"] == "tenant-id-value" + + assert "x_should_not_be_forwarded" not in response[1].headers + + def test_message_processing_does_not_block_heartbeat(self, queue_manager, input_message, stop_message): + queue_manager.purge_queues() + + queue_manager.publish_message_to_input_queue(input_message) + queue_manager.publish_message_to_input_queue(stop_message) + + callback = make_callback(15) + queue_manager.start_consuming(callback) + + response = queue_manager.get_message_from_output_queue() + + assert response[2] == b'{"status": "success"}' From 27917863c90cb05a7a80e9921bfdcde16e0e4e75 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 16 Jan 2024 14:21:41 +0100 Subject: [PATCH 05/39] refactor: finnish queue manager, queue manager tests, also add validation logic, integrate new settings --- pyinfra/k8s_probes/__init__.py | 0 pyinfra/k8s_probes/startup.py | 36 ----- .../queue/{queue_manager.py => manager.py} | 
8 +- .../queue_manager_test.py | 123 ------------------ tests/tests_with_docker_compose/queue_test.py | 6 +- 5 files changed, 8 insertions(+), 165 deletions(-) delete mode 100644 pyinfra/k8s_probes/__init__.py delete mode 100644 pyinfra/k8s_probes/startup.py rename pyinfra/queue/{queue_manager.py => manager.py} (96%) delete mode 100644 tests/tests_with_docker_compose/queue_manager_test.py diff --git a/pyinfra/k8s_probes/__init__.py b/pyinfra/k8s_probes/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyinfra/k8s_probes/startup.py b/pyinfra/k8s_probes/startup.py deleted file mode 100644 index 9a8a183..0000000 --- a/pyinfra/k8s_probes/startup.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys -from kn_utils.logging import logger -from pathlib import Path - -from pyinfra.queue.queue_manager import token_file_name - - -def check_token_file(): - """ - Checks if the token file of the QueueManager exists and is not empty, i.e. the queue manager has been started. - - NOTE: This function suppresses all Exception's. - - Returns True if the queue manager has been started, False otherwise - """ - - try: - token_file_path = Path(token_file_name()) - - if token_file_path.exists(): - with token_file_path.open(mode="r", encoding="utf8") as token_file: - contents = token_file.read().strip() - - return contents != "" - # We intentionally do not handle exception here, since we're only using this in a short script. 
- # Take care to expand this if the intended use changes - except Exception as err: - logger.warning(f"{err}: Caught exception when reading from token file", exc_info=True) - return False - - -def run_checks(): - if check_token_file(): - sys.exit(0) - else: - sys.exit(1) diff --git a/pyinfra/queue/queue_manager.py b/pyinfra/queue/manager.py similarity index 96% rename from pyinfra/queue/queue_manager.py rename to pyinfra/queue/manager.py index ecf6242..5c5e238 100644 --- a/pyinfra/queue/queue_manager.py +++ b/pyinfra/queue/manager.py @@ -18,6 +18,8 @@ from pyinfra.utils.config_validation import validate_settings, queue_manager_val pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter +MessageProcessor = Callable[[dict], dict] + class QueueManager: def __init__(self, settings: Dynaconf): @@ -129,7 +131,7 @@ class QueueManager: self.establish_connection() return self.channel.basic_get(self.output_queue, auto_ack=True) - def _make_on_message_callback(self, message_processor: Callable): + def _make_on_message_callback(self, message_processor: MessageProcessor): def process_message_body_and_await_result(unpacked_message_body): with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: logger.debug("Processing payload in separate thread.") @@ -163,7 +165,9 @@ class QueueManager: else {} ) logger.debug(f"Processing message with {filtered_message_headers=}.") - result = process_message_body_and_await_result({**json.loads(body), **filtered_message_headers}) + result: dict = ( + process_message_body_and_await_result({**json.loads(body), **filtered_message_headers}) or {} + ) channel.basic_publish( "", diff --git a/tests/tests_with_docker_compose/queue_manager_test.py b/tests/tests_with_docker_compose/queue_manager_test.py deleted file mode 100644 index d6c9118..0000000 --- a/tests/tests_with_docker_compose/queue_manager_test.py +++ /dev/null @@ -1,123 +0,0 @@ -import json -import 
logging -import time -from multiprocessing import Process - -import pika -import pika.exceptions -import pytest - -from pyinfra.queue.development_queue_manager import DevelopmentQueueManager -from pyinfra.queue.queue_manager import QueueManager - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -@pytest.fixture(scope="session") -def development_queue_manager(test_queue_config): - test_queue_config.rabbitmq_heartbeat = 7200 - development_queue_manager = DevelopmentQueueManager(test_queue_config) - yield development_queue_manager - logger.info("Tearing down development queue manager...") - try: - development_queue_manager.close_channel() - except pika.exceptions.ConnectionClosedByBroker: - pass - - -@pytest.fixture(scope="session") -def payload_processing_time(test_queue_config, offset=5): - # FIXME: this implicitly tests the heartbeat when running the end-to-end test. There should be another way to test - # this explicitly. - return test_queue_config.rabbitmq_heartbeat + offset - - -@pytest.fixture(scope="session") -def payload_processor(response_payload, payload_processing_time, payload_processor_type): - def process(payload): - time.sleep(payload_processing_time) - return response_payload - - def process_with_failure(payload): - raise MemoryError - - if payload_processor_type == "mock": - return process - elif payload_processor_type == "failing": - return process_with_failure - - -@pytest.fixture(scope="session", autouse=True) -def start_queue_consumer(test_queue_config, payload_processor, sleep_seconds=5): - def consume_queue(): - queue_manager.start_consuming(payload_processor) - - queue_manager = QueueManager(test_queue_config) - p = Process(target=consume_queue) - p.start() - logger.info(f"Setting up consumer, waiting for {sleep_seconds}...") - time.sleep(sleep_seconds) - yield - logger.info("Tearing down consumer...") - p.terminate() - - -@pytest.fixture -def message_properties(message_headers): - if not message_headers: - return 
pika.BasicProperties(headers=None) - elif message_headers == "X-TENANT-ID": - return pika.BasicProperties(headers={"X-TENANT-ID": "redaction"}) - else: - raise Exception(f"Invalid {message_headers=}.") - - -@pytest.mark.parametrize("x_tenant_id", [None]) -class TestQueueManager: - # FIXME: All tests here are wonky. This is due to the implementation of running the process-blocking queue_manager - # in a subprocess. It is then very hard to interact directly with the subprocess. If you have a better idea, please - # refactor; the tests here are insufficient to ensure the functionality of the queue manager! - @pytest.mark.parametrize("payload_processor_type", ["mock"], scope="session") - def test_message_processing_does_not_block_heartbeat( - self, development_queue_manager, payload, response_payload, payload_processing_time - ): - development_queue_manager.clear_queues() - development_queue_manager.publish_request(payload) - time.sleep(payload_processing_time + 10) - _, _, body = development_queue_manager.get_response() - result = json.loads(body) - assert result == response_payload - - @pytest.mark.parametrize("message_headers", [None, "X-TENANT-ID"]) - @pytest.mark.parametrize("payload_processor_type", ["mock"], scope="session") - def test_queue_manager_forwards_message_headers( - self, - development_queue_manager, - payload, - response_payload, - payload_processing_time, - message_properties, - ): - development_queue_manager.clear_queues() - development_queue_manager.publish_request(payload, message_properties) - time.sleep(payload_processing_time + 10) - _, properties, _ = development_queue_manager.get_response() - assert properties.headers == message_properties.headers - - # FIXME: It is not possible to test the behavior of the queue manager directly, since it is running in a separate - # process. You require logging to see if the exception is handled correctly. 
Hence, this test is only useful for - # development, but insufficient to guarantee the correct behavior. - @pytest.mark.parametrize("payload_processor_type", ["failing"], scope="session") - def test_failed_message_processing_is_handled( - self, - development_queue_manager, - payload, - response_payload, - payload_processing_time, - ): - development_queue_manager.clear_queues() - development_queue_manager.publish_request(payload) - time.sleep(payload_processing_time + 10) - _, _, body = development_queue_manager.get_response() - assert not body diff --git a/tests/tests_with_docker_compose/queue_test.py b/tests/tests_with_docker_compose/queue_test.py index 43d6096..449cf30 100644 --- a/tests/tests_with_docker_compose/queue_test.py +++ b/tests/tests_with_docker_compose/queue_test.py @@ -6,7 +6,7 @@ import pika import pytest from kn_utils.logging import logger -from pyinfra.queue.queue_manager import QueueManager +from pyinfra.queue.manager import QueueManager logger.remove() logger.add(sink=stdout, level="DEBUG") @@ -15,7 +15,7 @@ logger.add(sink=stdout, level="DEBUG") def make_callback(process_time): def callback(x): sleep(process_time) - return json.dumps({"status": "success"}).encode("utf-8") + return json.dumps({"status": "success"}) return callback @@ -57,7 +57,6 @@ class TestQueueManager: response = queue_manager.get_message_from_output_queue() assert response is not None assert response[2] == b'{"status": "success"}' - print(response) def test_all_headers_beginning_with_x_are_forwarded(self, queue_manager, input_message, stop_message): queue_manager.purge_queues() @@ -78,7 +77,6 @@ class TestQueueManager: queue_manager.start_consuming(callback) response = queue_manager.get_message_from_output_queue() - print(response) assert response[2] == b'{"status": "success"}' From e5c8a6e9f1574ccbeb9b746cc59dc148de56d893 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 16 Jan 2024 15:34:56 +0100 Subject: [PATCH 06/39] refactor: update storages with dynaconf 
logic, add validators, repair test --- pyinfra/storage/storage.py | 29 ++++++++++--------- pyinfra/storage/storages/azure.py | 13 +++++---- pyinfra/storage/storages/s3.py | 23 ++++++++++----- pyinfra/utils/config_validation.py | 15 ++++++++++ .../tests_with_docker_compose/storage_test.py | 12 +++----- 5 files changed, 58 insertions(+), 34 deletions(-) diff --git a/pyinfra/storage/storage.py b/pyinfra/storage/storage.py index bd849d8..c452f0d 100644 --- a/pyinfra/storage/storage.py +++ b/pyinfra/storage/storage.py @@ -1,32 +1,30 @@ from functools import lru_cache, partial from typing import Callable +from dynaconf import Dynaconf from funcy import compose -from pyinfra.config import Config -from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_from_storage_info from pyinfra.storage.storages.interface import Storage +from pyinfra.storage.storages.s3 import get_s3_storage_from_settings from pyinfra.utils.compressing import get_decompressor, get_compressor +from pyinfra.utils.config_validation import validate_settings, storage_validators from pyinfra.utils.encoding import get_decoder, get_encoder -def get_storage_from_config(config: Config) -> Storage: +def get_storage_from_settings(settings: Dynaconf) -> Storage: + validate_settings(settings, storage_validators) - storage_info = get_storage_info_from_config(config) - storage = get_storage_from_storage_info(storage_info) - - return storage + return storage_dispatcher[settings.storage.backend](settings) -def verify_existence(storage: Storage, bucket: str, file_name: str) -> str: - if not storage.exists(bucket, file_name): - raise FileNotFoundError(f"{file_name=} name not found on storage in {bucket=}.") - return file_name +storage_dispatcher = { + "azure": get_s3_storage_from_settings, + "s3": get_s3_storage_from_settings, +} @lru_cache(maxsize=10) def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - verify = partial(verify_existence, 
storage, bucket) download = partial(storage.get_object, bucket) decompress = get_decompressor(compression_type) @@ -37,7 +35,6 @@ def make_downloader(storage: Storage, bucket: str, file_type: str, compression_t @lru_cache(maxsize=10) def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - upload = partial(storage.put_object, bucket) compress = get_compressor(compression_type) encode = get_encoder(file_type) @@ -46,3 +43,9 @@ def make_uploader(storage: Storage, bucket: str, file_type: str, compression_typ upload(file_name, compose(compress, encode)(file_bytes)) return inner + + +def verify_existence(storage: Storage, bucket: str, file_name: str) -> str: + if not storage.exists(bucket, file_name): + raise FileNotFoundError(f"{file_name=} name not found on storage in {bucket=}.") + return file_name diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index 4602da9..4c7467c 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ -1,12 +1,14 @@ import logging -from azure.storage.blob import BlobServiceClient, ContainerClient from itertools import repeat -from kn_utils.logging import logger from operator import attrgetter + +from azure.storage.blob import BlobServiceClient, ContainerClient +from dynaconf import Dynaconf +from kn_utils.logging import logger from retry import retry -from pyinfra.config import Config from pyinfra.storage.storages.interface import Storage +from pyinfra.utils.config_validation import azure_storage_validators, validate_settings logging.getLogger("azure").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) @@ -74,5 +76,6 @@ class AzureStorage(Storage): return zip(repeat(bucket_name), map(attrgetter("name"), blobs)) -def get_azure_storage_from_config(config: Config): - return AzureStorage(BlobServiceClient.from_connection_string(conn_str=config.storage_azureconnectionstring)) +def 
get_azure_storage_from_settings(settings: Dynaconf): + validate_settings(settings, azure_storage_validators) + return AzureStorage(BlobServiceClient.from_connection_string(conn_str=settings.storage.azure.connection_string)) diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index cdf35af..57c8ac0 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -1,12 +1,15 @@ import io from itertools import repeat +from operator import attrgetter + +from dynaconf import Dynaconf from kn_utils.logging import logger from minio import Minio -from operator import attrgetter from retry import retry -from pyinfra.config import Config from pyinfra.storage.storages.interface import Storage +from pyinfra.utils.config_validation import validate_settings, s3_storage_validators +from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint class S3Storage(Storage): @@ -63,13 +66,17 @@ class S3Storage(Storage): return zip(repeat(bucket_name), map(attrgetter("object_name"), objs)) -def get_s3_storage_from_config(config: Config): +def get_s3_storage_from_settings(settings: Dynaconf): + validate_settings(settings, s3_storage_validators) + + secure, endpoint = validate_and_parse_s3_endpoint(settings.storage.s3.endpoint) + return S3Storage( Minio( - secure=config.storage_secure_connection, - endpoint=config.storage_endpoint, - access_key=config.storage_key, - secret_key=config.storage_secret, - region=config.storage_region, + secure=secure, + endpoint=endpoint, + access_key=settings.storage.s3.key, + secret_key=settings.storage.s3.secret, + region=settings.storage.s3.region, ) ) diff --git a/pyinfra/utils/config_validation.py b/pyinfra/utils/config_validation.py index d257277..c2c2c13 100644 --- a/pyinfra/utils/config_validation.py +++ b/pyinfra/utils/config_validation.py @@ -13,6 +13,21 @@ queue_manager_validators = [ Validator("rabbitmq.dead_letter_queue", must_exist=True), ] +azure_storage_validators = [ + 
Validator("storage.azure.connection_string", must_exist=True), +] + +s3_storage_validators = [ + Validator("storage.s3.endpoint", must_exist=True), + Validator("storage.s3.key", must_exist=True), + Validator("storage.s3.secret", must_exist=True), + Validator("storage.s3.region", must_exist=True), +] + +storage_validators = [ + Validator("storage.backend", must_exist=True), +] + def validate_settings(settings: Dynaconf, validators): settings_valid = True diff --git a/tests/tests_with_docker_compose/storage_test.py b/tests/tests_with_docker_compose/storage_test.py index 2a8c0c7..6f0bbd6 100644 --- a/tests/tests_with_docker_compose/storage_test.py +++ b/tests/tests_with_docker_compose/storage_test.py @@ -1,24 +1,20 @@ import pytest -from pyinfra.storage.storage import get_storage_from_config +from pyinfra.storage.storage import get_storage_from_settings @pytest.fixture(scope="session") def storage(storage_backend, bucket_name, settings): - settings.storage_backend = storage_backend - settings.storage_bucket = bucket_name + settings.storage.backend = storage_backend - storage = get_storage_from_config(settings) + storage = get_storage_from_settings(settings) storage.make_bucket(bucket_name) yield storage storage.clear_bucket(bucket_name) -# @pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session") -# FIXME: Azure storage test needs the secret azure connection string -# when the config is refactored as file, add this and provide file via bitwarden -@pytest.mark.parametrize("storage_backend", ["s3"], scope="session") +@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session") @pytest.mark.parametrize("bucket_name", ["bucket"], scope="session") class TestStorage: def test_clearing_bucket_yields_empty_bucket(self, storage, bucket_name): From f31693d36a0899daa96f407ae563b57f053a56dd Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 16 Jan 2024 17:24:53 +0100 Subject: [PATCH 07/39] refactor: adapt prometheus monitoring logic 
to work with other webservers WIP --- poetry.lock | 218 ++++++++++++++++++++++++++++- pyinfra/monitor/__init__.py | 0 pyinfra/monitor/prometheus.py | 75 ++++++++++ pyinfra/utils/config_validation.py | 5 + pyproject.toml | 1 + 5 files changed, 298 insertions(+), 1 deletion(-) create mode 100644 pyinfra/monitor/__init__.py create mode 100644 pyinfra/monitor/prometheus.py diff --git a/poetry.lock b/poetry.lock index 927cefb..ff4b69b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,38 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop 
(>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "appnope" version = "0.1.3" @@ -607,6 +640,25 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +[[package]] +name = "fastapi" +version = "0.109.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.109.0-py3-none-any.whl", hash = "sha256:8c77515984cd8e8cfeb58364f8cc7a28f0692088475e2614f7bf03275eba9093"}, + {file = "fastapi-0.109.0.tar.gz", hash = "sha256:b978095b9ee01a5cf49b19f4bc1ac9b8ca83aa076e770ef8fd9af09a2b88d191"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.35.0,<0.36.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + [[package]] name = "funcy" version = "2.0" @@ -1150,6 +1202,142 @@ files = [ {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, ] +[[package]] +name = "pydantic" +version = "2.5.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, + {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.6" +typing-extensions = ">=4.6.1" 
+ +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.6" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, + {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, + {file 
= "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, + {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, + {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = 
"sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, + {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, + {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = 
"sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, + {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, + {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, + {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, + {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, + 
{file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, + {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, + {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = 
"sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, + {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, + {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" version = "2.17.2" @@ -1523,6 +1711,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = 
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "stack-data" version = "0.6.3" @@ -1542,6 +1741,23 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "starlette" +version = "0.35.1" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.35.1-py3-none-any.whl", hash = "sha256:50bbbda9baa098e361f398fda0928062abbaf1f54f4fadcbe17c092a01eb9a25"}, + {file = "starlette-0.35.1.tar.gz", hash = "sha256:3e2639dac3520e4f58734ed22553f950d3f3cb1001cd2eaac4d57e8cdc5f66bc"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + [[package]] name = "tomli" version = "2.0.1" @@ -1654,4 +1870,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = "10df8149ba27804a95d2c9dcf05db0601fc5d3f774df1a4097fc21d301d2174c" +content-hash = "7ade3c9e8e0b7897da004073c8827e18d6e2d3c575a0c4f48ce46dcc58af1e1f" diff --git a/pyinfra/monitor/__init__.py b/pyinfra/monitor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyinfra/monitor/prometheus.py b/pyinfra/monitor/prometheus.py new file mode 100644 index 0000000..debf2da --- /dev/null +++ b/pyinfra/monitor/prometheus.py @@ -0,0 +1,75 @@ +from time import time +from typing import Sized, Callable, TypeVar + +from dynaconf import Dynaconf +from fastapi import FastAPI +from funcy import identity +from prometheus_client import generate_latest, CollectorRegistry, REGISTRY, Summary +from starlette.responses import Response + +from pyinfra.utils.config_validation import validate_settings, prometheus_validators + + +def add_prometheus_endpoint(app: FastAPI, registry: CollectorRegistry = REGISTRY) -> FastAPI: + """Add a prometheus endpoint to the app. 
It is recommended to use the default global registry. + You can register your own metrics with it anywhere, and they will be scraped with this endpoint. + See https://prometheus.io/docs/concepts/metric_types/ for the different metric types. + The implementation for monitoring the processing time of a function is in the decorator below (decorate the + processing function of a service to assess the processing time of each call). + + The convention for the metric name is {product_name}_{service_name}_{parameter_to_monitor}. + """ + + @app.get("/prometheus") + def prometheus_metrics(): + return Response(generate_latest(registry), media_type="text/plain") + + return app + + +def make_prometheus_processing_time_decorator_from_settings(settings: Dynaconf, registry: CollectorRegistry = REGISTRY): + """Make a decorator for monitoring the processing time of a function. The decorator is only applied if the + prometheus metrics are enabled in the settings. + """ + validate_settings(settings, validators=prometheus_validators) + + if not settings.metrics.prometheus.enabled: + return identity + + return make_prometheus_processing_time_decorator( + prefix=settings.metrics.prometheus.prefix, + registry=registry, + ) + + +Decorator = TypeVar("Decorator", bound=Callable[[Callable], Callable]) + + +def make_prometheus_processing_time_decorator( + prefix: str = "readactmanager_research_service", + registry: CollectorRegistry = REGISTRY, +) -> Decorator: + processing_time_sum = Summary( + f"{prefix}_processing_time", "Summed up average processing time per call.", registry=registry + ) + + def decorator(process_fn: Callable) -> Callable: + def inner(*args, **kwargs): + start = time() + + result: Sized = process_fn(*args, **kwargs) + + runtime = time() - start + + if not result: + return result + + processing_time_per_entity = runtime / len(result) + + processing_time_sum.observe(processing_time_per_entity) + + return result + + return inner + + return decorator diff --git 
a/pyinfra/utils/config_validation.py b/pyinfra/utils/config_validation.py index c2c2c13..056b4a3 100644 --- a/pyinfra/utils/config_validation.py +++ b/pyinfra/utils/config_validation.py @@ -28,6 +28,11 @@ storage_validators = [ Validator("storage.backend", must_exist=True), ] +prometheus_validators = [ + Validator("metrics.prometheus.prefix", must_exist=True), + Validator("metrics.prometheus.enabled", must_exist=True), +] + def validate_settings(settings: Dynaconf, validators): settings_valid = True diff --git a/pyproject.toml b/pyproject.toml index 3a8d82d..e6a7126 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ funcy = "^2" pycryptodome = "^3.19" # research shared packages kn-utils = { version = "^0.2.4.dev112", source = "gitlab-research" } +fastapi = "^0.109.0" [tool.poetry.group.dev.dependencies] pytest = "^7" From 358e227251f6ef9a6b7b61eb5c1f2a3d21ee8696 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 17 Jan 2024 17:39:53 +0100 Subject: [PATCH 08/39] fix prometheus tests WIP --- poetry.lock | 32 +++++++++++- pyinfra/monitor/prometheus.py | 19 +++---- pyinfra/utils/config_validation.py | 5 ++ pyinfra/webserver.py | 18 +++++++ pyproject.toml | 1 + .../prometheus_monitoring_test.py | 50 +++++++++++++++++++ 6 files changed, 113 insertions(+), 12 deletions(-) create mode 100644 pyinfra/webserver.py create mode 100644 tests/tests_with_docker_compose/prometheus_monitoring_test.py diff --git a/poetry.lock b/poetry.lock index ff4b69b..d820b51 100644 --- a/poetry.lock +++ b/poetry.lock @@ -670,6 +670,17 @@ files = [ {file = "funcy-2.0.tar.gz", hash = "sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb"}, ] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = 
"h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "idna" version = "3.6" @@ -1842,6 +1853,25 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "uvicorn" +version = "0.26.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.26.0-py3-none-any.whl", hash = "sha256:cdb58ef6b8188c6c174994b2b1ba2150a9a8ae7ea5fb2f1b856b94a815d6071d"}, + {file = "uvicorn-0.26.0.tar.gz", hash = "sha256:48bfd350fce3c5c57af5fb4995fded8fb50da3b4feb543eb18ad7e0d54589602"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + [[package]] name = "wcwidth" version = "0.2.12" @@ -1870,4 +1900,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = "7ade3c9e8e0b7897da004073c8827e18d6e2d3c575a0c4f48ce46dcc58af1e1f" +content-hash = "947961b5c6b624da6ff0644fa320e8854255dc893e0ffb697b67576c4da86eb8" diff --git a/pyinfra/monitor/prometheus.py b/pyinfra/monitor/prometheus.py index debf2da..8dc8b01 100644 --- a/pyinfra/monitor/prometheus.py +++ b/pyinfra/monitor/prometheus.py @@ -27,7 +27,12 @@ def add_prometheus_endpoint(app: FastAPI, registry: CollectorRegistry = REGISTRY return app -def make_prometheus_processing_time_decorator_from_settings(settings: Dynaconf, registry: CollectorRegistry = REGISTRY): +Decorator = TypeVar("Decorator", bound=Callable[[Callable], Callable]) + + +def make_prometheus_processing_time_decorator_from_settings( + settings: Dynaconf, registry: CollectorRegistry 
= REGISTRY +) -> Decorator: """Make a decorator for monitoring the processing time of a function. The decorator is only applied if the prometheus metrics are enabled in the settings. """ @@ -42,9 +47,6 @@ def make_prometheus_processing_time_decorator_from_settings(settings: Dynaconf, ) -Decorator = TypeVar("Decorator", bound=Callable[[Callable], Callable]) - - def make_prometheus_processing_time_decorator( prefix: str = "readactmanager_research_service", registry: CollectorRegistry = REGISTRY, @@ -57,16 +59,11 @@ def make_prometheus_processing_time_decorator( def inner(*args, **kwargs): start = time() - result: Sized = process_fn(*args, **kwargs) + result = process_fn(*args, **kwargs) runtime = time() - start - if not result: - return result - - processing_time_per_entity = runtime / len(result) - - processing_time_sum.observe(processing_time_per_entity) + processing_time_sum.observe(runtime) return result diff --git a/pyinfra/utils/config_validation.py b/pyinfra/utils/config_validation.py index 056b4a3..028f5d4 100644 --- a/pyinfra/utils/config_validation.py +++ b/pyinfra/utils/config_validation.py @@ -33,6 +33,11 @@ prometheus_validators = [ Validator("metrics.prometheus.enabled", must_exist=True), ] +webserver_validators = [ + Validator("webserver.host", must_exist=True), + Validator("webserver.port", must_exist=True), +] + def validate_settings(settings: Dynaconf, validators): settings_valid = True diff --git a/pyinfra/webserver.py b/pyinfra/webserver.py new file mode 100644 index 0000000..acecf24 --- /dev/null +++ b/pyinfra/webserver.py @@ -0,0 +1,18 @@ +import logging +import threading + +import uvicorn +from dynaconf import Dynaconf +from fastapi import FastAPI + +from pyinfra.utils.config_validation import validate_settings, webserver_validators + + +def create_webserver_thread(app: FastAPI, settings: Dynaconf) -> threading.Thread: + validate_settings(settings, validators=webserver_validators) + + return threading.Thread( + target=lambda: uvicorn.run( + 
app, port=settings.webserver.port, host=settings.webserver.host, log_level=logging.WARNING + ) + ) diff --git a/pyproject.toml b/pyproject.toml index e6a7126..3f3b41f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ pycryptodome = "^3.19" # research shared packages kn-utils = { version = "^0.2.4.dev112", source = "gitlab-research" } fastapi = "^0.109.0" +uvicorn = "^0.26.0" [tool.poetry.group.dev.dependencies] pytest = "^7" diff --git a/tests/tests_with_docker_compose/prometheus_monitoring_test.py b/tests/tests_with_docker_compose/prometheus_monitoring_test.py new file mode 100644 index 0000000..d6b7c8a --- /dev/null +++ b/tests/tests_with_docker_compose/prometheus_monitoring_test.py @@ -0,0 +1,50 @@ +from time import sleep + +import pytest +import requests +from fastapi import FastAPI + +from pyinfra.monitor.prometheus import add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings +from pyinfra.webserver import create_webserver_thread + + +@pytest.fixture(scope="function") +def app_with_prometheus_endpoint(settings): + app = FastAPI() + app = add_prometheus_endpoint(app) + thread = create_webserver_thread(app, settings) + thread.daemon = True + thread.start() + thread.join(timeout=1) + + +@pytest.fixture +def monitored_function(settings): + @make_prometheus_processing_time_decorator_from_settings(settings) + def process(*args, **kwargs): + sleep(0.5) + + return process + + +class TestPrometheusMonitor: + # def test_prometheus_endpoint_is_available(self, app_with_prometheus_endpoint, settings): + # resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + # assert resp.status_code == 200 + + def test_processing_with_a_monitored_fn_increases_parameter_counter( + self, app_with_prometheus_endpoint, monitored_function, settings + ): + resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + print(resp.text) + monitored_function() + resp = 
requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + print(resp.text) + # assert resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count") == 1 + + monitored_function() + resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + print(resp.text) + monitored_function() + # print(resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count")) + # assert resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count") == 2 From 17c5eebdf6f53b084bb0af23c6151125e82c4cb4 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 18 Jan 2024 08:19:46 +0100 Subject: [PATCH 09/39] finnish prometheus --- .../docker-compose.yml | 0 .../prometheus_monitoring_test.py | 26 ++++++++++--------- 2 files changed, 14 insertions(+), 12 deletions(-) rename tests/{tests_with_docker_compose => }/docker-compose.yml (100%) diff --git a/tests/tests_with_docker_compose/docker-compose.yml b/tests/docker-compose.yml similarity index 100% rename from tests/tests_with_docker_compose/docker-compose.yml rename to tests/docker-compose.yml diff --git a/tests/tests_with_docker_compose/prometheus_monitoring_test.py b/tests/tests_with_docker_compose/prometheus_monitoring_test.py index d6b7c8a..a6d0df1 100644 --- a/tests/tests_with_docker_compose/prometheus_monitoring_test.py +++ b/tests/tests_with_docker_compose/prometheus_monitoring_test.py @@ -1,3 +1,4 @@ +import re from time import sleep import pytest @@ -8,13 +9,15 @@ from pyinfra.monitor.prometheus import add_prometheus_endpoint, make_prometheus_ from pyinfra.webserver import create_webserver_thread -@pytest.fixture(scope="function") +@pytest.fixture(scope="class") def app_with_prometheus_endpoint(settings): app = FastAPI() app = add_prometheus_endpoint(app) thread = create_webserver_thread(app, settings) thread.daemon = True thread.start() + sleep(1) + yield thread.join(timeout=1) @@ -28,23 +31,22 @@ def 
monitored_function(settings): class TestPrometheusMonitor: - # def test_prometheus_endpoint_is_available(self, app_with_prometheus_endpoint, settings): - # resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") - # assert resp.status_code == 200 + def test_prometheus_endpoint_is_available(self, app_with_prometheus_endpoint, settings): + resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + assert resp.status_code == 200 def test_processing_with_a_monitored_fn_increases_parameter_counter( self, app_with_prometheus_endpoint, monitored_function, settings ): + pattern = re.compile(rf".*{settings.metrics.prometheus.prefix}_processing_time_count (\d\.\d).*") + resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") - print(resp.text) - monitored_function() - resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") - print(resp.text) - # assert resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count") == 1 + assert pattern.search(resp.text).group(1) == "0.0" monitored_function() resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") - print(resp.text) + assert pattern.search(resp.text).group(1) == "1.0" + monitored_function() - # print(resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count")) - # assert resp.text.count(f"{settings.metrics.prometheus.prefix}_processing_time_count") == 2 + resp = requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/prometheus") + assert pattern.search(resp.text).group(1) == "2.0" From ec5ad09fa8d07ad9f9b51531d6971e9cf6046a5a Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 18 Jan 2024 11:22:27 +0100 Subject: [PATCH 10/39] refactor: multi tenant storage connection --- pyinfra/payload_processing/monitor.py | 57 -------- pyinfra/storage/connection.py | 124 ++++++++++++++++ 
pyinfra/storage/storage.py | 51 ------- pyinfra/storage/storage_info.py | 125 ---------------- pyinfra/storage/storage_provider.py | 55 ------- pyinfra/storage/storages/azure.py | 57 ++++---- pyinfra/storage/storages/interface.py | 21 +-- pyinfra/storage/storages/mock.py | 31 ++-- pyinfra/storage/storages/s3.py | 54 +++---- pyinfra/utils/config_validation.py | 8 ++ pyinfra/webserver.py | 12 +- .../prometheus_monitoring_test.py | 4 +- .../tests_with_docker_compose/storage_test.py | 135 ++++++++++++------ 13 files changed, 328 insertions(+), 406 deletions(-) delete mode 100644 pyinfra/payload_processing/monitor.py create mode 100644 pyinfra/storage/connection.py delete mode 100644 pyinfra/storage/storage.py delete mode 100644 pyinfra/storage/storage_info.py delete mode 100644 pyinfra/storage/storage_provider.py diff --git a/pyinfra/payload_processing/monitor.py b/pyinfra/payload_processing/monitor.py deleted file mode 100644 index 5ea2d94..0000000 --- a/pyinfra/payload_processing/monitor.py +++ /dev/null @@ -1,57 +0,0 @@ -from funcy import identity -from operator import attrgetter -from prometheus_client import Summary, start_http_server, CollectorRegistry -from time import time -from typing import Callable, Any, Sized - -from pyinfra.config import Config - - -class PrometheusMonitor: - def __init__(self, prefix: str, host: str, port: int): - """Register the monitoring metrics and start a webserver where they can be scraped at the endpoint - http://{host}:{port}/prometheus - - Args: - prefix: should per convention consist of {product_name}_{service_name}_{parameter_to_monitor} - parameter_to_monitor is defined by the result of the processing service. 
- """ - self.registry = CollectorRegistry() - - self.entity_processing_time_sum = Summary( - f"{prefix}_processing_time", "Summed up average processing time per entity observed", registry=self.registry - ) - - start_http_server(port, host, self.registry) - - def __call__(self, process_fn: Callable) -> Callable: - """Monitor the runtime of a function and update the registered metric with the average runtime per resulting - element. - """ - return self._add_result_monitoring(process_fn) - - def _add_result_monitoring(self, process_fn: Callable): - def inner(data: Any, **kwargs): - start = time() - - result: Sized = process_fn(data, **kwargs) - - runtime = time() - start - - if not result: - return result - - processing_time_per_entity = runtime / len(result) - - self.entity_processing_time_sum.observe(processing_time_per_entity) - - return result - - return inner - - -def get_monitor_from_config(config: Config) -> Callable: - if config.monitoring_enabled: - return PrometheusMonitor(*attrgetter("prometheus_metric_prefix", "prometheus_host", "prometheus_port")(config)) - else: - return identity diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py new file mode 100644 index 0000000..9c06349 --- /dev/null +++ b/pyinfra/storage/connection.py @@ -0,0 +1,124 @@ +from functools import lru_cache, partial +from typing import Callable + +import requests +from dynaconf import Dynaconf +from funcy import compose +from kn_utils.logging import logger + +from pyinfra.storage.storages.azure import get_azure_storage_from_settings +from pyinfra.storage.storages.interface import Storage +from pyinfra.storage.storages.s3 import get_s3_storage_from_settings +from pyinfra.utils.cipher import decrypt +from pyinfra.utils.compressing import get_decompressor, get_compressor +from pyinfra.utils.config_validation import validate_settings, storage_validators, multi_tenant_storage_validators +from pyinfra.utils.encoding import get_decoder, get_encoder + + +def 
get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: + """Get storage connection based on settings. + If tenant_id is provided, gets storage connection information from tenant server instead. + The connections are cached based on the settings.cache_size value. + + In the future, when the default storage from config is no longer needed (only multi-tenant storage will be used), + get_storage_from_tenant_id can replace this function directly. + """ + if tenant_id: + logger.info(f"Using tenant storage for {tenant_id}.") + return get_storage_from_tenant_id(tenant_id, settings) + else: + logger.info("Using default storage.") + return get_storage_from_settings(settings) + + +def get_storage_from_settings(settings: Dynaconf) -> Storage: + validate_settings(settings, storage_validators) + + @lru_cache(maxsize=settings.storage.cache_size) + def _get_storage(backend: str) -> Storage: + return storage_dispatcher[backend](settings) + + return _get_storage(settings.storage.backend) + + +def get_storage_from_tenant_id(tenant_id: str, settings: Dynaconf) -> Storage: + validate_settings(settings, multi_tenant_storage_validators) + + @lru_cache(maxsize=settings.storage.cache_size) + def _get_storage(tenant: str, endpoint: str, public_key: str) -> Storage: + response = requests.get(f"{endpoint}/{tenant}").json() + + maybe_azure = response.get("azureStorageConnection") + maybe_s3 = response.get("s3StorageConnection") + assert (maybe_azure or maybe_s3) and not (maybe_azure and maybe_s3), "Only one storage backend can be used." 
+ + if maybe_azure: + connection_string = decrypt(public_key, maybe_azure["connectionString"]) + backend = "azure" + storage_settings = { + "storage": { + "azure": { + "connection_string": connection_string, + "container": maybe_azure["containerName"], + }, + } + } + elif maybe_s3: + secret = decrypt(public_key, maybe_s3["secret"]) + backend = "s3" + storage_settings = { + "storage": { + "s3": { + "endpoint": maybe_s3["endpoint"], + "key": maybe_s3["key"], + "secret": secret, + "region": maybe_s3["region"], + "bucket": maybe_s3["bucketName"], + }, + } + } + else: + raise Exception(f"Unknown storage backend in {response}.") + + merged_settings = Dynaconf() + merged_settings.update(settings) + merged_settings.update(storage_settings) + storage = storage_dispatcher[backend](merged_settings) + + return storage + + return _get_storage(tenant_id, settings.storage.tenant_server.endpoint, settings.storage.tenant_server.public_key) + + +storage_dispatcher = { + "azure": get_azure_storage_from_settings, + "s3": get_s3_storage_from_settings, +} + + +@lru_cache(maxsize=10) +def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: + verify = partial(verify_existence, storage, bucket) + download = partial(storage.get_object, bucket) + decompress = get_decompressor(compression_type) + decode = get_decoder(file_type) + + return compose(decode, decompress, download, verify) + + +@lru_cache(maxsize=10) +def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: + upload = partial(storage.put_object, bucket) + compress = get_compressor(compression_type) + encode = get_encoder(file_type) + + def inner(file_name, file_bytes): + upload(file_name, compose(compress, encode)(file_bytes)) + + return inner + + +def verify_existence(storage: Storage, bucket: str, file_name: str) -> str: + if not storage.exists(file_name): + raise FileNotFoundError(f"{file_name=} name not found on storage in {storage.bucket=}.") + return file_name diff --git
a/pyinfra/storage/storage.py b/pyinfra/storage/storage.py deleted file mode 100644 index c452f0d..0000000 --- a/pyinfra/storage/storage.py +++ /dev/null @@ -1,51 +0,0 @@ -from functools import lru_cache, partial -from typing import Callable - -from dynaconf import Dynaconf -from funcy import compose - -from pyinfra.storage.storages.interface import Storage -from pyinfra.storage.storages.s3 import get_s3_storage_from_settings -from pyinfra.utils.compressing import get_decompressor, get_compressor -from pyinfra.utils.config_validation import validate_settings, storage_validators -from pyinfra.utils.encoding import get_decoder, get_encoder - - -def get_storage_from_settings(settings: Dynaconf) -> Storage: - validate_settings(settings, storage_validators) - - return storage_dispatcher[settings.storage.backend](settings) - - -storage_dispatcher = { - "azure": get_s3_storage_from_settings, - "s3": get_s3_storage_from_settings, -} - - -@lru_cache(maxsize=10) -def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - verify = partial(verify_existence, storage, bucket) - download = partial(storage.get_object, bucket) - decompress = get_decompressor(compression_type) - decode = get_decoder(file_type) - - return compose(decode, decompress, download, verify) - - -@lru_cache(maxsize=10) -def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - upload = partial(storage.put_object, bucket) - compress = get_compressor(compression_type) - encode = get_encoder(file_type) - - def inner(file_name, file_bytes): - upload(file_name, compose(compress, encode)(file_bytes)) - - return inner - - -def verify_existence(storage: Storage, bucket: str, file_name: str) -> str: - if not storage.exists(bucket, file_name): - raise FileNotFoundError(f"{file_name=} name not found on storage in {bucket=}.") - return file_name diff --git a/pyinfra/storage/storage_info.py b/pyinfra/storage/storage_info.py deleted 
file mode 100644 index aefa15b..0000000 --- a/pyinfra/storage/storage_info.py +++ /dev/null @@ -1,125 +0,0 @@ -from dataclasses import dataclass - -import requests -from azure.storage.blob import BlobServiceClient -from minio import Minio - -from pyinfra.config import Config -from pyinfra.exception import UnknownStorageBackend -from pyinfra.storage.storages.azure import AzureStorage -from pyinfra.storage.storages.interface import Storage -from pyinfra.storage.storages.s3 import S3Storage -from pyinfra.utils.cipher import decrypt -from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint - - -@dataclass(frozen=True) -class StorageInfo: - bucket_name: str - - -@dataclass(frozen=True) -class AzureStorageInfo(StorageInfo): - connection_string: str - - def __hash__(self): - return hash(self.connection_string) - - def __eq__(self, other): - if not isinstance(other, AzureStorageInfo): - return False - return self.connection_string == other.connection_string - - -@dataclass(frozen=True) -class S3StorageInfo(StorageInfo): - secure: bool - endpoint: str - access_key: str - secret_key: str - region: str - - def __hash__(self): - return hash((self.secure, self.endpoint, self.access_key, self.secret_key, self.region)) - - def __eq__(self, other): - if not isinstance(other, S3StorageInfo): - return False - return ( - self.secure == other.secure - and self.endpoint == other.endpoint - and self.access_key == other.access_key - and self.secret_key == other.secret_key - and self.region == other.region - ) - - -def get_storage_from_storage_info(storage_info: StorageInfo) -> Storage: - if isinstance(storage_info, AzureStorageInfo): - return AzureStorage(BlobServiceClient.from_connection_string(conn_str=storage_info.connection_string)) - elif isinstance(storage_info, S3StorageInfo): - return S3Storage( - Minio( - secure=storage_info.secure, - endpoint=storage_info.endpoint, - access_key=storage_info.access_key, - secret_key=storage_info.secret_key, - 
region=storage_info.region, - ) - ) - else: - raise UnknownStorageBackend() - - -def get_storage_info_from_endpoint(public_key: str, endpoint: str, x_tenant_id: str) -> StorageInfo: - resp = requests.get(f"{endpoint}/{x_tenant_id}").json() - - maybe_azure = resp.get("azureStorageConnection") - maybe_s3 = resp.get("s3StorageConnection") - assert not (maybe_azure and maybe_s3) - - if maybe_azure: - connection_string = decrypt(public_key, maybe_azure["connectionString"]) - storage_info = AzureStorageInfo( - connection_string=connection_string, - bucket_name=maybe_azure["containerName"], - ) - elif maybe_s3: - secure, endpoint = validate_and_parse_s3_endpoint(maybe_s3["endpoint"]) - secret = decrypt(public_key, maybe_s3["secret"]) - - storage_info = S3StorageInfo( - secure=secure, - endpoint=endpoint, - access_key=maybe_s3["key"], - secret_key=secret, - region=maybe_s3["region"], - bucket_name=maybe_s3["bucketName"], - ) - else: - raise UnknownStorageBackend() - - return storage_info - - -def get_storage_info_from_config(config: Config) -> StorageInfo: - if config.storage_backend == "s3": - storage_info = S3StorageInfo( - secure=config.storage_secure_connection, - endpoint=config.storage_endpoint, - access_key=config.storage_key, - secret_key=config.storage_secret, - region=config.storage_region, - bucket_name=config.storage_bucket, - ) - - elif config.storage_backend == "azure": - storage_info = AzureStorageInfo( - connection_string=config.storage_azureconnectionstring, - bucket_name=config.storage_bucket, - ) - - else: - raise UnknownStorageBackend(f"Unknown storage backend '{config.storage_backend}'.") - - return storage_info diff --git a/pyinfra/storage/storage_provider.py b/pyinfra/storage/storage_provider.py deleted file mode 100644 index 345a096..0000000 --- a/pyinfra/storage/storage_provider.py +++ /dev/null @@ -1,55 +0,0 @@ -from dataclasses import asdict -from functools import partial, lru_cache -from kn_utils.logging import logger -from typing import Tuple - 
-from pyinfra.config import Config -from pyinfra.storage.storage_info import ( - get_storage_info_from_config, - get_storage_info_from_endpoint, - StorageInfo, - get_storage_from_storage_info, -) -from pyinfra.storage.storages.interface import Storage - - -class StorageProvider: - def __init__(self, config: Config): - self.config = config - self.default_storage_info: StorageInfo = get_storage_info_from_config(config) - - self.get_storage_info_from_tenant_id = partial( - get_storage_info_from_endpoint, - config.tenant_decryption_public_key, - config.tenant_endpoint, - ) - - def __call__(self, *args, **kwargs): - return self._connect(*args, **kwargs) - - @lru_cache(maxsize=32) - def _connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]: - storage_info = self._get_storage_info(x_tenant_id) - storage_connection = get_storage_from_storage_info(storage_info) - return storage_connection, storage_info - - def _get_storage_info(self, x_tenant_id=None): - if x_tenant_id: - storage_info = self.get_storage_info_from_tenant_id(x_tenant_id) - logger.debug(f"Received {storage_info.__class__.__name__} for {x_tenant_id} from endpoint.") - logger.trace(f"{asdict(storage_info)}") - else: - storage_info = self.default_storage_info - logger.debug(f"Using local default {storage_info.__class__.__name__} for {x_tenant_id}.") - logger.trace(f"{asdict(storage_info)}") - - return storage_info - - -class StorageProviderMock(StorageProvider): - def __init__(self, storage, storage_info): - self.storage = storage - self.storage_info = storage_info - - def __call__(self, *args, **kwargs): - return self.storage, self.storage_info diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index 4c7467c..1e56630 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ -15,47 +15,52 @@ logging.getLogger("urllib3").setLevel(logging.WARNING) class AzureStorage(Storage): - def __init__(self, client: BlobServiceClient): + def 
__init__(self, client: BlobServiceClient, bucket: str): self._client: BlobServiceClient = client + self._bucket = bucket - def has_bucket(self, bucket_name): - container_client = self._client.get_container_client(bucket_name) + @property + def bucket(self): + return self._bucket + + def has_bucket(self): + container_client = self._client.get_container_client(self.bucket) return container_client.exists() - def make_bucket(self, bucket_name): - container_client = self._client.get_container_client(bucket_name) - container_client if container_client.exists() else self._client.create_container(bucket_name) + def make_bucket(self): + container_client = self._client.get_container_client(self.bucket) + container_client if container_client.exists() else self._client.create_container(self.bucket) - def __provide_container_client(self, bucket_name) -> ContainerClient: - self.make_bucket(bucket_name) - container_client = self._client.get_container_client(bucket_name) + def __provide_container_client(self) -> ContainerClient: + self.make_bucket() + container_client = self._client.get_container_client(self.bucket) return container_client - def put_object(self, bucket_name, object_name, data): + def put_object(self, object_name, data): logger.debug(f"Uploading '{object_name}'...") - container_client = self.__provide_container_client(bucket_name) + container_client = self.__provide_container_client() blob_client = container_client.get_blob_client(object_name) blob_client.upload_blob(data, overwrite=True) - def exists(self, bucket_name, object_name): - container_client = self.__provide_container_client(bucket_name) + def exists(self, object_name): + container_client = self.__provide_container_client() blob_client = container_client.get_blob_client(object_name) return blob_client.exists() @retry(tries=3, delay=5, jitter=(1, 3)) - def get_object(self, bucket_name, object_name): + def get_object(self, object_name): logger.debug(f"Downloading '{object_name}'...") try: - 
container_client = self.__provide_container_client(bucket_name) + container_client = self.__provide_container_client() blob_client = container_client.get_blob_client(object_name) blob_data = blob_client.download_blob() return blob_data.readall() except Exception as err: raise Exception("Failed getting object from azure client") from err - def get_all_objects(self, bucket_name): - container_client = self.__provide_container_client(bucket_name) + def get_all_objects(self): + container_client = self.__provide_container_client() blobs = container_client.list_blobs() for blob in blobs: logger.debug(f"Downloading '{blob.name}'...") @@ -64,18 +69,22 @@ class AzureStorage(Storage): data = blob_data.readall() yield data - def clear_bucket(self, bucket_name): - logger.debug(f"Clearing Azure container '{bucket_name}'...") - container_client = self._client.get_container_client(bucket_name) + def clear_bucket(self): + logger.debug(f"Clearing Azure container '{self.bucket}'...") + container_client = self._client.get_container_client(self.bucket) blobs = container_client.list_blobs() container_client.delete_blobs(*blobs) - def get_all_object_names(self, bucket_name): - container_client = self.__provide_container_client(bucket_name) + def get_all_object_names(self): + container_client = self.__provide_container_client() blobs = container_client.list_blobs() - return zip(repeat(bucket_name), map(attrgetter("name"), blobs)) + return zip(repeat(self.bucket), map(attrgetter("name"), blobs)) def get_azure_storage_from_settings(settings: Dynaconf): validate_settings(settings, azure_storage_validators) - return AzureStorage(BlobServiceClient.from_connection_string(conn_str=settings.storage.azure.connection_string)) + + return AzureStorage( + client=BlobServiceClient.from_connection_string(conn_str=settings.storage.azure.connection_string), + bucket=settings.storage.azure.container, + ) diff --git a/pyinfra/storage/storages/interface.py b/pyinfra/storage/storages/interface.py index 
f5530d6..6283f13 100644 --- a/pyinfra/storage/storages/interface.py +++ b/pyinfra/storage/storages/interface.py @@ -2,34 +2,39 @@ from abc import ABC, abstractmethod class Storage(ABC): + @property @abstractmethod - def make_bucket(self, bucket_name): + def bucket(self): raise NotImplementedError @abstractmethod - def has_bucket(self, bucket_name): + def make_bucket(self): raise NotImplementedError @abstractmethod - def put_object(self, bucket_name, object_name, data): + def has_bucket(self): raise NotImplementedError @abstractmethod - def exists(self, bucket_name, object_name): + def put_object(self, object_name, data): raise NotImplementedError @abstractmethod - def get_object(self, bucket_name, object_name): + def exists(self, object_name): raise NotImplementedError @abstractmethod - def get_all_objects(self, bucket_name): + def get_object(self, object_name): raise NotImplementedError @abstractmethod - def clear_bucket(self, bucket_name): + def get_all_objects(self): raise NotImplementedError @abstractmethod - def get_all_object_names(self, bucket_name): + def clear_bucket(self): + raise NotImplementedError + + @abstractmethod + def get_all_object_names(self): raise NotImplementedError diff --git a/pyinfra/storage/storages/mock.py b/pyinfra/storage/storages/mock.py index b209399..2a77bca 100644 --- a/pyinfra/storage/storages/mock.py +++ b/pyinfra/storage/storages/mock.py @@ -5,32 +5,35 @@ class StorageMock(Storage): def __init__(self, data: bytes = None, file_name: str = None, bucket: str = None): self.data = data self.file_name = file_name - self.bucket = bucket + self._bucket = bucket - def make_bucket(self, bucket_name): - self.bucket = bucket_name + @property + def bucket(self): + return self._bucket - def has_bucket(self, bucket_name): - return self.bucket == bucket_name + def make_bucket(self): + pass - def put_object(self, bucket_name, object_name, data): - self.bucket = bucket_name + def has_bucket(self): + return True + + def put_object(self, 
object_name, data): self.file_name = object_name self.data = data - def exists(self, bucket_name, object_name): - return self.bucket == bucket_name and self.file_name == object_name + def exists(self, object_name): + return self.file_name == object_name - def get_object(self, bucket_name, object_name): + def get_object(self, object_name): return self.data - def get_all_objects(self, bucket_name): + def get_all_objects(self): raise NotImplementedError - def clear_bucket(self, bucket_name): - self.bucket = None + def clear_bucket(self): + self._bucket = None self.file_name = None self.data = None - def get_all_object_names(self, bucket_name): + def get_all_object_names(self): raise NotImplementedError diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index 57c8ac0..dcc151d 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -13,35 +13,40 @@ from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint class S3Storage(Storage): - def __init__(self, client: Minio): + def __init__(self, client: Minio, bucket: str): self._client = client + self._bucket = bucket - def make_bucket(self, bucket_name): - if not self.has_bucket(bucket_name): - self._client.make_bucket(bucket_name) + @property + def bucket(self): + return self._bucket - def has_bucket(self, bucket_name): - return self._client.bucket_exists(bucket_name) + def make_bucket(self): + if not self.has_bucket(): + self._client.make_bucket(self.bucket) - def put_object(self, bucket_name, object_name, data): + def has_bucket(self): + return self._client.bucket_exists(self.bucket) + + def put_object(self, object_name, data): logger.debug(f"Uploading '{object_name}'...") data = io.BytesIO(data) - self._client.put_object(bucket_name, object_name, data, length=data.getbuffer().nbytes) + self._client.put_object(self.bucket, object_name, data, length=data.getbuffer().nbytes) - def exists(self, bucket_name, object_name): + def exists(self, object_name): try: - 
self._client.stat_object(bucket_name, object_name) + self._client.stat_object(self.bucket, object_name) return True except Exception: return False @retry(tries=3, delay=5, jitter=(1, 3)) - def get_object(self, bucket_name, object_name): + def get_object(self, object_name): logger.debug(f"Downloading '{object_name}'...") response = None try: - response = self._client.get_object(bucket_name, object_name) + response = self._client.get_object(self.bucket, object_name) return response.data except Exception as err: raise Exception("Failed getting object from s3 client") from err @@ -50,20 +55,20 @@ class S3Storage(Storage): response.close() response.release_conn() - def get_all_objects(self, bucket_name): - for obj in self._client.list_objects(bucket_name, recursive=True): + def get_all_objects(self): + for obj in self._client.list_objects(self.bucket, recursive=True): logger.debug(f"Downloading '{obj.object_name}'...") - yield self.get_object(bucket_name, obj.object_name) + yield self.get_object(obj.object_name) - def clear_bucket(self, bucket_name): - logger.debug(f"Clearing S3 bucket '{bucket_name}'...") - objects = self._client.list_objects(bucket_name, recursive=True) + def clear_bucket(self): + logger.debug(f"Clearing S3 bucket '{self.bucket}'...") + objects = self._client.list_objects(self.bucket, recursive=True) for obj in objects: - self._client.remove_object(bucket_name, obj.object_name) + self._client.remove_object(self.bucket, obj.object_name) - def get_all_object_names(self, bucket_name): - objs = self._client.list_objects(bucket_name, recursive=True) - return zip(repeat(bucket_name), map(attrgetter("object_name"), objs)) + def get_all_object_names(self): + objs = self._client.list_objects(self.bucket, recursive=True) + return zip(repeat(self.bucket), map(attrgetter("object_name"), objs)) def get_s3_storage_from_settings(settings: Dynaconf): @@ -72,11 +77,12 @@ def get_s3_storage_from_settings(settings: Dynaconf): secure, endpoint = 
validate_and_parse_s3_endpoint(settings.storage.s3.endpoint) return S3Storage( - Minio( + client=Minio( secure=secure, endpoint=endpoint, access_key=settings.storage.s3.key, secret_key=settings.storage.s3.secret, region=settings.storage.s3.region, - ) + ), + bucket=settings.storage.s3.bucket, ) diff --git a/pyinfra/utils/config_validation.py b/pyinfra/utils/config_validation.py index 028f5d4..629891d 100644 --- a/pyinfra/utils/config_validation.py +++ b/pyinfra/utils/config_validation.py @@ -15,6 +15,7 @@ queue_manager_validators = [ azure_storage_validators = [ Validator("storage.azure.connection_string", must_exist=True), + Validator("storage.azure.container", must_exist=True), ] s3_storage_validators = [ @@ -22,12 +23,19 @@ s3_storage_validators = [ Validator("storage.s3.key", must_exist=True), Validator("storage.s3.secret", must_exist=True), Validator("storage.s3.region", must_exist=True), + Validator("storage.s3.bucket", must_exist=True), ] storage_validators = [ Validator("storage.backend", must_exist=True), ] +multi_tenant_storage_validators = [ + Validator("storage.tenant_server.endpoint", must_exist=True), + Validator("storage.tenant_server.public_key", must_exist=True), +] + + prometheus_validators = [ Validator("metrics.prometheus.prefix", must_exist=True), Validator("metrics.prometheus.enabled", must_exist=True), diff --git a/pyinfra/webserver.py b/pyinfra/webserver.py index acecf24..0d37139 100644 --- a/pyinfra/webserver.py +++ b/pyinfra/webserver.py @@ -8,11 +8,11 @@ from fastapi import FastAPI from pyinfra.utils.config_validation import validate_settings, webserver_validators -def create_webserver_thread(app: FastAPI, settings: Dynaconf) -> threading.Thread: +def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: validate_settings(settings, validators=webserver_validators) - return threading.Thread( - target=lambda: uvicorn.run( - app, port=settings.webserver.port, host=settings.webserver.host, 
log_level=logging.WARNING - ) - ) + return create_webserver_thread(app=app, port=settings.webserver.port, host=settings.webserver.host) + + +def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thread: + return threading.Thread(target=lambda: uvicorn.run(app, port=port, host=host, log_level=logging.WARNING)) diff --git a/tests/tests_with_docker_compose/prometheus_monitoring_test.py b/tests/tests_with_docker_compose/prometheus_monitoring_test.py index a6d0df1..75da089 100644 --- a/tests/tests_with_docker_compose/prometheus_monitoring_test.py +++ b/tests/tests_with_docker_compose/prometheus_monitoring_test.py @@ -6,14 +6,14 @@ import requests from fastapi import FastAPI from pyinfra.monitor.prometheus import add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings -from pyinfra.webserver import create_webserver_thread +from pyinfra.webserver import create_webserver_thread_from_settings @pytest.fixture(scope="class") def app_with_prometheus_endpoint(settings): app = FastAPI() app = add_prometheus_endpoint(app) - thread = create_webserver_thread(app, settings) + thread = create_webserver_thread_from_settings(app, settings) thread.daemon = True thread.start() sleep(1) diff --git a/tests/tests_with_docker_compose/storage_test.py b/tests/tests_with_docker_compose/storage_test.py index 6f0bbd6..1f97468 100644 --- a/tests/tests_with_docker_compose/storage_test.py +++ b/tests/tests_with_docker_compose/storage_test.py @@ -1,64 +1,119 @@ +from time import sleep + import pytest +from fastapi import FastAPI -from pyinfra.storage.storage import get_storage_from_settings +from pyinfra.storage.connection import get_storage_from_settings, get_storage_from_tenant_id +from pyinfra.utils.cipher import encrypt +from pyinfra.webserver import create_webserver_thread -@pytest.fixture(scope="session") -def storage(storage_backend, bucket_name, settings): +@pytest.fixture(scope="class") +def storage(storage_backend, settings): 
settings.storage.backend = storage_backend storage = get_storage_from_settings(settings) - storage.make_bucket(bucket_name) + storage.make_bucket() yield storage - storage.clear_bucket(bucket_name) + storage.clear_bucket() -@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session") -@pytest.mark.parametrize("bucket_name", ["bucket"], scope="session") +@pytest.fixture(scope="class") +def tenant_server_mock(settings, tenant_server_host, tenant_server_port): + app = FastAPI() + + @app.get("/azure_tenant") + def get_azure_storage_info(): + return { + "azureStorageConnection": { + "connectionString": encrypt( + settings.storage.tenant_server.public_key, settings.storage.azure.connection_string + ), + "containerName": settings.storage.azure.container, + } + } + + @app.get("/s3_tenant") + def get_s3_storage_info(): + return { + "s3StorageConnection": { + "endpoint": settings.storage.s3.endpoint, + "key": settings.storage.s3.key, + "secret": encrypt(settings.storage.tenant_server.public_key, settings.storage.s3.secret), + "region": settings.storage.s3.region, + "bucketName": settings.storage.s3.bucket, + } + } + + thread = create_webserver_thread(app, tenant_server_port, tenant_server_host) + thread.daemon = True + thread.start() + sleep(1) + yield + thread.join(timeout=1) + + +@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="class") class TestStorage: - def test_clearing_bucket_yields_empty_bucket(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - data_received = storage.get_all_objects(bucket_name) + def test_clearing_bucket_yields_empty_bucket(self, storage): + storage.clear_bucket() + data_received = storage.get_all_objects() assert not {*data_received} - def test_getting_object_put_in_bucket_is_object(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, "file", b"content") - data_received = storage.get_object(bucket_name, "file") + def 
test_getting_object_put_in_bucket_is_object(self, storage): + storage.clear_bucket() + storage.put_object("file", b"content") + data_received = storage.get_object("file") assert b"content" == data_received - def test_object_put_in_bucket_exists_on_storage(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, "file", b"content") - assert storage.exists(bucket_name, "file") + def test_object_put_in_bucket_exists_on_storage(self, storage): + storage.clear_bucket() + storage.put_object("file", b"content") + assert storage.exists("file") - def test_getting_nested_object_put_in_bucket_is_nested_object(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, "folder/file", b"content") - data_received = storage.get_object(bucket_name, "folder/file") + def test_getting_nested_object_put_in_bucket_is_nested_object(self, storage): + storage.clear_bucket() + storage.put_object("folder/file", b"content") + data_received = storage.get_object("folder/file") assert b"content" == data_received - def test_getting_objects_put_in_bucket_are_objects(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, "file1", b"content 1") - storage.put_object(bucket_name, "folder/file2", b"content 2") - data_received = storage.get_all_objects(bucket_name) + def test_getting_objects_put_in_bucket_are_objects(self, storage): + storage.clear_bucket() + storage.put_object("file1", b"content 1") + storage.put_object("folder/file2", b"content 2") + data_received = storage.get_all_objects() assert {b"content 1", b"content 2"} == {*data_received} - def test_make_bucket_produces_bucket(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.make_bucket(bucket_name) - assert storage.has_bucket(bucket_name) + def test_make_bucket_produces_bucket(self, storage): + storage.clear_bucket() + storage.make_bucket() + assert storage.has_bucket() - def 
test_listing_bucket_files_yields_all_files_in_bucket(self, storage, bucket_name): - storage.clear_bucket(bucket_name) - storage.put_object(bucket_name, "file1", b"content 1") - storage.put_object(bucket_name, "file2", b"content 2") - full_names_received = storage.get_all_object_names(bucket_name) - assert {(bucket_name, "file1"), (bucket_name, "file2")} == {*full_names_received} + def test_listing_bucket_files_yields_all_files_in_bucket(self, storage): + storage.clear_bucket() + storage.put_object("file1", b"content 1") + storage.put_object("file2", b"content 2") + full_names_received = storage.get_all_object_names() + assert {(storage.bucket, "file1"), (storage.bucket, "file2")} == {*full_names_received} - def test_data_loading_failure_raised_if_object_not_present(self, storage, bucket_name): - storage.clear_bucket(bucket_name) + def test_data_loading_failure_raised_if_object_not_present(self, storage): + storage.clear_bucket() with pytest.raises(Exception): - storage.get_object(bucket_name, "folder/file") + storage.get_object("folder/file") + + +@pytest.mark.parametrize("tenant_id", ["azure_tenant", "s3_tenant"], scope="class") +@pytest.mark.parametrize("tenant_server_host", ["localhost"], scope="class") +@pytest.mark.parametrize("tenant_server_port", [8000], scope="class") +class TestMultiTenantStorage: + def test_storage_connection_from_tenant_id( + self, tenant_id, tenant_server_mock, settings, tenant_server_host, tenant_server_port + ): + settings["storage"]["tenant_server"]["endpoint"] = f"http://{tenant_server_host}:{tenant_server_port}" + storage = get_storage_from_tenant_id(tenant_id, settings) + + storage.put_object("file", b"content") + data_received = storage.get_object("file") + + assert b"content" == data_received From 6802bf5960eef530113e630d6bfa45f1bb82d90a Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 18 Jan 2024 15:45:28 +0100 Subject: [PATCH 11/39] refactor: download and upload file logic, module structure, remove redundant files 
so far --- pyinfra/config.py | 131 ---------------- pyinfra/config/__init__.py | 0 pyinfra/config/loader.py | 23 +++ .../validation.py} | 0 pyinfra/exception.py | 5 - pyinfra/monitor/prometheus.py | 4 +- pyinfra/queue/manager.py | 2 +- pyinfra/storage/connection.py | 44 +----- pyinfra/storage/storages/azure.py | 4 +- pyinfra/storage/storages/mock.py | 39 ----- pyinfra/storage/storages/s3.py | 4 +- .../storages/{interface.py => storage.py} | 0 pyinfra/storage/utils.py | 106 +++++++++++++ pyinfra/utils/compressing.py | 22 --- pyinfra/utils/dict.py | 5 - pyinfra/utils/encoding.py | 28 ---- pyinfra/utils/file_extension_parsing.py | 41 ----- pyinfra/webserver/__init__.py | 0 pyinfra/{webserver.py => webserver/utils.py} | 2 +- pyproject.toml | 1 - tests/conftest.py | 142 +----------------- tests/docker-compose.yml | 2 +- .../{unit_tests => unit_test}/cipher_test.py | 0 .../knutils_logger_test.py} | 0 .../prometheus_monitoring_test.py | 2 +- .../queue_test.py | 0 .../storage_test.py | 111 +++++++++----- .../unit_tests/file_extension_parsing_test.py | 32 ---- tests/unit_tests/monitor_test.py | 44 ------ tests/unit_tests/payload_test.py | 48 ------ tests/unit_tests/processor_test.py | 81 ---------- 31 files changed, 221 insertions(+), 702 deletions(-) delete mode 100644 pyinfra/config.py create mode 100644 pyinfra/config/__init__.py create mode 100644 pyinfra/config/loader.py rename pyinfra/{utils/config_validation.py => config/validation.py} (100%) delete mode 100644 pyinfra/exception.py delete mode 100644 pyinfra/storage/storages/mock.py rename pyinfra/storage/storages/{interface.py => storage.py} (100%) create mode 100644 pyinfra/storage/utils.py delete mode 100644 pyinfra/utils/compressing.py delete mode 100644 pyinfra/utils/dict.py delete mode 100644 pyinfra/utils/encoding.py delete mode 100644 pyinfra/utils/file_extension_parsing.py create mode 100644 pyinfra/webserver/__init__.py rename pyinfra/{webserver.py => webserver/utils.py} (87%) rename tests/{unit_tests => 
unit_test}/cipher_test.py (100%) rename tests/{unit_tests/test_knutils.py => unit_test/knutils_logger_test.py} (100%) rename tests/{tests_with_docker_compose => unit_test}/prometheus_monitoring_test.py (95%) rename tests/{tests_with_docker_compose => unit_test}/queue_test.py (100%) rename tests/{tests_with_docker_compose => unit_test}/storage_test.py (74%) delete mode 100644 tests/unit_tests/file_extension_parsing_test.py delete mode 100644 tests/unit_tests/monitor_test.py delete mode 100644 tests/unit_tests/payload_test.py delete mode 100644 tests/unit_tests/processor_test.py diff --git a/pyinfra/config.py b/pyinfra/config.py deleted file mode 100644 index c6cc9c9..0000000 --- a/pyinfra/config.py +++ /dev/null @@ -1,131 +0,0 @@ -import os -from os import environ -from pathlib import Path -from typing import Union - -from dynaconf import Dynaconf - -from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint - - -def read_from_environment(environment_variable_name, default_value): - return environ.get(environment_variable_name, default_value) - - -def normalize_bool(value: Union[str, bool]): - return value if isinstance(value, bool) else value in ["True", "true"] - - -class Config: - def __init__(self): - # Logging level for service logger - self.logging_level_root = read_from_environment("LOGGING_LEVEL_ROOT", "DEBUG") - - # Enables Prometheus monitoring - self.monitoring_enabled = normalize_bool(read_from_environment("MONITORING_ENABLED", True)) - - # Prometheus metric prefix, per convention '{product_name}_{service_name}_{parameter}' - # In the current implementation, the results of a service define the parameter that is monitored, - # i.e. analysis result per image means processing time per image is monitored. 
- # TODO: add validator since some characters like '-' are not allowed by python prometheus - self.prometheus_metric_prefix = read_from_environment( - "PROMETHEUS_METRIC_PREFIX", "redactmanager_research_service_parameter" - ) - - # Prometheus webserver address and port - self.prometheus_host = "0.0.0.0" - self.prometheus_port = 8080 - - # RabbitMQ host address - self.rabbitmq_host = read_from_environment("RABBITMQ_HOST", "localhost") - - # RabbitMQ host port - self.rabbitmq_port = read_from_environment("RABBITMQ_PORT", "5672") - - # RabbitMQ username - self.rabbitmq_username = read_from_environment("RABBITMQ_USERNAME", "user") - - # RabbitMQ password - self.rabbitmq_password = read_from_environment("RABBITMQ_PASSWORD", "bitnami") - - # Controls AMQP heartbeat timeout in seconds - self.rabbitmq_heartbeat = int(read_from_environment("RABBITMQ_HEARTBEAT", 1)) - - # Controls AMQP connection sleep timer in seconds - # important for heartbeat to come through while main function runs on other thread - self.rabbitmq_connection_sleep = int(read_from_environment("RABBITMQ_CONNECTION_SLEEP", 5)) - - # Queue name for requests to the service - self.request_queue = read_from_environment("REQUEST_QUEUE", "request_queue") - - # Queue name for responses by service - self.response_queue = read_from_environment("RESPONSE_QUEUE", "response_queue") - - # Queue name for failed messages - self.dead_letter_queue = read_from_environment("DEAD_LETTER_QUEUE", "dead_letter_queue") - - # The type of storage to use {s3, azure} - self.storage_backend = read_from_environment("STORAGE_BACKEND", "s3") - - # The bucket / container to pull files specified in queue requests from - if self.storage_backend == "s3": - self.storage_bucket = read_from_environment("STORAGE_BUCKET_NAME", "redaction") - else: - self.storage_bucket = read_from_environment("STORAGE_AZURECONTAINERNAME", "redaction") - - # S3 connection security flag and endpoint - storage_address = read_from_environment("STORAGE_ENDPOINT", 
"http://127.0.0.1:9000") - self.storage_secure_connection, self.storage_endpoint = validate_and_parse_s3_endpoint(storage_address) - - # User for s3 storage - self.storage_key = read_from_environment("STORAGE_KEY", "root") - - # Password for s3 storage - self.storage_secret = read_from_environment("STORAGE_SECRET", "password") - - # Region for s3 storage - self.storage_region = read_from_environment("STORAGE_REGION", "eu-central-1") - - # Connection string for Azure storage - self.storage_azureconnectionstring = read_from_environment( - "STORAGE_AZURECONNECTIONSTRING", - "DefaultEndpointsProtocol=...", - ) - - # Allowed file types for downloaded and uploaded storage objects that get processed by the service - self.allowed_file_types = ["json", "pdf"] - self.allowed_compression_types = ["gz"] - - self.allowed_processing_parameters = ["operation"] - - # config for x-tenant-endpoint to receive storage connection information per tenant - self.tenant_decryption_public_key = read_from_environment("TENANT_PUBLIC_KEY", "redaction") - self.tenant_endpoint = read_from_environment( - "TENANT_ENDPOINT", "http://tenant-user-management:8081/internal-api/tenants" - ) - - # Value to see if we should write a consumer token to a file - self.write_consumer_token = read_from_environment("WRITE_CONSUMER_TOKEN", "False") - - -def get_config() -> Config: - return Config() - - -def load_settings(): - # TODO: Make dynamic, so that the settings.toml file can be loaded from any location - # TODO: add validation - root_path = Path(__file__).resolve().parents[0] # this is pyinfra/ - repo_root_path = root_path.parents[0] # this is the root of the repo - os.environ["ROOT_PATH"] = str(root_path) - os.environ["REPO_ROOT_PATH"] = str(repo_root_path) - - settings = Dynaconf( - load_dotenv=True, - envvar_prefix=False, - settings_files=[ - repo_root_path / "config" / "settings.toml", - ], - ) - - return settings diff --git a/pyinfra/config/__init__.py b/pyinfra/config/__init__.py new file mode 100644 
index 0000000..e69de29 diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py new file mode 100644 index 0000000..a66bbd7 --- /dev/null +++ b/pyinfra/config/loader.py @@ -0,0 +1,23 @@ +import os +from pathlib import Path + +from dynaconf import Dynaconf + + +def load_settings(): + # TODO: Make dynamic, so that the settings.toml file can be loaded from any location + # TODO: add validation + root_path = Path(__file__).resolve().parents[1] # this is pyinfra/ + repo_root_path = root_path.parents[0] # this is the root of the repo + os.environ["ROOT_PATH"] = str(root_path) + os.environ["REPO_ROOT_PATH"] = str(repo_root_path) + + settings = Dynaconf( + load_dotenv=True, + envvar_prefix=False, + settings_files=[ + repo_root_path / "config" / "settings.toml", + ], + ) + + return settings diff --git a/pyinfra/utils/config_validation.py b/pyinfra/config/validation.py similarity index 100% rename from pyinfra/utils/config_validation.py rename to pyinfra/config/validation.py diff --git a/pyinfra/exception.py b/pyinfra/exception.py deleted file mode 100644 index b8d35de..0000000 --- a/pyinfra/exception.py +++ /dev/null @@ -1,5 +0,0 @@ -class ProcessingFailure(RuntimeError): - pass - -class UnknownStorageBackend(Exception): - pass \ No newline at end of file diff --git a/pyinfra/monitor/prometheus.py b/pyinfra/monitor/prometheus.py index 8dc8b01..534aaa5 100644 --- a/pyinfra/monitor/prometheus.py +++ b/pyinfra/monitor/prometheus.py @@ -1,5 +1,5 @@ from time import time -from typing import Sized, Callable, TypeVar +from typing import Callable, TypeVar from dynaconf import Dynaconf from fastapi import FastAPI @@ -7,7 +7,7 @@ from funcy import identity from prometheus_client import generate_latest, CollectorRegistry, REGISTRY, Summary from starlette.responses import Response -from pyinfra.utils.config_validation import validate_settings, prometheus_validators +from pyinfra.config.validation import validate_settings, prometheus_validators def add_prometheus_endpoint(app: 
FastAPI, registry: CollectorRegistry = REGISTRY) -> FastAPI: diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index 5c5e238..ebef9f3 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -13,7 +13,7 @@ from kn_utils.logging import logger from pika.adapters.blocking_connection import BlockingChannel, BlockingConnection from retry import retry -from pyinfra.utils.config_validation import validate_settings, queue_manager_validators +from pyinfra.config.validation import validate_settings, queue_manager_validators pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index 9c06349..5c51df0 100644 --- a/pyinfra/storage/connection.py +++ b/pyinfra/storage/connection.py @@ -1,18 +1,14 @@ -from functools import lru_cache, partial -from typing import Callable +from functools import lru_cache import requests from dynaconf import Dynaconf -from funcy import compose from kn_utils.logging import logger from pyinfra.storage.storages.azure import get_azure_storage_from_settings -from pyinfra.storage.storages.interface import Storage from pyinfra.storage.storages.s3 import get_s3_storage_from_settings +from pyinfra.storage.storages.storage import Storage from pyinfra.utils.cipher import decrypt -from pyinfra.utils.compressing import get_decompressor, get_compressor -from pyinfra.utils.config_validation import validate_settings, storage_validators, multi_tenant_storage_validators -from pyinfra.utils.encoding import get_decoder, get_encoder +from pyinfra.config.validation import validate_settings, storage_validators, multi_tenant_storage_validators def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: @@ -55,7 +51,7 @@ def get_storage_from_tenant_id(tenant_id: str, settings: Dynaconf) -> Storage: if maybe_azure: connection_string = decrypt(public_key, maybe_azure["connectionString"]) backend 
= "azure" - storage_settings = { + storage_info = { "storage": { "azure": { "connection_string": connection_string, @@ -66,7 +62,7 @@ def get_storage_from_tenant_id(tenant_id: str, settings: Dynaconf) -> Storage: elif maybe_s3: secret = decrypt(public_key, maybe_s3["secret"]) backend = "s3" - storage_settings = { + storage_info = { "storage": { "s3": { "endpoint": maybe_s3["endpoint"], @@ -81,7 +77,7 @@ def get_storage_from_tenant_id(tenant_id: str, settings: Dynaconf) -> Storage: raise Exception(f"Unknown storage backend in {response}.") storage_settings = Dynaconf() - storage_settings.update(settings) + storage_settings.update(storage_info) storage = storage_dispatcher[backend](storage_settings) @@ -94,31 +90,3 @@ storage_dispatcher = { "azure": get_azure_storage_from_settings, "s3": get_s3_storage_from_settings, } - - -@lru_cache(maxsize=10) -def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - verify = partial(verify_existence, storage, bucket) - download = partial(storage.get_object, bucket) - decompress = get_decompressor(compression_type) - decode = get_decoder(file_type) - - return compose(decode, decompress, download, verify) - - -@lru_cache(maxsize=10) -def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable: - upload = partial(storage.put_object, bucket) - compress = get_compressor(compression_type) - encode = get_encoder(file_type) - - def inner(file_name, file_bytes): - upload(file_name, compose(compress, encode)(file_bytes)) - - return inner - - -def verify_existence(storage: Storage, bucket: str, file_name: str) -> str: - if not storage.exists(file_name): - raise FileNotFoundError(f"{file_name=} name not found on storage in {storage.bucket=}.") - return file_name diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index 1e56630..5689a51 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ 
-7,8 +7,8 @@ from dynaconf import Dynaconf from kn_utils.logging import logger from retry import retry -from pyinfra.storage.storages.interface import Storage -from pyinfra.utils.config_validation import azure_storage_validators, validate_settings +from pyinfra.storage.storages.storage import Storage +from pyinfra.config.validation import azure_storage_validators, validate_settings logging.getLogger("azure").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) diff --git a/pyinfra/storage/storages/mock.py b/pyinfra/storage/storages/mock.py deleted file mode 100644 index 2a77bca..0000000 --- a/pyinfra/storage/storages/mock.py +++ /dev/null @@ -1,39 +0,0 @@ -from pyinfra.storage.storages.interface import Storage - - -class StorageMock(Storage): - def __init__(self, data: bytes = None, file_name: str = None, bucket: str = None): - self.data = data - self.file_name = file_name - self._bucket = bucket - - @property - def bucket(self): - return self._bucket - - def make_bucket(self): - pass - - def has_bucket(self): - return True - - def put_object(self, object_name, data): - self.file_name = object_name - self.data = data - - def exists(self, object_name): - return self.file_name == object_name - - def get_object(self, object_name): - return self.data - - def get_all_objects(self): - raise NotImplementedError - - def clear_bucket(self): - self._bucket = None - self.file_name = None - self.data = None - - def get_all_object_names(self): - raise NotImplementedError diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index dcc151d..c1e725c 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -7,8 +7,8 @@ from kn_utils.logging import logger from minio import Minio from retry import retry -from pyinfra.storage.storages.interface import Storage -from pyinfra.utils.config_validation import validate_settings, s3_storage_validators +from pyinfra.storage.storages.storage import Storage +from 
pyinfra.config.validation import validate_settings, s3_storage_validators from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint diff --git a/pyinfra/storage/storages/interface.py b/pyinfra/storage/storages/storage.py similarity index 100% rename from pyinfra/storage/storages/interface.py rename to pyinfra/storage/storages/storage.py diff --git a/pyinfra/storage/utils.py b/pyinfra/storage/utils.py new file mode 100644 index 0000000..d125115 --- /dev/null +++ b/pyinfra/storage/utils.py @@ -0,0 +1,106 @@ +import gzip +import json +from typing import Union + +from kn_utils.logging import logger +from pydantic import BaseModel, ValidationError + +from pyinfra.storage.storages.storage import Storage + + +class DossierIdFileIdDownloadPayload(BaseModel): + dossierId: str + fileId: str + targetFileExtension: str + + @property + def targetFilePath(self): + return f"{self.dossierId}/{self.fileId}.{self.targetFileExtension}" + + +class DossierIdFileIdUploadPayload(BaseModel): + dossierId: str + fileId: str + responseFileExtension: str + + @property + def responseFilePath(self): + return f"{self.dossierId}/{self.fileId}.{self.responseFileExtension}" + + +class TargetResponseFilePathDownloadPayload(BaseModel): + targetFilePath: str + + +class TargetResponseFilePathUploadPayload(BaseModel): + responseFilePath: str + + +def download_data_as_specified_in_message(storage: Storage, raw_payload: dict) -> Union[dict, bytes]: + """Convenience function to download a file specified in a message payload. + Supports both legacy and new payload formats. + + If the content is compressed with gzip (.gz), it will be decompressed (-> bytes). + If the content is a json file, it will be decoded (-> dict). + If no file is specified in the payload or the file does not exist in storage, an exception will be raised. + In all other cases, the content will be returned as is (-> bytes). + + This function can be extended in the future as needed (e.g. 
handling of more file types), but since further + requirements are not specified at this point in time, and it is unclear what these would entail, the code is kept + simple for now to improve readability, maintainability and avoid refactoring efforts of generic solutions that + weren't as generic as they seemed. + + """ + + try: + if "dossierId" in raw_payload: + payload = DossierIdFileIdDownloadPayload(**raw_payload) + else: + payload = TargetResponseFilePathDownloadPayload(**raw_payload) + except ValidationError: + raise ValueError("No download file path found in payload, nothing to download.") + + if not storage.exists(payload.targetFilePath): + raise FileNotFoundError(f"File '{payload.targetFilePath}' does not exist in storage.") + + data = storage.get_object(payload.targetFilePath) + + data = gzip.decompress(data) if ".gz" in payload.targetFilePath else data + data = json.loads(data.decode("utf-8")) if ".json" in payload.targetFilePath else data + + return data + + +def upload_data_as_specified_in_message(storage: Storage, raw_payload: dict, data): + """Convenience function to upload a file specified in a message payload. For now, only json-dump-able data is + supported. The storage json consists of the raw_payload, which is extended with a 'data' key, containing the + data to be uploaded. + + If the content is not a json-dump-able object, an exception will be raised. + If the result file identifier specifies compression with gzip (.gz), it will be compressed before upload. + + This function can be extended in the future as needed (e.g. if we need to upload images), but since further + requirements are not specified at this point in time, and it is unclear what these would entail, the code is kept + simple for now to improve readability, maintainability and avoid refactoring efforts of generic solutions that + weren't as generic as they seemed. 
+ """ + + try: + if "dossierId" in raw_payload: + payload = DossierIdFileIdUploadPayload(**raw_payload) + else: + payload = TargetResponseFilePathUploadPayload(**raw_payload) + except ValidationError: + raise ValueError("No upload file path found in payload, nothing to upload.") + + if ".json" not in payload.responseFilePath: + raise ValueError("Only json-dump-able data can be uploaded.") + + data = {**raw_payload, "data": data} + + data = json.dumps(data).encode("utf-8") + data = gzip.compress(data) if ".gz" in payload.responseFilePath else data + + storage.put_object(payload.responseFilePath, data) + + logger.info(f"Uploaded {payload.responseFilePath} to storage.") diff --git a/pyinfra/utils/compressing.py b/pyinfra/utils/compressing.py deleted file mode 100644 index df23f69..0000000 --- a/pyinfra/utils/compressing.py +++ /dev/null @@ -1,22 +0,0 @@ -import gzip -from typing import Union, Callable - -from funcy import identity - - -def get_decompressor(compression_type: Union[str, None]) -> Callable: - if not compression_type: - return identity - elif "gz" in compression_type: - return gzip.decompress - else: - raise ValueError(f"{compression_type=} is not supported.") - - -def get_compressor(compression_type: str) -> Callable: - if not compression_type: - return identity - elif "gz" in compression_type: - return gzip.compress - else: - raise ValueError(f"{compression_type=} is not supported.") diff --git a/pyinfra/utils/dict.py b/pyinfra/utils/dict.py deleted file mode 100644 index a732a6d..0000000 --- a/pyinfra/utils/dict.py +++ /dev/null @@ -1,5 +0,0 @@ -from funcy import project - - -def safe_project(mapping, keys) -> dict: - return project(mapping, keys) if mapping else {} diff --git a/pyinfra/utils/encoding.py b/pyinfra/utils/encoding.py deleted file mode 100644 index 8cbaa51..0000000 --- a/pyinfra/utils/encoding.py +++ /dev/null @@ -1,28 +0,0 @@ -import json -from typing import Callable - -from funcy import identity - - -def decode_json(data: bytes) -> 
dict: - return json.loads(data.decode("utf-8")) - - -def encode_json(data: dict) -> bytes: - return json.dumps(data).encode("utf-8") - - -def get_decoder(file_type: str) -> Callable: - if "json" in file_type: - return decode_json - elif "pdf" in file_type: - return identity - else: - raise ValueError(f"{file_type=} is not supported.") - - -def get_encoder(file_type: str) -> Callable: - if "json" in file_type: - return encode_json - else: - raise ValueError(f"{file_type=} is not supported.") diff --git a/pyinfra/utils/file_extension_parsing.py b/pyinfra/utils/file_extension_parsing.py deleted file mode 100644 index 2eb0d3f..0000000 --- a/pyinfra/utils/file_extension_parsing.py +++ /dev/null @@ -1,41 +0,0 @@ -from collections import defaultdict -from typing import Callable - -from funcy import merge - - -def make_file_extension_parser(file_types, compression_types): - ext2_type2ext = make_ext2_type2ext(file_types, compression_types) - ext_to_type2ext = make_ext_to_type2ext(ext2_type2ext) - - def inner(path): - file_extensions = parse_file_extensions(path, ext_to_type2ext) - return file_extensions.get("file_type"), file_extensions.get("compression_type") - - return inner - - -def make_ext2_type2ext(file_type_extensions, compression_type_extensions): - def make_ext_to_ext2type(ext_type): - return lambda ext: {ext_type: ext} - - ext_to_file_type_mapper = make_ext_to_ext2type("file_type") - ext_to_compression_type_mapper = make_ext_to_ext2type("compression_type") - return defaultdict( - lambda: lambda _: {}, - { - **{e: ext_to_file_type_mapper for e in file_type_extensions}, - **{e: ext_to_compression_type_mapper for e in compression_type_extensions}, - }, - ) - - -def make_ext_to_type2ext(ext2_type2ext): - def ext_to_type2ext(ext): - return ext2_type2ext[ext](ext) - - return ext_to_type2ext - - -def parse_file_extensions(path, ext_to_type2ext: Callable): - return merge(*map(ext_to_type2ext, path.split("."))) diff --git a/pyinfra/webserver/__init__.py 
b/pyinfra/webserver/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyinfra/webserver.py b/pyinfra/webserver/utils.py similarity index 87% rename from pyinfra/webserver.py rename to pyinfra/webserver/utils.py index 0d37139..6bacc54 100644 --- a/pyinfra/webserver.py +++ b/pyinfra/webserver/utils.py @@ -5,7 +5,7 @@ import uvicorn from dynaconf import Dynaconf from fastapi import FastAPI -from pyinfra.utils.config_validation import validate_settings, webserver_validators +from pyinfra.config.validation import validate_settings, webserver_validators def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: diff --git a/pyproject.toml b/pyproject.toml index 3f3b41f..580903b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ requests = "^2.31" minversion = "6.0" addopts = "-ra -q" testpaths = ["tests", "integration"] -norecursedirs = "tests/tests_with_docker_compose" log_cli = 1 log_cli_level = "DEBUG" diff --git a/tests/conftest.py b/tests/conftest.py index 6b16f8c..826afb3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,148 +1,8 @@ -import gzip -import json import pytest -from pyinfra.config import get_config, load_settings -from pyinfra.payload_processing.payload import LegacyQueueMessagePayload, QueueMessagePayload +from pyinfra.config.loader import load_settings @pytest.fixture(scope="session") def settings(): return load_settings() - - -@pytest.fixture -def legacy_payload(x_tenant_id, optional_processing_kwargs): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "dossierId": "test", - "fileId": "test", - "targetFileExtension": "target.json.gz", - "responseFileExtension": "response.json.gz", - **x_tenant_entry, - **optional_processing_kwargs, - } - - -@pytest.fixture -def target_file_path(): - return "test/test.target.json.gz" - - -@pytest.fixture -def response_file_path(): - return 
"test/test.response.json.gz" - - -@pytest.fixture -def payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "targetFilePath": target_file_path, - "responseFilePath": response_file_path, - **x_tenant_entry, - **optional_processing_kwargs, - } - - -@pytest.fixture -def legacy_queue_response_payload(x_tenant_id, optional_processing_kwargs): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "dossierId": "test", - "fileId": "test", - **x_tenant_entry, - **optional_processing_kwargs, - } - - -@pytest.fixture -def queue_response_payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "targetFilePath": target_file_path, - "responseFilePath": response_file_path, - **x_tenant_entry, - **optional_processing_kwargs, - } - - -@pytest.fixture -def legacy_storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "dossierId": "test", - "fileId": "test", - "targetFileExtension": "target.json.gz", - "responseFileExtension": "response.json.gz", - **x_tenant_entry, - **optional_processing_kwargs, - "data": processing_result_json, - } - - -@pytest.fixture -def storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json, target_file_path, response_file_path): - x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {} - optional_processing_kwargs = optional_processing_kwargs or {} - return { - "targetFilePath": target_file_path, - 
"responseFilePath": response_file_path, - **x_tenant_entry, - **optional_processing_kwargs, - "data": processing_result_json, - } - - -@pytest.fixture -def legacy_parsed_payload( - x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path -) -> LegacyQueueMessagePayload: - return LegacyQueueMessagePayload( - dossier_id="test", - file_id="test", - x_tenant_id=x_tenant_id, - target_file_extension="target.json.gz", - response_file_extension="response.json.gz", - target_file_type="json", - target_compression_type="gz", - response_file_type="json", - response_compression_type="gz", - target_file_path=target_file_path, - response_file_path=response_file_path, - processing_kwargs=optional_processing_kwargs or {}, - ) - - -@pytest.fixture -def parsed_payload( - x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path -) -> QueueMessagePayload: - return QueueMessagePayload( - x_tenant_id=x_tenant_id, - target_file_type="json", - target_compression_type="gz", - response_file_type="json", - response_compression_type="gz", - target_file_path=target_file_path, - response_file_path=response_file_path, - processing_kwargs=optional_processing_kwargs or {}, - ) - - -@pytest.fixture -def target_json_file() -> bytes: - data = {"target": "test"} - enc_data = json.dumps(data).encode("utf-8") - compr_data = gzip.compress(enc_data) - return compr_data - - -@pytest.fixture -def processing_result_json() -> dict: - return {"response": "test"} diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml index 311192d..c53537c 100644 --- a/tests/docker-compose.yml +++ b/tests/docker-compose.yml @@ -8,7 +8,7 @@ services: - MINIO_ROOT_PASSWORD=password - MINIO_ROOT_USER=root volumes: - - ./data/minio_store:/data + - /tmp/minio_store:/data command: server /data network_mode: "bridge" rabbitmq: diff --git a/tests/unit_tests/cipher_test.py b/tests/unit_test/cipher_test.py similarity index 100% rename from tests/unit_tests/cipher_test.py rename to 
tests/unit_test/cipher_test.py diff --git a/tests/unit_tests/test_knutils.py b/tests/unit_test/knutils_logger_test.py similarity index 100% rename from tests/unit_tests/test_knutils.py rename to tests/unit_test/knutils_logger_test.py diff --git a/tests/tests_with_docker_compose/prometheus_monitoring_test.py b/tests/unit_test/prometheus_monitoring_test.py similarity index 95% rename from tests/tests_with_docker_compose/prometheus_monitoring_test.py rename to tests/unit_test/prometheus_monitoring_test.py index 75da089..452954c 100644 --- a/tests/tests_with_docker_compose/prometheus_monitoring_test.py +++ b/tests/unit_test/prometheus_monitoring_test.py @@ -6,7 +6,7 @@ import requests from fastapi import FastAPI from pyinfra.monitor.prometheus import add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings -from pyinfra.webserver import create_webserver_thread_from_settings +from pyinfra.webserver.utils import create_webserver_thread_from_settings @pytest.fixture(scope="class") diff --git a/tests/tests_with_docker_compose/queue_test.py b/tests/unit_test/queue_test.py similarity index 100% rename from tests/tests_with_docker_compose/queue_test.py rename to tests/unit_test/queue_test.py diff --git a/tests/tests_with_docker_compose/storage_test.py b/tests/unit_test/storage_test.py similarity index 74% rename from tests/tests_with_docker_compose/storage_test.py rename to tests/unit_test/storage_test.py index 1f97468..515f31c 100644 --- a/tests/tests_with_docker_compose/storage_test.py +++ b/tests/unit_test/storage_test.py @@ -1,11 +1,14 @@ +import gzip +import json from time import sleep import pytest from fastapi import FastAPI from pyinfra.storage.connection import get_storage_from_settings, get_storage_from_tenant_id +from pyinfra.storage.utils import download_data_as_specified_in_message, upload_data_as_specified_in_message from pyinfra.utils.cipher import encrypt -from pyinfra.webserver import create_webserver_thread +from 
pyinfra.webserver.utils import create_webserver_thread @pytest.fixture(scope="class") @@ -19,41 +22,6 @@ def storage(storage_backend, settings): storage.clear_bucket() -@pytest.fixture(scope="class") -def tenant_server_mock(settings, tenant_server_host, tenant_server_port): - app = FastAPI() - - @app.get("/azure_tenant") - def get_azure_storage_info(): - return { - "azureStorageConnection": { - "connectionString": encrypt( - settings.storage.tenant_server.public_key, settings.storage.azure.connection_string - ), - "containerName": settings.storage.azure.container, - } - } - - @app.get("/s3_tenant") - def get_s3_storage_info(): - return { - "s3StorageConnection": { - "endpoint": settings.storage.s3.endpoint, - "key": settings.storage.s3.key, - "secret": encrypt(settings.storage.tenant_server.public_key, settings.storage.s3.secret), - "region": settings.storage.s3.region, - "bucketName": settings.storage.s3.bucket, - } - } - - thread = create_webserver_thread(app, tenant_server_port, tenant_server_host) - thread.daemon = True - thread.start() - sleep(1) - yield - thread.join(timeout=1) - - @pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="class") class TestStorage: def test_clearing_bucket_yields_empty_bucket(self, storage): @@ -103,6 +71,41 @@ class TestStorage: storage.get_object("folder/file") +@pytest.fixture(scope="class") +def tenant_server_mock(settings, tenant_server_host, tenant_server_port): + app = FastAPI() + + @app.get("/azure_tenant") + def get_azure_storage_info(): + return { + "azureStorageConnection": { + "connectionString": encrypt( + settings.storage.tenant_server.public_key, settings.storage.azure.connection_string + ), + "containerName": settings.storage.azure.container, + } + } + + @app.get("/s3_tenant") + def get_s3_storage_info(): + return { + "s3StorageConnection": { + "endpoint": settings.storage.s3.endpoint, + "key": settings.storage.s3.key, + "secret": encrypt(settings.storage.tenant_server.public_key, 
settings.storage.s3.secret), + "region": settings.storage.s3.region, + "bucketName": settings.storage.s3.bucket, + } + } + + thread = create_webserver_thread(app, tenant_server_port, tenant_server_host) + thread.daemon = True + thread.start() + sleep(1) + yield + thread.join(timeout=1) + + @pytest.mark.parametrize("tenant_id", ["azure_tenant", "s3_tenant"], scope="class") @pytest.mark.parametrize("tenant_server_host", ["localhost"], scope="class") @pytest.mark.parametrize("tenant_server_port", [8000], scope="class") @@ -117,3 +120,39 @@ class TestMultiTenantStorage: data_received = storage.get_object("file") assert b"content" == data_received + + +@pytest.fixture +def payload(payload_type): + if payload_type == "target_response_file_path": + return { + "targetFilePath": "test/file.target.json.gz", + "responseFilePath": "test/file.response.json.gz", + } + elif payload_type == "dossier_id_file_id": + return { + "dossierId": "test", + "fileId": "file", + "targetFileExtension": "target.json.gz", + "responseFileExtension": "response.json.gz", + } + + +@pytest.mark.parametrize("payload_type", ["target_response_file_path", "dossier_id_file_id"], scope="class") +@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="class") +class TestDownloadAndUploadFromMessage: + def test_download_and_upload_from_message(self, storage, payload): + storage.clear_bucket() + + input_data = {"data": "success"} + + storage.put_object("test/file.target.json.gz", gzip.compress(json.dumps(input_data).encode())) + + data = download_data_as_specified_in_message(storage, payload) + + assert data == input_data + + upload_data_as_specified_in_message(storage, payload, input_data) + data = json.loads(gzip.decompress(storage.get_object("test/file.response.json.gz")).decode()) + + assert data == {**payload, "data": input_data} diff --git a/tests/unit_tests/file_extension_parsing_test.py b/tests/unit_tests/file_extension_parsing_test.py deleted file mode 100644 index ef4741f..0000000 --- 
a/tests/unit_tests/file_extension_parsing_test.py +++ /dev/null @@ -1,32 +0,0 @@ -import pytest - -from pyinfra.utils.file_extension_parsing import make_file_extension_parser - - -@pytest.fixture -def file_extension_parser(file_types, compression_types): - return make_file_extension_parser(file_types, compression_types) - - -@pytest.mark.parametrize( - "file_path,file_types,compression_types,expected_file_extension,expected_compression_extension", - [ - ("test.txt", ["txt"], ["gz"], "txt", None), - ("test.txt.gz", ["txt"], ["gz"], "txt", "gz"), - ("test.txt.gz", [], [], None, None), - ("test.txt.gz", ["txt"], [], "txt", None), - ("test.txt.gz", [], ["gz"], None, "gz"), - ("test", ["txt"], ["gz"], None, None), - ], -) -def test_file_extension_parsing( - file_extension_parser, - file_path, - file_types, - compression_types, - expected_file_extension, - expected_compression_extension, -): - file_extension, compression_extension = file_extension_parser(file_path) - assert file_extension == expected_file_extension - assert compression_extension == expected_compression_extension diff --git a/tests/unit_tests/monitor_test.py b/tests/unit_tests/monitor_test.py deleted file mode 100644 index fad8e6a..0000000 --- a/tests/unit_tests/monitor_test.py +++ /dev/null @@ -1,44 +0,0 @@ -import re -import time - -import pytest -import requests - -from pyinfra.payload_processing.monitor import PrometheusMonitor - - -@pytest.fixture(scope="class") -def monitored_mock_function(metric_prefix, host, port): - def process(data=None): - time.sleep(2) - return ["result1", "result2", "result3"] - - monitor = PrometheusMonitor(metric_prefix, host, port) - return monitor(process) - - -@pytest.fixture -def metric_endpoint(host, port): - return f"http://{host}:{port}/prometheus" - - -@pytest.mark.parametrize("metric_prefix, host, port", [("test", "0.0.0.0", 8000)], scope="class") -class TestPrometheusMonitor: - def test_prometheus_endpoint_is_available(self, metric_endpoint, 
monitored_mock_function): - resp = requests.get(metric_endpoint) - assert resp.status_code == 200 - - def test_processing_with_a_monitored_fn_increases_parameter_counter( - self, - metric_endpoint, - metric_prefix, - monitored_mock_function, - ): - monitored_mock_function(data=None) - resp = requests.get(metric_endpoint) - pattern = re.compile(rf".*{metric_prefix}_processing_time_count (\d\.\d).*") - assert pattern.search(resp.text).group(1) == "1.0" - - monitored_mock_function(data=None) - resp = requests.get(metric_endpoint) - assert pattern.search(resp.text).group(1) == "2.0" diff --git a/tests/unit_tests/payload_test.py b/tests/unit_tests/payload_test.py deleted file mode 100644 index 037a53a..0000000 --- a/tests/unit_tests/payload_test.py +++ /dev/null @@ -1,48 +0,0 @@ -import pytest - -from pyinfra.config import get_config -from pyinfra.payload_processing.payload import ( - get_queue_message_payload_parser, - format_to_queue_message_response_body, - format_service_processing_result_for_storage, -) - - -@pytest.fixture -def payload_parser(): - config = get_config() - return get_queue_message_payload_parser(config) - - -@pytest.mark.parametrize("x_tenant_id", [None, "klaus"]) -@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}]) -class TestPayloadParsing: - def test_legacy_payload_parsing(self, payload_parser, legacy_payload, legacy_parsed_payload): - parsed_payload = payload_parser(legacy_payload) - assert parsed_payload == legacy_parsed_payload - - def test_payload_parsing(self, payload_parser, payload, parsed_payload): - parsed_payload = payload_parser(payload) - assert parsed_payload == parsed_payload - - -@pytest.mark.parametrize("x_tenant_id", [None, "klaus"]) -@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}]) -class TestPayloadFormatting: - def test_legacy_payload_formatting_for_response(self, legacy_parsed_payload, legacy_queue_response_payload): - formatted_payload = 
format_to_queue_message_response_body(legacy_parsed_payload) - assert formatted_payload == legacy_queue_response_payload - - def test_payload_formatting_for_response(self, parsed_payload, queue_response_payload): - formatted_payload = format_to_queue_message_response_body(parsed_payload) - assert formatted_payload == queue_response_payload - - def test_legacy_payload_formatting_for_storage( - self, legacy_parsed_payload, processing_result_json, legacy_storage_payload - ): - formatted_payload = format_service_processing_result_for_storage(legacy_parsed_payload, processing_result_json) - assert formatted_payload == legacy_storage_payload - - def test_payload_formatting_for_storage(self, parsed_payload, processing_result_json, storage_payload): - formatted_payload = format_service_processing_result_for_storage(parsed_payload, processing_result_json) - assert formatted_payload == storage_payload diff --git a/tests/unit_tests/processor_test.py b/tests/unit_tests/processor_test.py deleted file mode 100644 index cf9a7df..0000000 --- a/tests/unit_tests/processor_test.py +++ /dev/null @@ -1,81 +0,0 @@ -import gzip -import json -import pytest - -from pyinfra.config import get_config -from pyinfra.payload_processing.payload import get_queue_message_payload_parser -from pyinfra.payload_processing.processor import PayloadProcessor -from pyinfra.storage.storage_info import StorageInfo -from pyinfra.storage.storage_provider import StorageProviderMock -from pyinfra.storage.storages.mock import StorageMock - - -@pytest.fixture -def bucket_name(): - return "test_bucket" - - -@pytest.fixture -def storage_mock(target_json_file, target_file_path, bucket_name): - storage = StorageMock(target_json_file, target_file_path, bucket_name) - return storage - - -@pytest.fixture -def storage_info_mock(bucket_name): - return StorageInfo(bucket_name) - - -@pytest.fixture -def data_processor_mock(processing_result_json): - def inner(data, **kwargs): - return processing_result_json - - return inner 
- - -@pytest.fixture -def payload_processor(storage_mock, storage_info_mock, data_processor_mock): - storage_provider = StorageProviderMock(storage_mock, storage_info_mock) - payload_parser = get_queue_message_payload_parser(get_config()) - return PayloadProcessor(storage_provider, payload_parser, data_processor_mock) - - -@pytest.mark.parametrize("x_tenant_id", [None, "klaus"]) -@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}]) -class TestPayloadProcessor: - def test_payload_processor_yields_correct_response_and_uploads_result_for_legacy_message( - self, - payload_processor, - storage_mock, - bucket_name, - response_file_path, - legacy_payload, - legacy_queue_response_payload, - legacy_storage_payload, - ): - response = payload_processor(legacy_payload) - - assert response == legacy_queue_response_payload - - data_stored = storage_mock.get_object(bucket_name, response_file_path) - - assert json.loads(gzip.decompress(data_stored).decode()) == legacy_storage_payload - - def test_payload_processor_yields_correct_response_and_uploads_result( - self, - payload_processor, - storage_mock, - bucket_name, - response_file_path, - payload, - queue_response_payload, - storage_payload, - ): - response = payload_processor(payload) - - assert response == queue_response_payload - - data_stored = storage_mock.get_object(bucket_name, response_file_path) - - assert json.loads(gzip.decompress(data_stored).decode()) == storage_payload From b7f860f36becac83fa22a6f1a66c485ad7b8a09d Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 18 Jan 2024 17:10:04 +0100 Subject: [PATCH 12/39] WIP: add callback factory and update example scripts --- pyinfra/monitor/prometheus.py | 2 +- pyinfra/payload_processing/__init__.py | 0 pyinfra/payload_processing/payload.py | 199 ------------------------ pyinfra/payload_processing/processor.py | 97 ------------ pyinfra/queue/callback.py | 36 +++++ pyinfra/queue/manager.py | 3 +- pyinfra/storage/connection.py | 2 +- 
pyinfra/storage/storages/s3.py | 2 +- pyinfra/webserver/utils.py | 2 +- scripts/send_request.py | 44 +++--- scripts/start_pyinfra.py | 34 +++- tests/conftest.py | 12 ++ tests/unit_test/storage_test.py | 13 +- 13 files changed, 102 insertions(+), 344 deletions(-) delete mode 100644 pyinfra/payload_processing/__init__.py delete mode 100644 pyinfra/payload_processing/payload.py delete mode 100644 pyinfra/payload_processing/processor.py create mode 100644 pyinfra/queue/callback.py diff --git a/pyinfra/monitor/prometheus.py b/pyinfra/monitor/prometheus.py index 534aaa5..f9bddd4 100644 --- a/pyinfra/monitor/prometheus.py +++ b/pyinfra/monitor/prometheus.py @@ -7,7 +7,7 @@ from funcy import identity from prometheus_client import generate_latest, CollectorRegistry, REGISTRY, Summary from starlette.responses import Response -from pyinfra.config.validation import validate_settings, prometheus_validators +from pyinfra.config.validation import prometheus_validators, validate_settings def add_prometheus_endpoint(app: FastAPI, registry: CollectorRegistry = REGISTRY) -> FastAPI: diff --git a/pyinfra/payload_processing/__init__.py b/pyinfra/payload_processing/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyinfra/payload_processing/payload.py b/pyinfra/payload_processing/payload.py deleted file mode 100644 index debf69f..0000000 --- a/pyinfra/payload_processing/payload.py +++ /dev/null @@ -1,199 +0,0 @@ -from dataclasses import dataclass -from functools import singledispatch, partial -from funcy import project, complement -from itertools import chain -from operator import itemgetter -from typing import Union, Sized, Callable, List - -from pyinfra.config import Config -from pyinfra.utils.file_extension_parsing import make_file_extension_parser - - -@dataclass -class QueueMessagePayload: - """Default one-to-one payload, where the message contains the absolute file paths for the target and response files, - that have to be acquired from the storage.""" - - 
target_file_path: str - response_file_path: str - - target_file_type: Union[str, None] - target_compression_type: Union[str, None] - response_file_type: Union[str, None] - response_compression_type: Union[str, None] - - x_tenant_id: Union[str, None] - - processing_kwargs: dict - - -@dataclass -class LegacyQueueMessagePayload(QueueMessagePayload): - """Legacy one-to-one payload, where the message contains the dossier and file ids, and the file extensions that have - to be used to construct the absolute file paths for the target and response files, that have to be acquired from the - storage.""" - - dossier_id: str - file_id: str - - target_file_extension: str - response_file_extension: str - - -class QueueMessagePayloadParser: - def __init__(self, payload_matcher2parse_strategy: dict): - self.payload_matcher2parse_strategy = payload_matcher2parse_strategy - - def __call__(self, payload: dict) -> QueueMessagePayload: - for payload_matcher, parse_strategy in self.payload_matcher2parse_strategy.items(): - if payload_matcher(payload): - return parse_strategy(payload) - - -def get_queue_message_payload_parser(config: Config) -> QueueMessagePayloadParser: - file_extension_parser = make_file_extension_parser(config.allowed_file_types, config.allowed_compression_types) - - payload_matcher2parse_strategy = get_payload_matcher2parse_strategy( - file_extension_parser, config.allowed_processing_parameters - ) - - return QueueMessagePayloadParser(payload_matcher2parse_strategy) - - -def get_payload_matcher2parse_strategy(parse_file_extensions: Callable, allowed_processing_parameters: List[str]): - return { - is_legacy_payload: partial( - parse_legacy_queue_message_payload, - parse_file_extensions=parse_file_extensions, - allowed_processing_parameters=allowed_processing_parameters, - ), - complement(is_legacy_payload): partial( - parse_queue_message_payload, - parse_file_extensions=parse_file_extensions, - allowed_processing_parameters=allowed_processing_parameters, - ), - } - - 
-def is_legacy_payload(payload: dict) -> bool: - return {"dossierId", "fileId", "targetFileExtension", "responseFileExtension"}.issubset(payload.keys()) - - -def parse_queue_message_payload( - payload: dict, - parse_file_extensions: Callable, - allowed_processing_parameters: List[str], -) -> QueueMessagePayload: - target_file_path, response_file_path = itemgetter("targetFilePath", "responseFilePath")(payload) - - target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( - map(parse_file_extensions, [target_file_path, response_file_path]) - ) - - x_tenant_id = payload.get("X-TENANT-ID") - - processing_kwargs = project(payload, allowed_processing_parameters) - - return QueueMessagePayload( - target_file_path=target_file_path, - response_file_path=response_file_path, - target_file_type=target_file_type, - target_compression_type=target_compression_type, - response_file_type=response_file_type, - response_compression_type=response_compression_type, - x_tenant_id=x_tenant_id, - processing_kwargs=processing_kwargs, - ) - - -def parse_legacy_queue_message_payload( - payload: dict, - parse_file_extensions: Callable, - allowed_processing_parameters: List[str], -) -> LegacyQueueMessagePayload: - dossier_id, file_id, target_file_extension, response_file_extension = itemgetter( - "dossierId", "fileId", "targetFileExtension", "responseFileExtension" - )(payload) - - target_file_path = f"{dossier_id}/{file_id}.{target_file_extension}" - response_file_path = f"{dossier_id}/{file_id}.{response_file_extension}" - - target_file_type, target_compression_type, response_file_type, response_compression_type = chain.from_iterable( - map(parse_file_extensions, [target_file_extension, response_file_extension]) - ) - - x_tenant_id = payload.get("X-TENANT-ID") - - processing_kwargs = project(payload, allowed_processing_parameters) - - return LegacyQueueMessagePayload( - dossier_id=dossier_id, - file_id=file_id, - x_tenant_id=x_tenant_id, 
- target_file_extension=target_file_extension, - response_file_extension=response_file_extension, - target_file_type=target_file_type, - target_compression_type=target_compression_type, - response_file_type=response_file_type, - response_compression_type=response_compression_type, - target_file_path=target_file_path, - response_file_path=response_file_path, - processing_kwargs=processing_kwargs, - ) - - -@singledispatch -def format_service_processing_result_for_storage(payload: QueueMessagePayload, result: Sized) -> dict: - raise NotImplementedError("Unsupported payload type") - - -@format_service_processing_result_for_storage.register(LegacyQueueMessagePayload) -def _(payload: LegacyQueueMessagePayload, result: Sized) -> dict: - processing_kwargs = payload.processing_kwargs or {} - x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} - return { - "dossierId": payload.dossier_id, - "fileId": payload.file_id, - "targetFileExtension": payload.target_file_extension, - "responseFileExtension": payload.response_file_extension, - **x_tenant_id, - **processing_kwargs, - "data": result, - } - - -@format_service_processing_result_for_storage.register(QueueMessagePayload) -def _(payload: QueueMessagePayload, result: Sized) -> dict: - processing_kwargs = payload.processing_kwargs or {} - x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} - return { - "targetFilePath": payload.target_file_path, - "responseFilePath": payload.response_file_path, - **x_tenant_id, - **processing_kwargs, - "data": result, - } - - -@singledispatch -def format_to_queue_message_response_body(queue_message_payload: QueueMessagePayload) -> dict: - raise NotImplementedError("Unsupported payload type") - - -@format_to_queue_message_response_body.register(LegacyQueueMessagePayload) -def _(payload: LegacyQueueMessagePayload) -> dict: - processing_kwargs = payload.processing_kwargs or {} - x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if 
payload.x_tenant_id else {} - return {"dossierId": payload.dossier_id, "fileId": payload.file_id, **x_tenant_id, **processing_kwargs} - - -@format_to_queue_message_response_body.register(QueueMessagePayload) -def _(payload: QueueMessagePayload) -> dict: - processing_kwargs = payload.processing_kwargs or {} - x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {} - return { - "targetFilePath": payload.target_file_path, - "responseFilePath": payload.response_file_path, - **x_tenant_id, - **processing_kwargs, - } diff --git a/pyinfra/payload_processing/processor.py b/pyinfra/payload_processing/processor.py deleted file mode 100644 index 4fcea1d..0000000 --- a/pyinfra/payload_processing/processor.py +++ /dev/null @@ -1,97 +0,0 @@ -from kn_utils.logging import logger -from dataclasses import asdict -from typing import Callable, List - -from pyinfra.config import get_config, Config -from pyinfra.payload_processing.monitor import get_monitor_from_config -from pyinfra.payload_processing.payload import ( - QueueMessagePayloadParser, - get_queue_message_payload_parser, - format_service_processing_result_for_storage, - format_to_queue_message_response_body, - QueueMessagePayload, -) -from pyinfra.storage.storage import make_downloader, make_uploader -from pyinfra.storage.storage_provider import StorageProvider - - -class PayloadProcessor: - def __init__( - self, - storage_provider: StorageProvider, - payload_parser: QueueMessagePayloadParser, - data_processor: Callable, - ): - """Wraps an analysis function specified by a service (e.g. NER service) in pre- and post-processing steps. 
- - Args: - storage_provider: Storage manager that connects to the storage, using the tenant id if provided - payload_parser: Parser that translates the queue message payload to the required QueueMessagePayload object - data_processor: The analysis function to be called with the downloaded file - NOTE: The result of the analysis function has to be an instance of `Sized`, e.g. a dict or a list to be - able to upload it and to be able to monitor the processing time. - """ - self.parse_payload = payload_parser - self.provide_storage = storage_provider - self.process_data = data_processor - - def __call__(self, queue_message_payload: dict) -> dict: - """Processes a queue message payload. - - The steps executed are: - 1. Download the file specified in the message payload from the storage - 2. Process the file with the analysis function - 3. Upload the result to the storage - 4. Return the payload for a response queue message - - Args: - queue_message_payload: The payload of a queue message. The payload is expected to be a dict with the - following keys: - targetFilePath, responseFilePath - OR - dossierId, fileId, targetFileExtension, responseFileExtension - - Returns: - The payload for a response queue message, containing only the request payload. 
- """ - return self._process(queue_message_payload) - - def _process(self, queue_message_payload: dict) -> dict: - payload: QueueMessagePayload = self.parse_payload(queue_message_payload) - - logger.info(f"Processing {payload.__class__.__name__} ...") - logger.debug(f"Payload contents: {asdict(payload)} ...") - - storage, storage_info = self.provide_storage(payload.x_tenant_id) - - download_file_to_process = make_downloader( - storage, storage_info.bucket_name, payload.target_file_type, payload.target_compression_type - ) - upload_processing_result = make_uploader( - storage, storage_info.bucket_name, payload.response_file_type, payload.response_compression_type - ) - - data = download_file_to_process(payload.target_file_path) - result: List[dict] = self.process_data(data, **payload.processing_kwargs) - formatted_result = format_service_processing_result_for_storage(payload, result) - - upload_processing_result(payload.response_file_path, formatted_result) - - return format_to_queue_message_response_body(payload) - - -def make_payload_processor(data_processor: Callable, config: Config = None) -> PayloadProcessor: - """Creates a payload processor.""" - config = config or get_config() - - storage_provider = StorageProvider(config) - monitor = get_monitor_from_config(config) - payload_parser: QueueMessagePayloadParser = get_queue_message_payload_parser(config) - - data_processor = monitor(data_processor) - - return PayloadProcessor( - storage_provider, - payload_parser, - data_processor, - ) diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py new file mode 100644 index 0000000..cedf686 --- /dev/null +++ b/pyinfra/queue/callback.py @@ -0,0 +1,36 @@ +from typing import Callable, Union + +from dynaconf import Dynaconf +from kn_utils.logging import logger + +from pyinfra.storage.connection import get_storage +from pyinfra.storage.utils import download_data_as_specified_in_message, upload_data_as_specified_in_message + +DataProcessor = Callable[[Union[dict, 
bytes], dict], dict] + + +def make_payload_processor(data_processor: DataProcessor, settings: Dynaconf): + """Default callback for processing queue messages. + Data will be downloaded from the storage as specified in the message. If a tenant id is specified, the storage + will be configured to use that tenant id, otherwise the storage is configured as specified in the settings. + The data is the passed to the dataprocessor, together with the message. The dataprocessor should return a + json-dump-able object. This object is then uploaded to the storage as specified in the message. + + The response message is just the original message. + Adapt as needed. + """ + + def inner(queue_message_payload: dict) -> dict: + logger.info(f"Processing payload...") + + storage = get_storage(settings, queue_message_payload.get("X-TENANT-ID")) + + data = download_data_as_specified_in_message(storage, queue_message_payload) + + result = data_processor(data, queue_message_payload) + + upload_data_as_specified_in_message(storage, queue_message_payload, result) + + return queue_message_payload + + return inner \ No newline at end of file diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index ebef9f3..d91e0be 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -13,7 +13,7 @@ from kn_utils.logging import logger from pika.adapters.blocking_connection import BlockingChannel, BlockingConnection from retry import retry -from pyinfra.config.validation import validate_settings, queue_manager_validators +from pyinfra.config.validation import queue_manager_validators, validate_settings pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter @@ -175,6 +175,7 @@ class QueueManager: result, properties=pika.BasicProperties(headers=filtered_message_headers), ) + # FIXME: publish doesnt work in example script, explore, adapt, overcome logger.info(f"Published result to queue {self.output_queue}.") 
channel.basic_ack(delivery_tag=method.delivery_tag) diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index 5c51df0..b7c5845 100644 --- a/pyinfra/storage/connection.py +++ b/pyinfra/storage/connection.py @@ -8,7 +8,7 @@ from pyinfra.storage.storages.azure import get_azure_storage_from_settings from pyinfra.storage.storages.s3 import get_s3_storage_from_settings from pyinfra.storage.storages.storage import Storage from pyinfra.utils.cipher import decrypt -from pyinfra.config.validation import validate_settings, storage_validators, multi_tenant_storage_validators +from pyinfra.config.validation import storage_validators, multi_tenant_storage_validators, validate_settings def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index c1e725c..52f3957 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -8,7 +8,7 @@ from minio import Minio from retry import retry from pyinfra.storage.storages.storage import Storage -from pyinfra.config.validation import validate_settings, s3_storage_validators +from pyinfra.config.validation import s3_storage_validators, validate_settings from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index 6bacc54..c72c40e 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -5,7 +5,7 @@ import uvicorn from dynaconf import Dynaconf from fastapi import FastAPI -from pyinfra.config.validation import validate_settings, webserver_validators +from pyinfra.config.validation import webserver_validators, validate_settings def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: diff --git a/scripts/send_request.py b/scripts/send_request.py index a30f725..79e6a4c 100644 --- a/scripts/send_request.py +++ b/scripts/send_request.py @@ -1,22 +1,20 @@ import gzip import 
json -import logging from operator import itemgetter import pika +from kn_utils.logging import logger -from pyinfra.config import get_config -from pyinfra.queue.development_queue_manager import DevelopmentQueueManager -from pyinfra.storage.storages.s3 import get_s3_storage_from_config +from pyinfra.config.loader import load_settings +from pyinfra.queue.manager import QueueManager +from pyinfra.storage.storages.s3 import get_s3_storage_from_settings -CONFIG = get_config() -logging.basicConfig() -logger = logging.getLogger() -logger.setLevel(logging.INFO) +settings = load_settings() def upload_json_and_make_message_body(): - bucket = CONFIG.storage_bucket + bucket = settings.storage.s3.bucket + dossier_id, file_id, suffix = "dossier", "file", "json.gz" content = { "numberOfPages": 7, @@ -26,10 +24,10 @@ def upload_json_and_make_message_body(): object_name = f"{dossier_id}/{file_id}.{suffix}" data = gzip.compress(json.dumps(content).encode("utf-8")) - storage = get_s3_storage_from_config(CONFIG) - if not storage.has_bucket(bucket): - storage.make_bucket(bucket) - storage.put_object(bucket, object_name, data) + storage = get_s3_storage_from_settings(settings) + if not storage.has_bucket(): + storage.make_bucket() + storage.put_object(object_name, data) message_body = { "dossierId": dossier_id, @@ -41,31 +39,31 @@ def upload_json_and_make_message_body(): def main(): - development_queue_manager = DevelopmentQueueManager(CONFIG) - development_queue_manager.clear_queues() + queue_manager = QueueManager(settings) + queue_manager.purge_queues() message = upload_json_and_make_message_body() - development_queue_manager.publish_request(message, pika.BasicProperties(headers={"X-TENANT-ID": "redaction"})) - logger.info(f"Put {message} on {CONFIG.request_queue}") + queue_manager.publish_message_to_input_queue(message) + logger.info(f"Put {message} on {settings.rabbitmq.input_queue}.") - storage = get_s3_storage_from_config(CONFIG) - for method_frame, properties, body in 
development_queue_manager._channel.consume( - queue=CONFIG.response_queue, inactivity_timeout=15 + storage = get_s3_storage_from_settings(settings) + for method_frame, properties, body in queue_manager.channel.consume( + queue=settings.rabbitmq.output_queue, inactivity_timeout=15 ): if not body: break response = json.loads(body) logger.info(f"Received {response}") logger.info(f"Message headers: {properties.headers}") - development_queue_manager._channel.basic_ack(method_frame.delivery_tag) + queue_manager.channel.basic_ack(method_frame.delivery_tag) dossier_id, file_id = itemgetter("dossierId", "fileId")(response) suffix = message["responseFileExtension"] print(f"{dossier_id}/{file_id}.{suffix}") - result = storage.get_object(CONFIG.storage_bucket, f"{dossier_id}/{file_id}.{suffix}") + result = storage.get_object(f"{dossier_id}/{file_id}.{suffix}") result = json.loads(gzip.decompress(result)) logger.info(f"Contents of result on storage: {result}") - development_queue_manager.close_channel() + queue_manager.stop_consuming() if __name__ == "__main__": diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 31b3a3a..0b1c4ec 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,24 +1,42 @@ import logging import time -from pyinfra.config import get_config -from pyinfra.payload_processing.processor import make_payload_processor -from pyinfra.queue.queue_manager import QueueManager +from fastapi import FastAPI + +from pyinfra.config.loader import load_settings +from pyinfra.monitor.prometheus import make_prometheus_processing_time_decorator_from_settings, add_prometheus_endpoint +from pyinfra.queue.callback import make_payload_processor +from pyinfra.queue.manager import QueueManager +from pyinfra.webserver.utils import create_webserver_thread_from_settings logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) +settings = load_settings() -def json_processor_mock(data: dict): + 
+@make_prometheus_processing_time_decorator_from_settings(settings) +def json_processor_mock(_data: dict, _message: dict) -> dict: time.sleep(5) - return [{"result1": "result1"}, {"result2": "result2"}] + return {"result1": "result1"} def main(): - logger.info("Start consuming...") - queue_manager = QueueManager(get_config()) - queue_manager.start_consuming(make_payload_processor(json_processor_mock)) + app = FastAPI() + app = add_prometheus_endpoint(app) + + queue_manager = QueueManager(settings) + + @app.get("/ready") + @app.get("/health") + def check_health(): + return queue_manager.is_ready() + + webserver_thread = create_webserver_thread_from_settings(app, settings) + webserver_thread.start() + callback = make_payload_processor(json_processor_mock, settings) + queue_manager.start_consuming(callback) if __name__ == "__main__": diff --git a/tests/conftest.py b/tests/conftest.py index 826afb3..81750c8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,20 @@ import pytest from pyinfra.config.loader import load_settings +from pyinfra.storage.connection import get_storage_from_settings @pytest.fixture(scope="session") def settings(): return load_settings() + + +@pytest.fixture(scope="class") +def storage(storage_backend, settings): + settings.storage.backend = storage_backend + + storage = get_storage_from_settings(settings) + storage.make_bucket() + + yield storage + storage.clear_bucket() diff --git a/tests/unit_test/storage_test.py b/tests/unit_test/storage_test.py index 515f31c..365d066 100644 --- a/tests/unit_test/storage_test.py +++ b/tests/unit_test/storage_test.py @@ -5,23 +5,12 @@ from time import sleep import pytest from fastapi import FastAPI -from pyinfra.storage.connection import get_storage_from_settings, get_storage_from_tenant_id +from pyinfra.storage.connection import get_storage_from_tenant_id from pyinfra.storage.utils import download_data_as_specified_in_message, upload_data_as_specified_in_message from pyinfra.utils.cipher import 
encrypt from pyinfra.webserver.utils import create_webserver_thread -@pytest.fixture(scope="class") -def storage(storage_backend, settings): - settings.storage.backend = storage_backend - - storage = get_storage_from_settings(settings) - storage.make_bucket() - - yield storage - storage.clear_bucket() - - @pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="class") class TestStorage: def test_clearing_bucket_yields_empty_bucket(self, storage): From fbbfc553ae4d8274a027ba2b0566fbd9d9f6bc69 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 19 Jan 2024 08:53:36 +0100 Subject: [PATCH 13/39] fix message encoding for response, rename some functions --- pyinfra/queue/callback.py | 6 +++--- pyinfra/queue/manager.py | 3 +-- pyinfra/webserver/utils.py | 7 ++++++- scripts/start_pyinfra.py | 10 +++++++--- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py index cedf686..3fa8f22 100644 --- a/pyinfra/queue/callback.py +++ b/pyinfra/queue/callback.py @@ -9,7 +9,7 @@ from pyinfra.storage.utils import download_data_as_specified_in_message, upload_ DataProcessor = Callable[[Union[dict, bytes], dict], dict] -def make_payload_processor(data_processor: DataProcessor, settings: Dynaconf): +def make_queue_message_callback(data_processor: DataProcessor, settings: Dynaconf): """Default callback for processing queue messages. Data will be downloaded from the storage as specified in the message. If a tenant id is specified, the storage will be configured to use that tenant id, otherwise the storage is configured as specified in the settings. @@ -19,7 +19,7 @@ def make_payload_processor(data_processor: DataProcessor, settings: Dynaconf): The response message is just the original message. Adapt as needed. 
""" - + def inner(queue_message_payload: dict) -> dict: logger.info(f"Processing payload...") @@ -33,4 +33,4 @@ def make_payload_processor(data_processor: DataProcessor, settings: Dynaconf): return queue_message_payload - return inner \ No newline at end of file + return inner diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index d91e0be..34d5313 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -172,10 +172,9 @@ class QueueManager: channel.basic_publish( "", self.output_queue, - result, + json.dumps(result).encode(), properties=pika.BasicProperties(headers=filtered_message_headers), ) - # FIXME: publish doesnt work in example script, explore, adapt, overcome logger.info(f"Published result to queue {self.output_queue}.") channel.basic_ack(delivery_tag=method.delivery_tag) diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index c72c40e..70a9c2b 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -15,4 +15,9 @@ def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> t def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thread: - return threading.Thread(target=lambda: uvicorn.run(app, port=port, host=host, log_level=logging.WARNING)) + """Creates a thread that runs a FastAPI webserver. Start with thread.start(), and join with thread.join(). + Note that the thread is a daemon thread, so it will be terminated when the main thread is terminated. 
+ """ + thread = threading.Thread(target=lambda: uvicorn.run(app, port=port, host=host, log_level=logging.WARNING)) + thread.daemon = True + return thread diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 0b1c4ec..ea959b5 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -5,7 +5,7 @@ from fastapi import FastAPI from pyinfra.config.loader import load_settings from pyinfra.monitor.prometheus import make_prometheus_processing_time_decorator_from_settings, add_prometheus_endpoint -from pyinfra.queue.callback import make_payload_processor +from pyinfra.queue.callback import make_queue_message_callback from pyinfra.queue.manager import QueueManager from pyinfra.webserver.utils import create_webserver_thread_from_settings @@ -31,11 +31,15 @@ def main(): @app.get("/ready") @app.get("/health") def check_health(): - return queue_manager.is_ready() + if queue_manager.is_ready(): + return {"status": "OK"}, 200 + else: + return {"status": "Service Unavailable"}, 503 webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() - callback = make_payload_processor(json_processor_mock, settings) + + callback = make_queue_message_callback(json_processor_mock, settings) queue_manager.start_consuming(callback) From 9c2f34e69478157875709e9078146e972972c8cd Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 19 Jan 2024 13:02:47 +0100 Subject: [PATCH 14/39] Put add health check in own function --- pyinfra/monitor/__init__.py | 0 pyinfra/{monitor => webserver}/prometheus.py | 0 pyinfra/webserver/utils.py | 20 +++++++++++++++++++ scripts/start_pyinfra.py | 12 +++-------- tests/unit_test/prometheus_monitoring_test.py | 2 +- tests/unit_test/queue_test.py | 8 ++++---- 6 files changed, 28 insertions(+), 14 deletions(-) delete mode 100644 pyinfra/monitor/__init__.py rename pyinfra/{monitor => webserver}/prometheus.py (100%) diff --git a/pyinfra/monitor/__init__.py b/pyinfra/monitor/__init__.py deleted file 
mode 100644 index e69de29..0000000 diff --git a/pyinfra/monitor/prometheus.py b/pyinfra/webserver/prometheus.py similarity index 100% rename from pyinfra/monitor/prometheus.py rename to pyinfra/webserver/prometheus.py diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index 70a9c2b..fc7534b 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -1,5 +1,6 @@ import logging import threading +from typing import Callable import uvicorn from dynaconf import Dynaconf @@ -21,3 +22,22 @@ def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thr thread = threading.Thread(target=lambda: uvicorn.run(app, port=port, host=host, log_level=logging.WARNING)) thread.daemon = True return thread + + +HealthFunction = Callable[[], bool] + + +def add_health_check_endpoint(app: FastAPI, health_function: HealthFunction) -> FastAPI: + """Add a health check endpoint to the app. The health function should return True if the service is healthy, + and False otherwise. The health function is called when the endpoint is hit. 
+ """ + + @app.get("/health") + @app.get("/ready") + def check_health(): + if health_function(): + return {"status": "OK"}, 200 + else: + return {"status": "Service Unavailable"}, 503 + + return app diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index ea959b5..86fca6c 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -4,10 +4,10 @@ import time from fastapi import FastAPI from pyinfra.config.loader import load_settings -from pyinfra.monitor.prometheus import make_prometheus_processing_time_decorator_from_settings, add_prometheus_endpoint +from pyinfra.webserver.prometheus import make_prometheus_processing_time_decorator_from_settings, add_prometheus_endpoint from pyinfra.queue.callback import make_queue_message_callback from pyinfra.queue.manager import QueueManager -from pyinfra.webserver.utils import create_webserver_thread_from_settings +from pyinfra.webserver.utils import create_webserver_thread_from_settings, add_health_check_endpoint logging.basicConfig() logger = logging.getLogger() @@ -28,13 +28,7 @@ def main(): queue_manager = QueueManager(settings) - @app.get("/ready") - @app.get("/health") - def check_health(): - if queue_manager.is_ready(): - return {"status": "OK"}, 200 - else: - return {"status": "Service Unavailable"}, 503 + app = add_health_check_endpoint(app, queue_manager.is_ready) webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() diff --git a/tests/unit_test/prometheus_monitoring_test.py b/tests/unit_test/prometheus_monitoring_test.py index 452954c..3ced056 100644 --- a/tests/unit_test/prometheus_monitoring_test.py +++ b/tests/unit_test/prometheus_monitoring_test.py @@ -5,7 +5,7 @@ import pytest import requests from fastapi import FastAPI -from pyinfra.monitor.prometheus import add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings +from pyinfra.webserver.prometheus import add_prometheus_endpoint, 
make_prometheus_processing_time_decorator_from_settings from pyinfra.webserver.utils import create_webserver_thread_from_settings diff --git a/tests/unit_test/queue_test.py b/tests/unit_test/queue_test.py index 449cf30..720c6c3 100644 --- a/tests/unit_test/queue_test.py +++ b/tests/unit_test/queue_test.py @@ -15,7 +15,7 @@ logger.add(sink=stdout, level="DEBUG") def make_callback(process_time): def callback(x): sleep(process_time) - return json.dumps({"status": "success"}) + return {"status": "success"} return callback @@ -56,7 +56,7 @@ class TestQueueManager: for _ in range(2): response = queue_manager.get_message_from_output_queue() assert response is not None - assert response[2] == b'{"status": "success"}' + assert json.loads(response[2].decode()) == {"status": "success"} def test_all_headers_beginning_with_x_are_forwarded(self, queue_manager, input_message, stop_message): queue_manager.purge_queues() @@ -78,7 +78,7 @@ class TestQueueManager: response = queue_manager.get_message_from_output_queue() - assert response[2] == b'{"status": "success"}' + assert json.loads(response[2].decode()) == {"status": "success"} assert response[1].headers["X-TENANT-ID"] == "redaction" assert response[1].headers["X-OTHER-HEADER"] == "other-header-value" @@ -97,4 +97,4 @@ class TestQueueManager: response = queue_manager.get_message_from_output_queue() - assert response[2] == b'{"status": "success"}' + assert json.loads(response[2].decode()) == {"status": "success"} From 87cbf896723fdbd42ee1c4b66dbcc04353dbb0dc Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 19 Jan 2024 14:04:56 +0100 Subject: [PATCH 15/39] finish config loading logic --- pyinfra/config/loader.py | 55 +++++++++++++++---- .../config/{validation.py => validators.py} | 19 +------ pyinfra/queue/manager.py | 3 +- pyinfra/storage/connection.py | 3 +- pyinfra/storage/storages/azure.py | 3 +- pyinfra/storage/storages/s3.py | 3 +- pyinfra/webserver/prometheus.py | 23 ++++---- pyinfra/webserver/utils.py | 3 +- 
scripts/start_pyinfra.py | 25 ++++----- 9 files changed, 77 insertions(+), 60 deletions(-) rename pyinfra/config/{validation.py => validators.py} (75%) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index a66bbd7..a705927 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -1,23 +1,56 @@ import os from pathlib import Path +from typing import Union -from dynaconf import Dynaconf +import funcy +from dynaconf import Dynaconf, ValidationError +from funcy import merge, lflatten +from kn_utils.logging import logger -def load_settings(): - # TODO: Make dynamic, so that the settings.toml file can be loaded from any location - # TODO: add validation - root_path = Path(__file__).resolve().parents[1] # this is pyinfra/ - repo_root_path = root_path.parents[0] # this is the root of the repo - os.environ["ROOT_PATH"] = str(root_path) - os.environ["REPO_ROOT_PATH"] = str(repo_root_path) +def load_settings(settings_path: Union[str, Path] = None): + if not settings_path: + repo_root_path = Path(__file__).resolve().parents[2] + settings_path = repo_root_path / "config/" + logger.info(f"No settings path provided, using relative settings path: {settings_path}") + + settings_path = Path(settings_path) + + if os.path.isdir(settings_path): + logger.info(f"Settings path is a directory, loading all .toml files in the directory: {settings_path}") + settings_files = list(settings_path.glob("*.toml")) + else: + logger.info(f"Settings path is a file, loading only the specified file: {settings_path}") + settings_files = [settings_path] settings = Dynaconf( load_dotenv=True, envvar_prefix=False, - settings_files=[ - repo_root_path / "config" / "settings.toml", - ], + settings_files=settings_files, ) + validate_settings(settings, get_all_validators()) + return settings + + +def get_all_validators(): + import pyinfra.config.validators + + return lflatten(validator for validator in pyinfra.config.validators.__dict__.values() if isinstance(validator, list)) + + 
+def validate_settings(settings: Dynaconf, validators): + settings_valid = True + + for validator in validators: + try: + validator.validate(settings) + except ValidationError as e: + settings_valid = False + logger.warning(e) + + if not settings_valid: + raise ValidationError("Settings validation failed.") + + logger.debug("Settings validated.") diff --git a/pyinfra/config/validation.py b/pyinfra/config/validators.py similarity index 75% rename from pyinfra/config/validation.py rename to pyinfra/config/validators.py index 629891d..bdc6204 100644 --- a/pyinfra/config/validation.py +++ b/pyinfra/config/validators.py @@ -1,5 +1,4 @@ -from dynaconf import Validator, Dynaconf, ValidationError -from kn_utils.logging import logger +from dynaconf import Validator queue_manager_validators = [ Validator("rabbitmq.host", must_exist=True), @@ -45,19 +44,3 @@ webserver_validators = [ Validator("webserver.host", must_exist=True), Validator("webserver.port", must_exist=True), ] - - -def validate_settings(settings: Dynaconf, validators): - settings_valid = True - - for validator in validators: - try: - validator.validate(settings) - except ValidationError as e: - settings_valid = False - logger.warning(e) - - if not settings_valid: - raise ValidationError("Settings validation failed.") - - logger.info("Settings validated.") diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index 34d5313..ea9c1b4 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -13,7 +13,8 @@ from kn_utils.logging import logger from pika.adapters.blocking_connection import BlockingChannel, BlockingConnection from retry import retry -from pyinfra.config.validation import queue_manager_validators, validate_settings +from pyinfra.config.validators import queue_manager_validators +from pyinfra.config.loader import validate_settings pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter diff --git 
a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index b7c5845..abe0d5f 100644 --- a/pyinfra/storage/connection.py +++ b/pyinfra/storage/connection.py @@ -8,7 +8,8 @@ from pyinfra.storage.storages.azure import get_azure_storage_from_settings from pyinfra.storage.storages.s3 import get_s3_storage_from_settings from pyinfra.storage.storages.storage import Storage from pyinfra.utils.cipher import decrypt -from pyinfra.config.validation import storage_validators, multi_tenant_storage_validators, validate_settings +from pyinfra.config.validators import storage_validators, multi_tenant_storage_validators +from pyinfra.config.loader import validate_settings def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index 5689a51..1207d9a 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ -8,7 +8,8 @@ from kn_utils.logging import logger from retry import retry from pyinfra.storage.storages.storage import Storage -from pyinfra.config.validation import azure_storage_validators, validate_settings +from pyinfra.config.validators import azure_storage_validators +from pyinfra.config.loader import validate_settings logging.getLogger("azure").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index 52f3957..0a6636e 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -8,7 +8,8 @@ from minio import Minio from retry import retry from pyinfra.storage.storages.storage import Storage -from pyinfra.config.validation import s3_storage_validators, validate_settings +from pyinfra.config.validators import s3_storage_validators +from pyinfra.config.loader import validate_settings from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint diff --git a/pyinfra/webserver/prometheus.py 
b/pyinfra/webserver/prometheus.py index f9bddd4..a274dbf 100644 --- a/pyinfra/webserver/prometheus.py +++ b/pyinfra/webserver/prometheus.py @@ -7,7 +7,8 @@ from funcy import identity from prometheus_client import generate_latest, CollectorRegistry, REGISTRY, Summary from starlette.responses import Response -from pyinfra.config.validation import prometheus_validators, validate_settings +from pyinfra.config.validators import prometheus_validators +from pyinfra.config.loader import validate_settings def add_prometheus_endpoint(app: FastAPI, registry: CollectorRegistry = REGISTRY) -> FastAPI: @@ -31,28 +32,24 @@ Decorator = TypeVar("Decorator", bound=Callable[[Callable], Callable]) def make_prometheus_processing_time_decorator_from_settings( - settings: Dynaconf, registry: CollectorRegistry = REGISTRY + settings: Dynaconf, + postfix: str = "processing_time", + registry: CollectorRegistry = REGISTRY, ) -> Decorator: """Make a decorator for monitoring the processing time of a function. The decorator is only applied if the prometheus metrics are enabled in the settings. + This, and other metrics should follow the convention + {product name}_{service name}_{processing step / parameter to monitor}. 
""" validate_settings(settings, validators=prometheus_validators) if not settings.metrics.prometheus.enabled: return identity - return make_prometheus_processing_time_decorator( - prefix=settings.metrics.prometheus.prefix, - registry=registry, - ) - - -def make_prometheus_processing_time_decorator( - prefix: str = "readactmanager_research_service", - registry: CollectorRegistry = REGISTRY, -) -> Decorator: processing_time_sum = Summary( - f"{prefix}_processing_time", "Summed up average processing time per call.", registry=registry + f"{settings.metrics.prometheus.prefix}_{postfix}", + "Summed up processing time per call.", + registry=registry, ) def decorator(process_fn: Callable) -> Callable: diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index fc7534b..fc8a16c 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -6,7 +6,8 @@ import uvicorn from dynaconf import Dynaconf from fastapi import FastAPI -from pyinfra.config.validation import webserver_validators, validate_settings +from pyinfra.config.validators import webserver_validators +from pyinfra.config.loader import validate_settings def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 86fca6c..fa2d5de 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,30 +1,29 @@ -import logging import time +from pathlib import Path +from typing import Union from fastapi import FastAPI from pyinfra.config.loader import load_settings -from pyinfra.webserver.prometheus import make_prometheus_processing_time_decorator_from_settings, add_prometheus_endpoint from pyinfra.queue.callback import make_queue_message_callback from pyinfra.queue.manager import QueueManager +from pyinfra.webserver.prometheus import ( + make_prometheus_processing_time_decorator_from_settings, + add_prometheus_endpoint, +) from pyinfra.webserver.utils import 
create_webserver_thread_from_settings, add_health_check_endpoint -logging.basicConfig() -logger = logging.getLogger() -logger.setLevel(logging.INFO) -settings = load_settings() - - -@make_prometheus_processing_time_decorator_from_settings(settings) -def json_processor_mock(_data: dict, _message: dict) -> dict: +def processor_mock(_data: dict, _message: dict) -> dict: time.sleep(5) return {"result1": "result1"} -def main(): +def start_serving(process_fn, settings_path: Union[str, Path] = None): + settings = load_settings(settings_path) app = FastAPI() app = add_prometheus_endpoint(app) + process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) queue_manager = QueueManager(settings) @@ -33,9 +32,9 @@ def main(): webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() - callback = make_queue_message_callback(json_processor_mock, settings) + callback = make_queue_message_callback(process_fn, settings) queue_manager.start_consuming(callback) if __name__ == "__main__": - main() + start_serving(processor_mock) From 8cd1d6b283385aec0253476e3accbeed99612539 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 19 Jan 2024 14:15:00 +0100 Subject: [PATCH 16/39] add retries to queue consuming, so we retry at least a bit if something happens. Eventually the container should crash though since there do exist unfixable problems sadly. 
--- pyinfra/queue/manager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index ea9c1b4..da49299 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -80,6 +80,7 @@ class QueueManager: self.establish_connection() return self.channel.is_open + @retry(exceptions=pika.exceptions.AMQPConnectionError, tries=3, delay=5, jitter=(1, 3), logger=logger) def start_consuming(self, message_processor: Callable): on_message_callback = self._make_on_message_callback(message_processor) From 73eba97eded00732faec4275e902b8455ed37bf1 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Fri, 19 Jan 2024 14:53:02 +0100 Subject: [PATCH 17/39] Add serving example TODO: - update readme - check if logs are adequate --- pyinfra/config/loader.py | 4 ++-- pyinfra/examples.py | 35 ++++++++++++++++++++++++++++++++ scripts/start_pyinfra.py | 44 +++++++++++++++------------------------- 3 files changed, 53 insertions(+), 30 deletions(-) create mode 100644 pyinfra/examples.py diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index a705927..56658f9 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -2,13 +2,13 @@ import os from pathlib import Path from typing import Union -import funcy from dynaconf import Dynaconf, ValidationError -from funcy import merge, lflatten +from funcy import lflatten from kn_utils.logging import logger def load_settings(settings_path: Union[str, Path] = None): + if not settings_path: repo_root_path = Path(__file__).resolve().parents[2] settings_path = repo_root_path / "config/" diff --git a/pyinfra/examples.py b/pyinfra/examples.py new file mode 100644 index 0000000..9297101 --- /dev/null +++ b/pyinfra/examples.py @@ -0,0 +1,35 @@ +from dynaconf import Dynaconf +from fastapi import FastAPI + +from pyinfra.queue.callback import make_queue_message_callback, DataProcessor +from pyinfra.queue.manager import QueueManager +from pyinfra.webserver.prometheus import 
add_prometheus_endpoint, \ + make_prometheus_processing_time_decorator_from_settings +from pyinfra.webserver.utils import add_health_check_endpoint, create_webserver_thread_from_settings + + +def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataProcessor, settings: Dynaconf): + """Default serving logic for research services. + + Supplies /health, /ready and /prometheus endpoints. The process_fn is monitored for processing time per call. + Workload is only received via queue messages. The message contains a file path to the data to be processed, which + gets downloaded from the storage. The data and the message are then passed to the process_fn. The process_fn should + return a json-dump-able object. This object is then uploaded to the storage. The response message is just the + original message. + + Adapt as needed. + """ + app = FastAPI() + + app = add_prometheus_endpoint(app) + process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) + + queue_manager = QueueManager(settings) + + app = add_health_check_endpoint(app, queue_manager.is_ready) + + webserver_thread = create_webserver_thread_from_settings(app, settings) + webserver_thread.start() + + callback = make_queue_message_callback(process_fn, settings) + queue_manager.start_consuming(callback) diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index fa2d5de..29874ea 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,17 +1,21 @@ +import argparse import time from pathlib import Path -from typing import Union - -from fastapi import FastAPI from pyinfra.config.loader import load_settings -from pyinfra.queue.callback import make_queue_message_callback -from pyinfra.queue.manager import QueueManager -from pyinfra.webserver.prometheus import ( - make_prometheus_processing_time_decorator_from_settings, - add_prometheus_endpoint, -) -from pyinfra.webserver.utils import create_webserver_thread_from_settings, 
add_health_check_endpoint +from pyinfra.examples import start_queue_consumer_with_prometheus_and_health_endpoints + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--settings_path", + "-s", + type=Path, + default=None, + help="Path to settings file or folder. Must be a .toml file or a folder containing .toml files.", + ) + return parser.parse_args() def processor_mock(_data: dict, _message: dict) -> dict: @@ -19,22 +23,6 @@ def processor_mock(_data: dict, _message: dict) -> dict: return {"result1": "result1"} -def start_serving(process_fn, settings_path: Union[str, Path] = None): - settings = load_settings(settings_path) - app = FastAPI() - app = add_prometheus_endpoint(app) - process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) - - queue_manager = QueueManager(settings) - - app = add_health_check_endpoint(app, queue_manager.is_ready) - - webserver_thread = create_webserver_thread_from_settings(app, settings) - webserver_thread.start() - - callback = make_queue_message_callback(process_fn, settings) - queue_manager.start_consuming(callback) - - if __name__ == "__main__": - start_serving(processor_mock) + settings = load_settings(parse_args().settings_path) + start_queue_consumer_with_prometheus_and_health_endpoints(processor_mock, settings) From adfbd650e61d7b74fcc9c9d1deb6f0967fe9ff7b Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 23 Jan 2024 08:51:44 +0100 Subject: [PATCH 18/39] Add config tests, add type validation to config loading --- pyinfra/config/loader.py | 25 ++++++++++-------- pyinfra/config/validators.py | 46 +++++++++++++++++----------------- scripts/send_request.py | 5 ++-- scripts/start_pyinfra.py | 4 +-- tests/conftest.py | 4 +-- tests/unit_test/config_test.py | 36 ++++++++++++++++++++++++++ 6 files changed, 79 insertions(+), 41 deletions(-) create mode 100644 tests/unit_test/config_test.py diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index 
56658f9..1e10c44 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -2,21 +2,19 @@ import os from pathlib import Path from typing import Union -from dynaconf import Dynaconf, ValidationError +from dynaconf import Dynaconf, ValidationError, Validator from funcy import lflatten from kn_utils.logging import logger -def load_settings(settings_path: Union[str, Path] = None): +def load_settings(settings_path: Union[str, Path] = None, validators: list[Validator] = None): + settings_path = Path(settings_path) if settings_path else None + validators = validators or get_all_validators() if not settings_path: - repo_root_path = Path(__file__).resolve().parents[2] - settings_path = repo_root_path / "config/" - logger.info(f"No settings path provided, using relative settings path: {settings_path}") - - settings_path = Path(settings_path) - - if os.path.isdir(settings_path): + logger.info("No settings path specified, only loading .env end ENVs.") + settings_files = [] + elif os.path.isdir(settings_path): logger.info(f"Settings path is a directory, loading all .toml files in the directory: {settings_path}") settings_files = list(settings_path.glob("*.toml")) else: @@ -29,15 +27,20 @@ def load_settings(settings_path: Union[str, Path] = None): settings_files=settings_files, ) - validate_settings(settings, get_all_validators()) + validate_settings(settings, validators) return settings +pyinfra_config_path = Path(__file__).resolve().parents[2] / "config/" + + def get_all_validators(): import pyinfra.config.validators - return lflatten(validator for validator in pyinfra.config.validators.__dict__.values() if isinstance(validator, list)) + return lflatten( + validator for validator in pyinfra.config.validators.__dict__.values() if isinstance(validator, list) + ) def validate_settings(settings: Dynaconf, validators): diff --git a/pyinfra/config/validators.py b/pyinfra/config/validators.py index bdc6204..e0ae2af 100644 --- a/pyinfra/config/validators.py +++ 
b/pyinfra/config/validators.py @@ -1,46 +1,46 @@ from dynaconf import Validator queue_manager_validators = [ - Validator("rabbitmq.host", must_exist=True), - Validator("rabbitmq.port", must_exist=True), - Validator("rabbitmq.username", must_exist=True), - Validator("rabbitmq.password", must_exist=True), - Validator("rabbitmq.heartbeat", must_exist=True), - Validator("rabbitmq.connection_sleep", must_exist=True), - Validator("rabbitmq.input_queue", must_exist=True), - Validator("rabbitmq.output_queue", must_exist=True), - Validator("rabbitmq.dead_letter_queue", must_exist=True), + Validator("rabbitmq.host", must_exist=True, is_type_of=str), + Validator("rabbitmq.port", must_exist=True, is_type_of=int), + Validator("rabbitmq.username", must_exist=True, is_type_of=str), + Validator("rabbitmq.password", must_exist=True, is_type_of=str), + Validator("rabbitmq.heartbeat", must_exist=True, is_type_of=int), + Validator("rabbitmq.connection_sleep", must_exist=True, is_type_of=int), + Validator("rabbitmq.input_queue", must_exist=True, is_type_of=str), + Validator("rabbitmq.output_queue", must_exist=True, is_type_of=str), + Validator("rabbitmq.dead_letter_queue", must_exist=True, is_type_of=str), ] azure_storage_validators = [ - Validator("storage.azure.connection_string", must_exist=True), - Validator("storage.azure.container", must_exist=True), + Validator("storage.azure.connection_string", must_exist=True, is_type_of=str), + Validator("storage.azure.container", must_exist=True, is_type_of=str), ] s3_storage_validators = [ - Validator("storage.s3.endpoint", must_exist=True), - Validator("storage.s3.key", must_exist=True), - Validator("storage.s3.secret", must_exist=True), - Validator("storage.s3.region", must_exist=True), - Validator("storage.s3.bucket", must_exist=True), + Validator("storage.s3.endpoint", must_exist=True, is_type_of=str), + Validator("storage.s3.key", must_exist=True, is_type_of=str), + Validator("storage.s3.secret", must_exist=True, is_type_of=str), + 
Validator("storage.s3.region", must_exist=True, is_type_of=str), + Validator("storage.s3.bucket", must_exist=True, is_type_of=str), ] storage_validators = [ - Validator("storage.backend", must_exist=True), + Validator("storage.backend", must_exist=True, is_type_of=str), ] multi_tenant_storage_validators = [ - Validator("storage.tenant_server.endpoint", must_exist=True), - Validator("storage.tenant_server.public_key", must_exist=True), + Validator("storage.tenant_server.endpoint", must_exist=True, is_type_of=str), + Validator("storage.tenant_server.public_key", must_exist=True, is_type_of=str), ] prometheus_validators = [ - Validator("metrics.prometheus.prefix", must_exist=True), - Validator("metrics.prometheus.enabled", must_exist=True), + Validator("metrics.prometheus.prefix", must_exist=True, is_type_of=str), + Validator("metrics.prometheus.enabled", must_exist=True, is_type_of=bool), ] webserver_validators = [ - Validator("webserver.host", must_exist=True), - Validator("webserver.port", must_exist=True), + Validator("webserver.host", must_exist=True, is_type_of=str), + Validator("webserver.port", must_exist=True, is_type_of=int), ] diff --git a/scripts/send_request.py b/scripts/send_request.py index 79e6a4c..5e464fb 100644 --- a/scripts/send_request.py +++ b/scripts/send_request.py @@ -2,14 +2,13 @@ import gzip import json from operator import itemgetter -import pika from kn_utils.logging import logger -from pyinfra.config.loader import load_settings +from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.queue.manager import QueueManager from pyinfra.storage.storages.s3 import get_s3_storage_from_settings -settings = load_settings() +settings = load_settings(pyinfra_config_path) def upload_json_and_make_message_body(): diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 29874ea..013367b 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -2,7 +2,7 @@ import argparse import time from pathlib 
import Path -from pyinfra.config.loader import load_settings +from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.examples import start_queue_consumer_with_prometheus_and_health_endpoints @@ -12,7 +12,7 @@ def parse_args(): "--settings_path", "-s", type=Path, - default=None, + default=pyinfra_config_path, help="Path to settings file or folder. Must be a .toml file or a folder containing .toml files.", ) return parser.parse_args() diff --git a/tests/conftest.py b/tests/conftest.py index 81750c8..4939c83 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,12 @@ import pytest -from pyinfra.config.loader import load_settings +from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.storage.connection import get_storage_from_settings @pytest.fixture(scope="session") def settings(): - return load_settings() + return load_settings(pyinfra_config_path) @pytest.fixture(scope="class") diff --git a/tests/unit_test/config_test.py b/tests/unit_test/config_test.py new file mode 100644 index 0000000..363dbac --- /dev/null +++ b/tests/unit_test/config_test.py @@ -0,0 +1,36 @@ +import os + +import pytest +from dynaconf import Validator + +from pyinfra.config.loader import load_settings +from pyinfra.config.validators import webserver_validators + + +@pytest.fixture +def test_validators(): + return [ + Validator("test.value.int", must_exist=True, is_type_of=int), + Validator("test.value.str", must_exist=True, is_type_of=str), + ] + + +class TestConfig: + def test_config_validation(self): + os.environ["WEBSERVER__HOST"] = "localhost" + os.environ["WEBSERVER__PORT"] = "8080" + + validators = webserver_validators + + test_settings = load_settings(validators=validators) + + assert test_settings.webserver.host == "localhost" + + def test_env_into_correct_type_conversion(self, test_validators): + os.environ["TEST__VALUE__INT"] = "1" + os.environ["TEST__VALUE__STR"] = "test" + + test_settings = 
load_settings(validators=test_validators) + + assert test_settings.test.value.int == 1 + assert test_settings.test.value.str == "test" From d6eeb65cccfcaf6c064150cd71fa06aa4dd3a62b Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 23 Jan 2024 10:25:56 +0100 Subject: [PATCH 19/39] Update scripts --- scripts/send_request.py | 2 -- scripts/start_pyinfra.py | 5 ++++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/send_request.py b/scripts/send_request.py index 5e464fb..b33f9a1 100644 --- a/scripts/send_request.py +++ b/scripts/send_request.py @@ -12,8 +12,6 @@ settings = load_settings(pyinfra_config_path) def upload_json_and_make_message_body(): - bucket = settings.storage.s3.bucket - dossier_id, file_id, suffix = "dossier", "file", "json.gz" content = { "numberOfPages": 7, diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 013367b..de57496 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -2,6 +2,8 @@ import argparse import time from pathlib import Path +from kn_utils.logging import logger + from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.examples import start_queue_consumer_with_prometheus_and_health_endpoints @@ -18,7 +20,8 @@ def parse_args(): return parser.parse_args() -def processor_mock(_data: dict, _message: dict) -> dict: +def processor_mock(_data: dict, message: dict) -> dict: + logger.info(f"Received message for tenant {message.get('X-TENANT-ID')}") time.sleep(5) return {"result1": "result1"} From 429a85b609525899549b710e63185cdab8ba0f42 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 23 Jan 2024 10:26:44 +0100 Subject: [PATCH 20/39] Disable automated tests until we find a way to run docker compose before 
@@ -8,4 +8,4 @@ default: run-tests: script: - - pytest . + - echo "Disabled until we have an automated way to run docker compose before tests." diff --git a/README.md b/README.md index b9dc2b8..4e4213d 100755 --- a/README.md +++ b/README.md @@ -113,9 +113,9 @@ priority = "explicit" ### API ```python -from pyinfra import config +from pyinfra import loader from pyinfra.payload_processing.processor import make_payload_processor -from pyinfra.queue.queue_manager import QueueManager +from pyinfra.queue.manager import QueueManager pyinfra_config = config.get_config() diff --git a/tests/conftest.py b/tests/conftest.py index 4939c83..2dd03d8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ import pytest from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.storage.connection import get_storage_from_settings +pytest_plugins = ["docker_compose"] + @pytest.fixture(scope="session") def settings(): From be602d8411b3664eb5027eed51a3ecb001bf3ee3 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 23 Jan 2024 14:10:56 +0100 Subject: [PATCH 21/39] Adjust logs --- pyinfra/config/loader.py | 1 + pyinfra/examples.py | 7 +++++-- pyinfra/queue/callback.py | 4 ++-- pyinfra/queue/manager.py | 6 +++++- pyinfra/storage/connection.py | 2 ++ pyinfra/storage/utils.py | 1 + scripts/start_pyinfra.py | 3 +-- tests/conftest.py | 2 -- 8 files changed, 17 insertions(+), 9 deletions(-) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index 1e10c44..d89a95d 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -28,6 +28,7 @@ def load_settings(settings_path: Union[str, Path] = None, validators: list[Valid ) validate_settings(settings, validators) + logger.info("Settings loaded and validated.") return settings diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 9297101..0bc512a 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -1,7 +1,8 @@ from dynaconf import Dynaconf from fastapi import 
FastAPI +from kn_utils.logging import logger -from pyinfra.queue.callback import make_queue_message_callback, DataProcessor +from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor from pyinfra.queue.manager import QueueManager from pyinfra.webserver.prometheus import add_prometheus_endpoint, \ make_prometheus_processing_time_decorator_from_settings @@ -19,6 +20,8 @@ def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataPr Adapt as needed. """ + logger.info(f"Starting webserver and queue consumer...") + app = FastAPI() app = add_prometheus_endpoint(app) @@ -31,5 +34,5 @@ def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataPr webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() - callback = make_queue_message_callback(process_fn, settings) + callback = make_download_process_upload_callback(process_fn, settings) queue_manager.start_consuming(callback) diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py index 3fa8f22..f678f27 100644 --- a/pyinfra/queue/callback.py +++ b/pyinfra/queue/callback.py @@ -9,7 +9,7 @@ from pyinfra.storage.utils import download_data_as_specified_in_message, upload_ DataProcessor = Callable[[Union[dict, bytes], dict], dict] -def make_queue_message_callback(data_processor: DataProcessor, settings: Dynaconf): +def make_download_process_upload_callback(data_processor: DataProcessor, settings: Dynaconf): """Default callback for processing queue messages. Data will be downloaded from the storage as specified in the message. If a tenant id is specified, the storage will be configured to use that tenant id, otherwise the storage is configured as specified in the settings. 
@@ -21,7 +21,7 @@ def make_queue_message_callback(data_processor: DataProcessor, settings: Dynacon """ def inner(queue_message_payload: dict) -> dict: - logger.info(f"Processing payload...") + logger.info(f"Processing payload with download-process-upload callback...") storage = get_storage(settings, queue_message_payload.get("X-TENANT-ID")) diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index da49299..767d723 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -135,10 +135,14 @@ class QueueManager: def _make_on_message_callback(self, message_processor: MessageProcessor): def process_message_body_and_await_result(unpacked_message_body): + # Processing the message in a separate thread is necessary for the main thread pika client to be able to + # process data events (e.g. heartbeats) while the message is being processed. with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread_pool_executor: - logger.debug("Processing payload in separate thread.") + logger.info("Processing payload in separate thread.") future = thread_pool_executor.submit(message_processor, unpacked_message_body) + # FIXME: This block is probably not necessary, but kept since the implications of removing it are + # unclear. Remove it in a future iteration where less changes are being made to the code base. while future.running(): logger.debug("Waiting for payload processing to finish...") self.connection.process_data_events() diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index abe0d5f..75d278a 100644 --- a/pyinfra/storage/connection.py +++ b/pyinfra/storage/connection.py @@ -20,6 +20,8 @@ def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: In the future, when the default storage from config is no longer needed (only multi-tenant storage will be used), get_storage_from_tenant_id can replace this function directly. 
""" + logger.info("Establishing storage connection...") + if tenant_id: logger.info(f"Using tenant storage for {tenant_id}.") return get_storage_from_tenant_id(tenant_id, settings) diff --git a/pyinfra/storage/utils.py b/pyinfra/storage/utils.py index d125115..bb3598b 100644 --- a/pyinfra/storage/utils.py +++ b/pyinfra/storage/utils.py @@ -67,6 +67,7 @@ def download_data_as_specified_in_message(storage: Storage, raw_payload: dict) - data = gzip.decompress(data) if ".gz" in payload.targetFilePath else data data = json.loads(data.decode("utf-8")) if ".json" in payload.targetFilePath else data + logger.info(f"Downloaded {payload.targetFilePath} from storage.") return data diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index de57496..b89d4c3 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -20,8 +20,7 @@ def parse_args(): return parser.parse_args() -def processor_mock(_data: dict, message: dict) -> dict: - logger.info(f"Received message for tenant {message.get('X-TENANT-ID')}") +def processor_mock(_data: dict, _message: dict) -> dict: time.sleep(5) return {"result1": "result1"} diff --git a/tests/conftest.py b/tests/conftest.py index 2dd03d8..4939c83 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,8 +3,6 @@ import pytest from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.storage.connection import get_storage_from_settings -pytest_plugins = ["docker_compose"] - @pytest.fixture(scope="session") def settings(): From 725d6dce450217f40e5120b4e6ba5e79132d39a4 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 23 Jan 2024 18:08:57 +0100 Subject: [PATCH 22/39] Update readme --- README.md | 170 +++++++++++++++++++------------------- pyinfra/config/loader.py | 13 +++ pyinfra/examples.py | 2 +- pyinfra/queue/callback.py | 2 +- pyinfra/storage/utils.py | 6 +- scripts/start_pyinfra.py | 18 +--- 6 files changed, 104 insertions(+), 107 deletions(-) diff --git a/README.md b/README.md index 
4e4213d..a55a974 100755 --- a/README.md +++ b/README.md @@ -2,75 +2,89 @@ 1. [ About ](#about) 2. [ Configuration ](#configuration) -3. [ Response Format ](#response-format) -4. [ Usage & API ](#usage--api) +3. [ Queue Manager ](#queue-manager) +4. [ Module Installation ](#module-installation) 5. [ Scripts ](#scripts) 6. [ Tests ](#tests) + ## About -Common Module with the infrastructure to deploy Research Projects. -The Infrastructure expects to be deployed in the same Pod / local environment as the analysis container and handles all outbound communication. +Shared library for the research team, containing code related to infrastructure and communication with other services. +Offers a simple interface for processing data and sending responses via AMQP, monitoring via Prometheus and storage +access via S3 or Azure. + +To start, see the [complete example](pyinfra/examples.py) which shows how to use all features of the service and can be +imported and used directly for default research service pipelines (data ID in message, download data from storage, +upload result while offering Prometheus monitoring, /health and /ready endpoints and multi tenancy support). ## Configuration -A configuration is located in `/config.yaml`. All relevant variables can be configured via exporting environment variables. +Configuration is done via `Dynaconf`. This means that you can use environment variables, a `.env` file or `.toml` +file(s) to configure the service. You can also combine these methods. The precedence is +`environment variables > .env > .toml`. It is recommended to load settings with the provided +[`load_settings`](pyinfra/config/loader.py) function, which you can combine with the provided +[`parse_args`](pyinfra/config/loader.py) function. This allows you to load settings from a `.toml` file or a folder with +`.toml` files and override them with environment variables. 
-| Environment Variable | Default | Description | -|-------------------------------|----------------------------------|--------------------------------------------------------------------------| -| LOGGING_LEVEL_ROOT | "DEBUG" | Logging level for service logger | -| MONITORING_ENABLED | True | Enables Prometheus monitoring | -| PROMETHEUS_METRIC_PREFIX | "redactmanager_research_service" | Prometheus metric prefix, per convention '{product_name}_{service name}' | -| PROMETHEUS_HOST | "127.0.0.1" | Prometheus webserver address | -| PROMETHEUS_PORT | 8080 | Prometheus webserver port | -| RABBITMQ_HOST | "localhost" | RabbitMQ host address | -| RABBITMQ_PORT | "5672" | RabbitMQ host port | -| RABBITMQ_USERNAME | "user" | RabbitMQ username | -| RABBITMQ_PASSWORD | "bitnami" | RabbitMQ password | -| RABBITMQ_HEARTBEAT | 60 | Controls AMQP heartbeat timeout in seconds | -| RABBITMQ_CONNECTION_SLEEP | 5 | Controls AMQP connection sleep timer in seconds | -| REQUEST_QUEUE | "request_queue" | Requests to service | -| RESPONSE_QUEUE | "response_queue" | Responses by service | -| DEAD_LETTER_QUEUE | "dead_letter_queue" | Messages that failed to process | -| STORAGE_BACKEND | "s3" | The type of storage to use {s3, azure} | -| STORAGE_BUCKET | "redaction" | The bucket / container to pull files specified in queue requests from | -| STORAGE_ENDPOINT | "http://127.0.0.1:9000" | Endpoint for s3 storage | -| STORAGE_KEY | "root" | User for s3 storage | -| STORAGE_SECRET | "password" | Password for s3 storage | -| STORAGE_AZURECONNECTIONSTRING | "DefaultEndpointsProtocol=..." | Connection string for Azure storage | -| STORAGE_AZURECONTAINERNAME | "redaction" | AKS container | -| WRITE_CONSUMER_TOKEN | "False" | Value to see if we should write a consumer token to a file | +The following table shows all necessary settings. You can find a preconfigured settings file for this service in +bitbucket. 
These are the complete settings, you only need all if using all features of the service as described in +the [complete example](pyinfra/examples.py). -## Response Format +| Environment Variable | Internal / .toml Name | Description | +|------------------------------------|----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| LOGGING__LEVEL | logging.level | Log level | +| METRICS__PROMETHEUS__ENABLED | metrics.prometheus.enabled | Enable Prometheus metrics collection | +| METRICS__PROMETHEUS__PREFIX | metrics.prometheus.prefix | Prefix for Prometheus metrics (e.g. {product}-{service}) | +| WEBSERVER__HOST | webserver.host | Host of the webserver (offering e.g. /prometheus, /ready and /health endpoints) | +| WEBSERVER__PORT | webserver.port | Port of the webserver | +| RABBITMQ__HOST | rabbitmq.host | Host of the RabbitMQ server | +| RABBITMQ__PORT | rabbitmq.port | Port of the RabbitMQ server | +| RABBITMQ__USERNAME | rabbitmq.username | Username for the RabbitMQ server | +| RABBITMQ__PASSWORD | rabbitmq.password | Password for the RabbitMQ server | +| RABBITMQ__HEARTBEAT | rabbitmq.heartbeat | Heartbeat for the RabbitMQ server | +| RABBITMQ__CONNECTION_SLEEP | rabbitmq.connection_sleep | Sleep time intervals during message processing. Has to be a divider of heartbeat, and shouldn't be too big, since only in these intervals queue interactions happen (like receiving new messages) This is also the minimum time the service needs to process a message. 
| +| RABBITMQ__INPUT_QUEUE | rabbitmq.input_queue | Name of the input queue | +| RABBITMQ__OUTPUT_QUEUE | rabbitmq.output_queue | Name of the output queue | +| RABBITMQ__DEAD_LETTER_QUEUE | rabbitmq.dead_letter_queue | Name of the dead letter queue | +| STORAGE__BACKEND | storage.backend | Storage backend to use (currently only "s3" and "azure" are supported) | +| STORAGE__CACHE_SIZE | storage.cache_size | Number of cached storage connection (to reduce connection stops and reconnects for multi tenancy). | +| STORAGE__S3__BUCKET_NAME | storage.s3.bucket_name | Name of the S3 bucket | +| STORAGE__S3__ENDPOINT | storage.s3.endpoint | Endpoint of the S3 server | +| STORAGE__S3__KEY | storage.s3.key | Access key for the S3 server | +| STORAGE__S3__SECRET | storage.s3.secret | Secret key for the S3 server | +| STORAGE__S3__REGION | storage.s3.region | Region of the S3 server | +| STORAGE__AZURE__CONTAINER | storage.azure.container_name | Name of the Azure container | +| STORAGE__AZURE__CONNECTION_STRING | storage.azure.connection_string | Connection string for the Azure server | +| STORAGE__TENANT_SERVER__PUBLIC_KEY | storage.tenant_server.public_key | Public key of the tenant server | +| STORAGE__TENANT_SERVER__ENDPOINT | storage.tenant_server.endpoint | Endpoint of the tenant server | -### Expected AMQP input message: +## Queue Manager + +The queue manager is responsible for consuming messages from the input queue, processing them and sending the response +to the output queue. The default callback also downloads data from the storage and uploads the result to the storage. +The response message does not contain the data itself, but the identifiers from the input message (including headers +beginning with "X-"). 
+ +Usage: + +```python +from pyinfra.queue.manager import QueueManager +from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor +from pyinfra.config.loader import load_settings + +settings = load_settings("path/to/settings") +processing_function: DataProcessor # function should expect a dict (json) or bytes (pdf) as input and should return a json serializable object. + +queue_manager = QueueManager(settings) +queue_manager.start_consuming(make_download_process_upload_callback(processing_function, settings)) +``` + +### AMQP input message: Either use the legacy format with dossierId and fileId as strings or the new format where absolute paths are used. -A tenant ID can be optionally provided in the message header (key: "X-TENANT-ID") - - -```json -{ - "targetFilePath": "", - "responseFilePath": "" -} -``` - -or - -```json -{ - "dossierId": "", - "fileId": "", - "targetFileExtension": "", - "responseFileExtension": "" -} -``` - -Optionally, the input message can contain a field with the key `"operations"`. - -### AMQP output message: - +All headers beginning with "X-" are forwarded to the message processor, and returned in the response message (e.g. +"X-TENANT-ID" is used to acquire storage information for the tenant). ```json { @@ -84,19 +98,21 @@ or ```json { "dossierId": "", - "fileId": "" + "fileId": "", + "targetFileExtension": "", + "responseFileExtension": "" } ``` -## Usage & API +## Module Installation -### Setup - -Add the respective version of the pyinfra package to your pyproject.toml file. Make sure to add our gitlab registry as a source. -For now, all internal packages used by pyinfra also have to be added to the pyproject.toml file. +Add the respective version of the pyinfra package to your pyproject.toml file. Make sure to add our gitlab registry as a +source. +For now, all internal packages used by pyinfra also have to be added to the pyproject.toml file (namely kn-utils). 
Execute `poetry lock` and `poetry install` to install the packages. -You can look up the latest version of the package in the [gitlab registry](https://gitlab.knecon.com/knecon/research/pyinfra/-/packages). +You can look up the latest version of the package in +the [gitlab registry](https://gitlab.knecon.com/knecon/research/pyinfra/-/packages). For the used versions of internal dependencies, please refer to the [pyproject.toml](pyproject.toml) file. ```toml @@ -110,45 +126,29 @@ url = "https://gitlab.knecon.com/api/v4/groups/19/-/packages/pypi/simple" priority = "explicit" ``` -### API - -```python -from pyinfra import loader -from pyinfra.payload_processing.processor import make_payload_processor -from pyinfra.queue.manager import QueueManager - -pyinfra_config = config.get_config() - -process_payload = make_payload_processor(process_data, config=pyinfra_config) - -queue_manager = QueueManager(pyinfra_config) -queue_manager.start_consuming(process_payload) -``` - -`process_data` should expect a dict (json) or bytes (pdf) as input and should return a list of results. - ## Scripts ### Run pyinfra locally **Shell 1**: Start minio and rabbitmq containers + ```bash -$ cd tests && docker-compose up +$ cd tests && docker compose up ``` **Shell 2**: Start pyinfra with callback mock + ```bash $ python scripts/start_pyinfra.py ``` **Shell 3**: Upload dummy content on storage and publish message + ```bash $ python scripts/send_request.py ``` ## Tests -Running all tests take a bit longer than you are probably used to, because among other things the required startup times are -quite high for docker-compose dependent tests. This is why the tests are split into two parts. The first part contains all -tests that do not require docker-compose and the second part contains all tests that require docker-compose. -Per default, only the first part is executed, but when releasing a new version, all tests should be executed. 
\ No newline at end of file +Tests require a running minio and rabbitmq container, meaning you have to run `docker compose up` in the tests folder +before running the tests. diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index d89a95d..bfc728b 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -1,3 +1,4 @@ +import argparse import os from pathlib import Path from typing import Union @@ -58,3 +59,15 @@ def validate_settings(settings: Dynaconf, validators): raise ValidationError("Settings validation failed.") logger.debug("Settings validated.") + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--settings_path", + "-s", + type=Path, + default=pyinfra_config_path, + help="Path to settings file or folder. Must be a .toml file or a folder containing .toml files.", + ) + return parser.parse_args() diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 0bc512a..e147135 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -15,7 +15,7 @@ def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataPr Supplies /health, /ready and /prometheus endpoints. The process_fn is monitored for processing time per call. Workload is only received via queue messages. The message contains a file path to the data to be processed, which gets downloaded from the storage. The data and the message are then passed to the process_fn. The process_fn should - return a json-dump-able object. This object is then uploaded to the storage. The response message is just the + return a json serializable object. This object is then uploaded to the storage. The response message is just the original message. Adapt as needed. 
diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py index f678f27..2b39a45 100644 --- a/pyinfra/queue/callback.py +++ b/pyinfra/queue/callback.py @@ -14,7 +14,7 @@ def make_download_process_upload_callback(data_processor: DataProcessor, setting Data will be downloaded from the storage as specified in the message. If a tenant id is specified, the storage will be configured to use that tenant id, otherwise the storage is configured as specified in the settings. The data is the passed to the dataprocessor, together with the message. The dataprocessor should return a - json-dump-able object. This object is then uploaded to the storage as specified in the message. + json serializable object. This object is then uploaded to the storage as specified in the message. The response message is just the original message. Adapt as needed. diff --git a/pyinfra/storage/utils.py b/pyinfra/storage/utils.py index bb3598b..34f9764 100644 --- a/pyinfra/storage/utils.py +++ b/pyinfra/storage/utils.py @@ -73,11 +73,11 @@ def download_data_as_specified_in_message(storage: Storage, raw_payload: dict) - def upload_data_as_specified_in_message(storage: Storage, raw_payload: dict, data): - """Convenience function to upload a file specified in a message payload. For now, only json-dump-able data is + """Convenience function to upload a file specified in a message payload. For now, only json serializable data is supported. The storage json consists of the raw_payload, which is extended with a 'data' key, containing the data to be uploaded. - If the content is not a json-dump-able object, an exception will be raised. + If the content is not a json serializable object, an exception will be raised. If the result file identifier specifies compression with gzip (.gz), it will be compressed before upload. This function can be extended in the future as needed (e.g. 
if we need to upload images), but since further @@ -95,7 +95,7 @@ def upload_data_as_specified_in_message(storage: Storage, raw_payload: dict, dat raise ValueError("No upload file path found in payload, nothing to upload.") if ".json" not in payload.responseFilePath: - raise ValueError("Only json-dump-able data can be uploaded.") + raise ValueError("Only json serializable data can be uploaded.") data = {**raw_payload, "data": data} diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index b89d4c3..4cda348 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,25 +1,9 @@ -import argparse import time -from pathlib import Path -from kn_utils.logging import logger - -from pyinfra.config.loader import load_settings, pyinfra_config_path +from pyinfra.config.loader import load_settings, parse_args from pyinfra.examples import start_queue_consumer_with_prometheus_and_health_endpoints -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--settings_path", - "-s", - type=Path, - default=pyinfra_config_path, - help="Path to settings file or folder. 
Must be a .toml file or a folder containing .toml files.", - ) - return parser.parse_args() - - def processor_mock(_data: dict, _message: dict) -> dict: time.sleep(5) return {"result1": "result1"} From 936bb4fe80e4c15ead0f6b8a1652a9ae8090a733 Mon Sep 17 00:00:00 2001 From: Isaac Riley Date: Wed, 24 Jan 2024 08:09:42 +0100 Subject: [PATCH 23/39] feat: add opentelemetry on top of newly refactored pyinfra --- .pre-commit-config.yaml | 6 +- README.md | 4 +- poetry.lock | 1458 +++++++++++++++++++++++----------- pyinfra/config/validators.py | 5 + pyinfra/queue/manager.py | 24 +- pyproject.toml | 38 +- 6 files changed, 1084 insertions(+), 451 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68e4792..0177c31 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,10 +2,10 @@ # See https://pre-commit.com/hooks.html for more hooks exclude: ^(docs/|notebooks/|data/|src/secrets/|src/static/|src/templates/|tests) default_language_version: - python: python3.8 + python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -26,7 +26,7 @@ repos: args: ["--profile", "black"] - repo: https://github.com/psf/black - rev: 23.1.0 + rev: 23.12.1 hooks: - id: black # exclude: ^(docs/|notebooks/|data/|src/secrets/) diff --git a/README.md b/README.md index a55a974..5f4b5e3 100755 --- a/README.md +++ b/README.md @@ -58,6 +58,8 @@ the [complete example](pyinfra/examples.py). 
| STORAGE__AZURE__CONNECTION_STRING | storage.azure.connection_string | Connection string for the Azure server | | STORAGE__TENANT_SERVER__PUBLIC_KEY | storage.tenant_server.public_key | Public key of the tenant server | | STORAGE__TENANT_SERVER__ENDPOINT | storage.tenant_server.endpoint | Endpoint of the tenant server | +| TRACING__ENDPOINT | tracing.endpoint | Endpoint to which OpenTelemetry traces are exported +| TRACING__SERVER_NAME | tracing.server_name | Name of the service as displayed in the traces collected ## Queue Manager @@ -139,7 +141,7 @@ $ cd tests && docker compose up **Shell 2**: Start pyinfra with callback mock ```bash -$ python scripts/start_pyinfra.py +$ python scripts/start_pyinfra.py ``` **Shell 3**: Upload dummy content on storage and publish message diff --git a/poetry.lock b/poetry.lock index d820b51..4d9b64d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -103,13 +103,13 @@ tests = ["pytest"] [[package]] name = "astroid" -version = "3.0.1" +version = "3.0.2" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.0.1-py3-none-any.whl", hash = "sha256:7d5895c9825e18079c5aeac0572bc2e4c83205c95d416e0b4fee8bc361d2d9ca"}, - {file = "astroid-3.0.1.tar.gz", hash = "sha256:86b0bb7d7da0be1a7c4aedb7974e391b32d4ed89e33de6ed6902b4b15c97577e"}, + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, ] [package.dependencies] @@ -135,17 +135,17 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "azure-core" -version = "1.29.5" +version = "1.29.7" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.5.tar.gz", hash = "sha256:52983c89d394c6f881a121e5101c5fa67278ca3b1f339c8fb2ef39230c70e9ac"}, - {file = "azure_core-1.29.5-py3-none-any.whl", hash = "sha256:0fa04b7b1f7d44a4fb8468c4093deb2ea01fdf4faddbf802ed9205615f99d68c"}, + {file = "azure-core-1.29.7.tar.gz", hash = "sha256:2944faf1a7ff1558b1f457cabf60f279869cabaeef86b353bed8eb032c7d8c5e"}, + {file = "azure_core-1.29.7-py3-none-any.whl", hash = "sha256:95a7b41b4af102e5fcdfac9500fcc82ff86e936c7145a099b7848b9ac0501250"}, ] [package.dependencies] -requests = ">=2.18.4" +requests = ">=2.21.0" six = ">=1.11.0" typing-extensions = ">=4.6.0" @@ -172,31 +172,46 @@ typing-extensions = ">=4.3.0" [package.extras] aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + [[package]] name 
= "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = 
"black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, 
] [package.dependencies] @@ -210,7 +225,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -289,6 +304,17 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -415,13 +441,13 @@ files = [ [[package]] name = "comm" -version = "0.2.0" +version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, - {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, ] [package.dependencies] @@ -432,63 +458,63 @@ test = ["pytest"] [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = 
"coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - 
{file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = 
"coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = 
"coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.extras] @@ -496,47 +522,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, + {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, + {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, + {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, + {file = 
"cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, + {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, + {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, + {file = 
"cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, + {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -577,6 +612,23 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + [[package]] name = "dill" version = "0.3.7" @@ -591,15 +643,26 @@ files = [ [package.extras] graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "dynaconf" -version = "3.2.1" +version = "3.2.4" description = "The dynamic configurator for your Python Project" optional = false python-versions = ">=3.8" files = [ - {file = "dynaconf-3.2.1-py2.py3-none-any.whl", hash = "sha256:a4af12524f1fc527c6c0cdd4bb38cf83992d5155ad516baa98d9d01b7a731d09"}, - {file = "dynaconf-3.2.1.tar.gz", hash = "sha256:00dbd7541ca0f99bcb207cfc8aee0ac8f7d6b32bbb372e5b2865f0cb829b06c3"}, + {file = "dynaconf-3.2.4-py2.py3-none-any.whl", hash = "sha256:858f9806fab2409c4f5442614c2605d4c4071d5e5153b0e7f24a225f27465aed"}, + {file = "dynaconf-3.2.4.tar.gz", hash = "sha256:2e6adebaa587f4df9241a16a4bec3fda521154d26b15f3258fde753a592831b6"}, ] [package.extras] @@ -607,7 +670,7 @@ all = ["configobj", "hvac", "redis", "ruamel.yaml"] configobj = ["configobj"] ini = ["configobj"] redis = ["redis"] -test = ["configobj", "django", "flake8", "flake8-debugger", "flake8-print", "flake8-todo", "flask (>=0.12)", "hvac", "pep8-naming", "pytest", 
"pytest-cov", "pytest-mock", "pytest-xdist", "python-dotenv", "radon", "redis", "toml"] +test = ["configobj", "django", "flake8", "flake8-debugger", "flake8-print", "flake8-todo", "flask (>=0.12)", "hvac (>=1.1.0)", "pep8-naming", "pytest", "pytest-cov", "pytest-mock", "pytest-xdist", "python-dotenv", "radon", "redis", "toml"] toml = ["toml"] vault = ["hvac"] yaml = ["ruamel.yaml"] @@ -659,6 +722,22 @@ typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "funcy" version = "2.0" @@ -670,6 +749,89 @@ files = [ {file = "funcy-2.0.tar.gz", hash = "sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb"}, ] +[[package]] +name = "googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = 
"sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "grpcio" +version = "1.60.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, + {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, + {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, + {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, + {file = 
"grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, + {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, + {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, + {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, + {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, + {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, + {file = 
"grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, + {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, + {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, + {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, + {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, + {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, + {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = 
"sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, + {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, + {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, + {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, + {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, + {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, + {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, + {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, + {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.60.0)"] + [[package]] name = "h11" version = "0.14.0" @@ -681,6 +843,20 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "identify" +version = "2.5.33" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.6" @@ -692,6 +868,25 @@ files = [ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = 
"importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -705,13 +900,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.27.1" +version = "6.29.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, - {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, + {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, + {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, ] [package.dependencies] @@ -725,7 +920,7 @@ matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=20" +pyzmq = ">=24" tornado = ">=6.1" traitlets = ">=5.4.0" @@ -734,17 +929,17 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest 
(>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" -version = "8.18.1" +version = "8.20.0" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, + {file = "ipython-8.20.0-py3-none-any.whl", hash = "sha256:bc9716aad6f29f36c449e30821c9dd0c1c1a7b59ddcc26931685b87b4c569619"}, + {file = "ipython-8.20.0.tar.gz", hash = "sha256:2f21bd3fc1d51550c89ee3944ae04bbc7bc79e129ea0937da6e6c68bfdbf117a"}, ] [package.dependencies] @@ -760,17 +955,17 @@ stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", 
"pytest", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "isodate" @@ -788,20 +983,17 @@ six = "*" [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jedi" @@ -846,13 +1038,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.5.0" +version = "5.7.1" description = "Jupyter core 
package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, - {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -866,24 +1058,23 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "kn-utils" -version = "0.2.5.dev112" +version = "0.2.7" description = "Shared code related to logging for research & development." optional = false -python-versions = ">=3.10,<3.11" +python-versions = ">=3.9,<3.12" files = [ - {file = "kn_utils-0.2.5.dev112-py3-none-any.whl", hash = "sha256:101e330e310103c5ebfc08d7aa15160f2803525e49a1320868bf4a5704537d0c"}, - {file = "kn_utils-0.2.5.dev112.tar.gz", hash = "sha256:12e731ad2347666bc72d19d33cdf8a3ba87cf3ae6479ca2b4005d344fa3e4933"}, + {file = "kn_utils-0.2.7-py3-none-any.whl", hash = "sha256:71753a8ee3730b456823bd1b6a863defc867c6ac8de17b6d5fdb3c0e08395ed9"}, + {file = "kn_utils-0.2.7.tar.gz", hash = "sha256:1f991db3a9936577fa4f32a2a256d6847e11df068aeb2709df3ab9aaf3b4151f"}, ] [package.dependencies] -dynaconf = "3.2.1" -funcy = "2.0" -loguru = "0.7" -pytest-loguru = "0.2" -rpds-py = "0.12.0" +dynaconf = ">=3.2.1,<4.0.0" +funcy = ">=2.0,<3.0" +loguru = ">=0.7,<0.8" +pytest-loguru = ">=0.2,<0.3" [package.extras] -models = ["mlflow (<2.8)", "spacy (>=3.7,<4.0)"] +models = ["mlflow (>=2.7,<3.0)", "spacy (>=3.7,<4.0)", "tensorflow (>=2.15.0,<3.0.0)"] [package.source] type = "legacy" @@ -892,13 +1083,13 @@ reference = "gitlab-research" [[package]] name = "loguru" -version = 
"0.7.0" +version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" files = [ - {file = "loguru-0.7.0-py3-none-any.whl", hash = "sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"}, - {file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"}, + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] @@ -906,7 +1097,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "matplotlib-inline" @@ -935,19 +1126,20 @@ files = [ [[package]] name = "minio" -version = "7.2.0" +version = "7.2.3" description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" 
optional = false python-versions = "*" files = [ - {file = "minio-7.2.0-py3-none-any.whl", hash = "sha256:10656272c16156fa08436ce2b27e25e4134ef5142a8c259513ee26fb514531a6"}, - {file = "minio-7.2.0.tar.gz", hash = "sha256:4b015b018d10c1505f7c3e724fa7c2267760ac7bee6463a624cbf22cd272877b"}, + {file = "minio-7.2.3-py3-none-any.whl", hash = "sha256:e6b5ce0a9b4368da50118c3f0c4df5dbf33885d44d77fce6c0aa1c485e6af7a1"}, + {file = "minio-7.2.3.tar.gz", hash = "sha256:4971dfb1a71eeefd38e1ce2dc7edc4e6eb0f07f1c1d6d70c15457e3280cfc4b9"}, ] [package.dependencies] argon2-cffi = "*" certifi = "*" pycryptodome = "*" +typing-extensions = "*" urllib3 = "*" [[package]] @@ -963,13 +1155,272 @@ files = [ [[package]] name = "nest-asyncio" -version = "1.5.8" +version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, - {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "opentelemetry-api" +version = "1.22.0" +description = "OpenTelemetry Python 
API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.22.0-py3-none-any.whl", hash = "sha256:43621514301a7e9f5d06dd8013a1b450f30c2e9372b8e30aaeb4562abf2ce034"}, + {file = "opentelemetry_api-1.22.0.tar.gz", hash = "sha256:15ae4ca925ecf9cfdfb7a709250846fbb08072260fca08ade78056c502b86bed"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.22.0" +description = "OpenTelemetry Collector Exporters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp-1.22.0-py3-none-any.whl", hash = "sha256:cb03a1cbf300e12b47690858be13dd26fe2f60b2610204959f3497cd6645e3a1"}, + {file = "opentelemetry_exporter_otlp-1.22.0.tar.gz", hash = "sha256:309a7d4dc67602801f15818e110ce452e78989886aaab5d37e7cf7f55f1d3d27"}, +] + +[package.dependencies] +opentelemetry-exporter-otlp-proto-grpc = "1.22.0" +opentelemetry-exporter-otlp-proto-http = "1.22.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.22.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl", hash = "sha256:3f2538bec5312587f8676c332b3747f54c89fe6364803a807e217af4603201fa"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.22.0.tar.gz", hash = "sha256:71ae2f81bc6d6fe408d06388826edc8933759b2ca3a97d24054507dc7cfce52d"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +opentelemetry-proto = "1.22.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.22.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.22.0-py3-none-any.whl", hash = 
"sha256:b5bcadc129272004316a455e9081216d3380c1fc2231a928ea6a70aa90e173fb"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.22.0.tar.gz", hash = "sha256:1e0e5aa4bbabc74942f06f268deffd94851d12a8dc30b02527472ef1729fe5b1"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.22.0" +opentelemetry-proto = "1.22.0" +opentelemetry-sdk = ">=1.22.0,<1.23.0" + +[package.extras] +test = ["pytest-grpc"] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.22.0" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.22.0-py3-none-any.whl", hash = "sha256:e002e842190af45b91dc55a97789d0b98e4308c88d886b16049ee90e17a4d396"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.22.0.tar.gz", hash = "sha256:79ed108981ec68d5f7985355bca32003c2f3a5be1534a96d62d5861b758a82f4"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.22.0" +opentelemetry-proto = "1.22.0" +opentelemetry-sdk = ">=1.22.0,<1.23.0" +requests = ">=2.7,<3.0" + +[package.extras] +test = ["responses (==0.22.0)"] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.43b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation-0.43b0-py3-none-any.whl", hash = "sha256:0ff1334d7e359e27640e9d420024efeb73eacae464309c2e14ede7ba6c93967e"}, + {file = "opentelemetry_instrumentation-0.43b0.tar.gz", hash = 
"sha256:c3755da6c4be8033be0216d0501e11f4832690f4e2eca5a3576fbf113498f0f6"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-flask" +version = "0.43b0" +description = "Flask instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_flask-0.43b0-py3-none-any.whl", hash = "sha256:537aecdd8de1c00f9b408d7a02b85e9ba55cda1ed95f4712199478ceaa4dfb2c"}, + {file = "opentelemetry_instrumentation_flask-0.43b0.tar.gz", hash = "sha256:ea3779f157a7efe82d0e10a59af64440e34b5e8004eaee08d7d61bbb889701fa"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-instrumentation-wsgi = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" +packaging = ">=21.0" + +[package.extras] +instruments = ["flask (>=1.0,<3.0)", "werkzeug (<3.0.0)"] +test = ["markupsafe (==2.1.2)", "opentelemetry-instrumentation-flask[instruments]", "opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-instrumentation-pika" +version = "0.43b0" +description = "OpenTelemetry pika instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_pika-0.43b0-py3-none-any.whl", hash = "sha256:78e4bbdd4251ad7208e90911e5fe66e9040188a3085f99d5f152598773628088"}, + {file = "opentelemetry_instrumentation_pika-0.43b0.tar.gz", hash = "sha256:49beda8539c0b0b82b6076d0f1b3e274ae80c4571295fbc00512bbe8cbda55eb"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.5,<2.0" +packaging = ">=20.0" +wrapt = ">=1.0.0,<2.0.0" + +[package.extras] +instruments = ["pika (>=0.12.0)"] +test = ["opentelemetry-instrumentation-pika[instruments]", "opentelemetry-test-utils (==0.43b0)", "pytest", "wrapt (>=1.0.0,<2.0.0)"] + +[[package]] +name = "opentelemetry-instrumentation-requests" 
+version = "0.43b0" +description = "OpenTelemetry requests instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_requests-0.43b0-py3-none-any.whl", hash = "sha256:cd9d0862ab8c8892a207dd828134f23c6a5014756f0f055120412aa00be7732d"}, + {file = "opentelemetry_instrumentation_requests-0.43b0.tar.gz", hash = "sha256:fd92c278d463dbad39cdc42e4f5871de8f66560cf9b40191b554a293aa6faf49"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +instruments = ["requests (>=2.0,<3.0)"] +test = ["httpretty (>=1.0,<2.0)", "opentelemetry-instrumentation-requests[instruments]", "opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-instrumentation-wsgi" +version = "0.43b0" +description = "WSGI Middleware for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_wsgi-0.43b0-py3-none-any.whl", hash = "sha256:0b7511469daa29a6e75b9cc54b4d01a9bb46aa1f964471dc3ee3f06ff39f94b2"}, + {file = "opentelemetry_instrumentation_wsgi-0.43b0.tar.gz", hash = "sha256:3a1cf045f7ccf04987a89cdd49eda93e9195de4c8b73be228a9e565ec3ab453c"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +test = ["opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-proto" +version = "1.22.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_proto-1.22.0-py3-none-any.whl", hash = "sha256:ce7188d22c75b6d0fe53e7fb58501613d0feade5139538e79dedd9420610fa0c"}, + {file = "opentelemetry_proto-1.22.0.tar.gz", hash = "sha256:9ec29169286029f17ca34ec1f3455802ffb90131642d2f545ece9a63e8f69003"}, 
+] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.22.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_sdk-1.22.0-py3-none-any.whl", hash = "sha256:a730555713d7c8931657612a88a141e3a4fe6eb5523d9e2d5a8b1e673d76efa6"}, + {file = "opentelemetry_sdk-1.22.0.tar.gz", hash = "sha256:45267ac1f38a431fc2eb5d6e0c0d83afc0b78de57ac345488aa58c28c17991d0"}, +] + +[package.dependencies] +opentelemetry-api = "1.22.0" +opentelemetry-semantic-conventions = "0.43b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.43b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl", hash = "sha256:291284d7c1bf15fdaddf309b3bd6d3b7ce12a253cec6d27144439819a15d8445"}, + {file = "opentelemetry_semantic_conventions-0.43b0.tar.gz", hash = "sha256:b9576fb890df479626fa624e88dde42d3d60b8b6c8ae1152ad157a8b97358635"}, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.43b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_util_http-0.43b0-py3-none-any.whl", hash = "sha256:f25a820784b030f6cb86b3d76e5676c769b75ed3f55a210bcdae0a5e175ebadb"}, + {file = "opentelemetry_util_http-0.43b0.tar.gz", hash = "sha256:3ff6ab361dbe99fc81200d625603c0fb890c055c6e416a3e6d661ddf47a6c7f7"}, ] [[package]] @@ -1000,13 +1451,13 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -1041,13 +1492,13 @@ twisted = ["twisted"] [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1069,6 +1520,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pre-commit" +version = "3.6.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "prometheus-client" version = "0.18.0" @@ -1085,41 +1554,61 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.41" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, - {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "protobuf" +version = "4.25.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, +] + [[package]] name = "psutil" -version = "5.9.6" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = 
"sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -1174,43 +1663,43 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.0" +version = "3.20.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = 
"sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = 
"sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, - {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = 
"sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = 
"pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] @@ -1366,20 +1855,20 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.0.2" +version = "3.0.3" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.0.2-py3-none-any.whl", hash = "sha256:60ed5f3a9ff8b61839ff0348b3624ceeb9e6c2a92c514d81c9cc273da3b6bcda"}, - {file = "pylint-3.0.2.tar.gz", hash = "sha256:0d4c286ef6d2f66c8bfb527a7f8a629009e42c99707dec821a03e1b51a4c1496"}, + 
{file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, ] [package.dependencies] astroid = ">=3.0.1,<=3.1.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = {version = ">=0.2", markers = "python_version < \"3.11\""} -isort = ">=4.2.5,<6" +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} @@ -1391,13 +1880,13 @@ testutils = ["gitpython (>3)"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1462,106 +1951,165 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "pyzmq" -version = "25.1.1" +version = "25.1.2" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, - {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, - {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = 
"sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, - {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, - {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, - {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, - {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, - {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, - {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, - {file = 
"pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, - {file = 
"pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, - {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, - {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = 
"sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, - {file = 
"pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, - {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = 
"pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = 
"pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = 
"sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, ] [package.dependencies] @@ -1604,113 +2152,21 @@ decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" [[package]] -name = "rpds-py" -version = "0.12.0" -description = "Python bindings to Rust's persistent data structures (rpds)" +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.12.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c694bee70ece3b232df4678448fdda245fd3b1bb4ba481fb6cd20e13bb784c46"}, - {file = "rpds_py-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30e5ce9f501fb1f970e4a59098028cf20676dee64fc496d55c33e04bbbee097d"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d72a4315514e5a0b9837a086cb433b004eea630afb0cc129de76d77654a9606f"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eebaf8c76c39604d52852366249ab807fe6f7a3ffb0dd5484b9944917244cdbe"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a239303acb0315091d54c7ff36712dba24554993b9a93941cf301391d8a997ee"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced40cdbb6dd47a032725a038896cceae9ce267d340f59508b23537f05455431"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c8c0226c71bd0ce9892eaf6afa77ae8f43a3d9313124a03df0b389c01f832de"}, - {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8e11715178f3608874508f08e990d3771e0b8c66c73eb4e183038d600a9b274"}, - {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5210a0018c7e09c75fa788648617ebba861ae242944111d3079034e14498223f"}, - {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_i686.whl", 
hash = "sha256:171d9a159f1b2f42a42a64a985e4ba46fc7268c78299272ceba970743a67ee50"}, - {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:57ec6baec231bb19bb5fd5fc7bae21231860a1605174b11585660236627e390e"}, - {file = "rpds_py-0.12.0-cp310-none-win32.whl", hash = "sha256:7188ddc1a8887194f984fa4110d5a3d5b9b5cd35f6bafdff1b649049cbc0ce29"}, - {file = "rpds_py-0.12.0-cp310-none-win_amd64.whl", hash = "sha256:1e04581c6117ad9479b6cfae313e212fe0dfa226ac727755f0d539cd54792963"}, - {file = "rpds_py-0.12.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0a38612d07a36138507d69646c470aedbfe2b75b43a4643f7bd8e51e52779624"}, - {file = "rpds_py-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f12d69d568f5647ec503b64932874dade5a20255736c89936bf690951a5e79f5"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8a1d990dc198a6c68ec3d9a637ba1ce489b38cbfb65440a27901afbc5df575"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c567c664fc2f44130a20edac73e0a867f8e012bf7370276f15c6adc3586c37c"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e9e976e0dbed4f51c56db10831c9623d0fd67aac02853fe5476262e5a22acb7"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efddca2d02254a52078c35cadad34762adbae3ff01c6b0c7787b59d038b63e0d"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9e7f29c00577aff6b318681e730a519b235af292732a149337f6aaa4d1c5e31"}, - {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:389c0e38358fdc4e38e9995e7291269a3aead7acfcf8942010ee7bc5baee091c"}, - {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33ab498f9ac30598b6406e2be1b45fd231195b83d948ebd4bd77f337cb6a2bff"}, - {file = 
"rpds_py-0.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d56b1cd606ba4cedd64bb43479d56580e147c6ef3f5d1c5e64203a1adab784a2"}, - {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fa73ed22c40a1bec98d7c93b5659cd35abcfa5a0a95ce876b91adbda170537c"}, - {file = "rpds_py-0.12.0-cp311-none-win32.whl", hash = "sha256:dbc25baa6abb205766fb8606f8263b02c3503a55957fcb4576a6bb0a59d37d10"}, - {file = "rpds_py-0.12.0-cp311-none-win_amd64.whl", hash = "sha256:c6b52b7028b547866c2413f614ee306c2d4eafdd444b1ff656bf3295bf1484aa"}, - {file = "rpds_py-0.12.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:9620650c364c01ed5b497dcae7c3d4b948daeae6e1883ae185fef1c927b6b534"}, - {file = "rpds_py-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2124f9e645a94ab7c853bc0a3644e0ca8ffbe5bb2d72db49aef8f9ec1c285733"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281c8b219d4f4b3581b918b816764098d04964915b2f272d1476654143801aa2"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27ccc93c7457ef890b0dd31564d2a05e1aca330623c942b7e818e9e7c2669ee4"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1c562a9bb72244fa767d1c1ab55ca1d92dd5f7c4d77878fee5483a22ffac808"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e57919c32ee295a2fca458bb73e4b20b05c115627f96f95a10f9f5acbd61172d"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa35ad36440aaf1ac8332b4a4a433d4acd28f1613f0d480995f5cfd3580e90b7"}, - {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e6aea5c0eb5b0faf52c7b5c4a47c8bb64437173be97227c819ffa31801fa4e34"}, - {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:81cf9d306c04df1b45971c13167dc3bad625808aa01281d55f3cf852dde0e206"}, - {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08e6e7ff286254016b945e1ab632ee843e43d45e40683b66dd12b73791366dd1"}, - {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d0a675a7acbbc16179188d8c6d0afb8628604fc1241faf41007255957335a0b"}, - {file = "rpds_py-0.12.0-cp312-none-win32.whl", hash = "sha256:b2287c09482949e0ca0c0eb68b2aca6cf57f8af8c6dfd29dcd3bc45f17b57978"}, - {file = "rpds_py-0.12.0-cp312-none-win_amd64.whl", hash = "sha256:8015835494b21aa7abd3b43fdea0614ee35ef6b03db7ecba9beb58eadf01c24f"}, - {file = "rpds_py-0.12.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6174d6ad6b58a6bcf67afbbf1723420a53d06c4b89f4c50763d6fa0a6ac9afd2"}, - {file = "rpds_py-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a689e1ded7137552bea36305a7a16ad2b40be511740b80748d3140614993db98"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45321224144c25a62052035ce96cbcf264667bcb0d81823b1bbc22c4addd194"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa32205358a76bf578854bf31698a86dc8b2cb591fd1d79a833283f4a403f04b"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91bd2b7cf0f4d252eec8b7046fa6a43cee17e8acdfc00eaa8b3dbf2f9a59d061"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3acadbab8b59f63b87b518e09c4c64b142e7286b9ca7a208107d6f9f4c393c5c"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:429349a510da82c85431f0f3e66212d83efe9fd2850f50f339341b6532c62fe4"}, - {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05942656cb2cb4989cd50ced52df16be94d344eae5097e8583966a1d27da73a5"}, - {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:0c5441b7626c29dbd54a3f6f3713ec8e956b009f419ffdaaa3c80eaf98ddb523"}, - {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b6b0e17d39d21698185097652c611f9cf30f7c56ccec189789920e3e7f1cee56"}, - {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3b7a64d43e2a1fa2dd46b678e00cabd9a49ebb123b339ce799204c44a593ae1c"}, - {file = "rpds_py-0.12.0-cp38-none-win32.whl", hash = "sha256:e5bbe011a2cea9060fef1bb3d668a2fd8432b8888e6d92e74c9c794d3c101595"}, - {file = "rpds_py-0.12.0-cp38-none-win_amd64.whl", hash = "sha256:bec29b801b4adbf388314c0d050e851d53762ab424af22657021ce4b6eb41543"}, - {file = "rpds_py-0.12.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1096ca0bf2d3426cbe79d4ccc91dc5aaa73629b08ea2d8467375fad8447ce11a"}, - {file = "rpds_py-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48aa98987d54a46e13e6954880056c204700c65616af4395d1f0639eba11764b"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7979d90ee2190d000129598c2b0c82f13053dba432b94e45e68253b09bb1f0f6"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88857060b690a57d2ea8569bca58758143c8faa4639fb17d745ce60ff84c867e"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4eb74d44776b0fb0782560ea84d986dffec8ddd94947f383eba2284b0f32e35e"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f62581d7e884dd01ee1707b7c21148f61f2febb7de092ae2f108743fcbef5985"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f5dcb658d597410bb7c967c1d24eaf9377b0d621358cbe9d2ff804e5dd12e81"}, - {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bf9acce44e967a5103fcd820fc7580c7b0ab8583eec4e2051aec560f7b31a63"}, - {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:240687b5be0f91fbde4936a329c9b7589d9259742766f74de575e1b2046575e4"}, - {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25740fb56e8bd37692ed380e15ec734be44d7c71974d8993f452b4527814601e"}, - {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a54917b7e9cd3a67e429a630e237a90b096e0ba18897bfb99ee8bd1068a5fea0"}, - {file = "rpds_py-0.12.0-cp39-none-win32.whl", hash = "sha256:b92aafcfab3d41580d54aca35a8057341f1cfc7c9af9e8bdfc652f83a20ced31"}, - {file = "rpds_py-0.12.0-cp39-none-win_amd64.whl", hash = "sha256:cd316dbcc74c76266ba94eb021b0cc090b97cca122f50bd7a845f587ff4bf03f"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0853da3d5e9bc6a07b2486054a410b7b03f34046c123c6561b535bb48cc509e1"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cb41ad20064e18a900dd427d7cf41cfaec83bcd1184001f3d91a1f76b3fcea4e"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bf7e7ae61957d5c4026b486be593ed3ec3dca3e5be15e0f6d8cf5d0a4990"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a952ae3eb460c6712388ac2ec706d24b0e651b9396d90c9a9e0a69eb27737fdc"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bedd91ae1dd142a4dc15970ed2c729ff6c73f33a40fa84ed0cdbf55de87c777"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:761531076df51309075133a6bc1db02d98ec7f66e22b064b1d513bc909f29743"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2baa6be130e8a00b6cbb9f18a33611ec150b4537f8563bddadb54c1b74b8193"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f05450fa1cd7c525c0b9d1a7916e595d3041ac0afbed2ff6926e5afb6a781b7f"}, - 
{file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:81c4d1a3a564775c44732b94135d06e33417e829ff25226c164664f4a1046213"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e888be685fa42d8b8a3d3911d5604d14db87538aa7d0b29b1a7ea80d354c732d"}, - {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6f8d7fe73d1816eeb5378409adc658f9525ecbfaf9e1ede1e2d67a338b0c7348"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0831d3ecdea22e4559cc1793f22e77067c9d8c451d55ae6a75bf1d116a8e7f42"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:513ccbf7420c30e283c25c82d5a8f439d625a838d3ba69e79a110c260c46813f"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:301bd744a1adaa2f6a5e06c98f1ac2b6f8dc31a5c23b838f862d65e32fca0d4b"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8832a4f83d4782a8f5a7b831c47e8ffe164e43c2c148c8160ed9a6d630bc02a"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2416ed743ec5debcf61e1242e012652a4348de14ecc7df3512da072b074440"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35585a8cb5917161f42c2104567bb83a1d96194095fc54a543113ed5df9fa436"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d389ff1e95b6e46ebedccf7fd1fadd10559add595ac6a7c2ea730268325f832c"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b007c2444705a2dc4a525964fd4dd28c3320b19b3410da6517cab28716f27d3"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:188912b22b6c8225f4c4ffa020a2baa6ad8fabb3c141a12dbe6edbb34e7f1425"}, - {file = 
"rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b4cf9ab9a0ae0cb122685209806d3f1dcb63b9fccdf1424fb42a129dc8c2faa"}, - {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2d34a5450a402b00d20aeb7632489ffa2556ca7b26f4a63c35f6fccae1977427"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:466030a42724780794dea71eb32db83cc51214d66ab3fb3156edd88b9c8f0d78"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68172622a5a57deb079a2c78511c40f91193548e8ab342c31e8cb0764d362459"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54cdfcda59251b9c2f87a05d038c2ae02121219a04d4a1e6fc345794295bdc07"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b75b912a0baa033350367a8a07a8b2d44fd5b90c890bfbd063a8a5f945f644b"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47aeceb4363851d17f63069318ba5721ae695d9da55d599b4d6fb31508595278"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0525847f83f506aa1e28eb2057b696fe38217e12931c8b1b02198cfe6975e142"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efbe0b5e0fd078ed7b005faa0170da4f72666360f66f0bb2d7f73526ecfd99f9"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0fadfdda275c838cba5102c7f90a20f2abd7727bf8f4a2b654a5b617529c5c18"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:56dd500411d03c5e9927a1eb55621e906837a83b02350a9dc401247d0353717c"}, - {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:6915fc9fa6b3ec3569566832e1bb03bd801c12cea030200e68663b9a87974e76"}, - {file = 
"rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5f1519b080d8ce0a814f17ad9fb49fb3a1d4d7ce5891f5c85fc38631ca3a8dc4"}, - {file = "rpds_py-0.12.0.tar.gz", hash = "sha256:7036316cc26b93e401cedd781a579be606dad174829e6ad9e9c5a0da6e036f80"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1793,33 +2249,33 @@ files = [ [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] name = "traitlets" -version = "5.14.0" +version = "5.14.1" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, - {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = 
"sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [package.extras] @@ -1828,13 +2284,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -1872,15 +2328,35 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +[[package]] +name = "virtualenv" +version = "20.25.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", 
"sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -1897,7 +2373,101 @@ files = [ [package.extras] dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = 
"947961b5c6b624da6ff0644fa320e8854255dc893e0ffb697b67576c4da86eb8" +content-hash = "1b4d543363bcda2ec51a2e29e849ceac983fa0e7933de58ec6bf881614bb939f" diff --git a/pyinfra/config/validators.py b/pyinfra/config/validators.py index e0ae2af..4db9f84 100644 --- a/pyinfra/config/validators.py +++ b/pyinfra/config/validators.py @@ -44,3 +44,8 @@ webserver_validators = [ Validator("webserver.host", must_exist=True, is_type_of=str), Validator("webserver.port", must_exist=True, is_type_of=int), ] + +opentelemetry_validators = [ + Validator("tracing.endpoint", must_exist=True, is_type_of=str), + Validator("tracing.service.name", must_exist=True, is_type_of=str), +] diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index 767d723..f23a179 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -4,17 +4,23 @@ import json import logging import signal import sys -from typing import Union, Callable +from typing import Callable, Union import pika import pika.exceptions from dynaconf import Dynaconf from kn_utils.logging import logger +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.pika import PikaInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor from pika.adapters.blocking_connection import BlockingChannel, BlockingConnection from retry import retry -from pyinfra.config.validators import queue_manager_validators from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import queue_manager_validators pika_logger = logging.getLogger("pika") pika_logger.setLevel(logging.WARNING) # disables non-informative pika log clutter @@ -36,6 +42,9 @@ class QueueManager: self.channel: Union[BlockingChannel, None] = None self.connection_sleep = settings.rabbitmq.connection_sleep + self.tracing_endpoint = 
settings.tracing.endpoint + self.service_name = settings.tracing.service_name + atexit.register(self.stop_consuming) signal.signal(signal.SIGTERM, self._handle_stop_signal) signal.signal(signal.SIGINT, self._handle_stop_signal) @@ -76,6 +85,17 @@ class QueueManager: logger.info("Connection to RabbitMQ established, channel open.") + resource = Resource(attributes={"service.name": self.service_name}) + trace.set_tracer_provider(TracerProvider(resource=resource)) + tracer = trace.get_tracer(__name__) + + otlp_exporter = OTLPSpanExporter(endpoint=self.tracing_endpoint) + span_processor = BatchSpanProcessor(otlp_exporter) + trace.get_tracer_provider().add_span_processor(span_processor) + + pika_instrumentation = PikaInstrumentor() + pika_instrumentation.instrument_channel(channel=self.channel) + def is_ready(self): self.establish_connection() return self.channel.is_open diff --git a/pyproject.toml b/pyproject.toml index 580903b..46761c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,6 @@ version = "1.10.0" description = "" authors = ["Team Research "] license = "All rights reseverd" -readme = "README.md" [tool.poetry.dependencies] python = ">=3.10,<3.11" @@ -31,6 +30,18 @@ black = "^23.10" pylint = "^3" coverage = "^7.3" requests = "^2.31" +pre-commit = "^3.6.0" + + +[tool.poetry.group.telemetry.dependencies] +opentelemetry-instrumentation-pika = "^0.43b0" +opentelemetry-exporter-otlp = "^1.22.0" +opentelemetry-instrumentation = "^0.43b0" +opentelemetry-api = "^1.22.0" +opentelemetry-sdk = "^1.22.0" +opentelemetry-exporter-otlp-proto-http = "^1.22.0" +opentelemetry-instrumentation-flask = "^0.43b0" +opentelemetry-instrumentation-requests = "^0.43b0" [tool.pytest.ini_options] minversion = "6.0" @@ -39,6 +50,31 @@ testpaths = ["tests", "integration"] log_cli = 1 log_cli_level = "DEBUG" +[tool.mypy] +exclude = ['.venv'] + +[tool.black] +line-length = 120 +target-version = ["py310"] + +[tool.isort] +profile = "black" + +[tool.pylint.format] +max-line-length = 
120 +disable = [ + "C0114", + "C0325", + "R0801", + "R0902", + "R0903", + "R0904", + "R0913", + "R0914", + "W0511" +] +docstring-min-length = 3 + [[tool.poetry.source]] name = "PyPI" priority = "primary" From 739a7c073155e50024122d083df75e9d19503d09 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 24 Jan 2024 12:10:46 +0100 Subject: [PATCH 24/39] feat(opentelemetry): add queue instrumenting test --- pyinfra/config/validators.py | 4 +- pyinfra/queue/manager.py | 20 --------- tests/conftest.py | 26 ++++++++++++ tests/unit_test/opentelemetry_test.py | 58 +++++++++++++++++++++++++++ tests/unit_test/queue_test.py | 23 ----------- 5 files changed, 86 insertions(+), 45 deletions(-) create mode 100644 tests/unit_test/opentelemetry_test.py diff --git a/pyinfra/config/validators.py b/pyinfra/config/validators.py index 4db9f84..bb1204b 100644 --- a/pyinfra/config/validators.py +++ b/pyinfra/config/validators.py @@ -46,6 +46,6 @@ webserver_validators = [ ] opentelemetry_validators = [ - Validator("tracing.endpoint", must_exist=True, is_type_of=str), - Validator("tracing.service.name", must_exist=True, is_type_of=str), + Validator("tracing.opentelemetry.endpoint", must_exist=True, is_type_of=str), + Validator("tracing.opentelemetry.service_name", must_exist=True, is_type_of=str), ] diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index f23a179..e284438 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -10,12 +10,6 @@ import pika import pika.exceptions from dynaconf import Dynaconf from kn_utils.logging import logger -from opentelemetry import trace -from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter -from opentelemetry.instrumentation.pika import PikaInstrumentor -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor from pika.adapters.blocking_connection import BlockingChannel, 
BlockingConnection from retry import retry @@ -42,9 +36,6 @@ class QueueManager: self.channel: Union[BlockingChannel, None] = None self.connection_sleep = settings.rabbitmq.connection_sleep - self.tracing_endpoint = settings.tracing.endpoint - self.service_name = settings.tracing.service_name - atexit.register(self.stop_consuming) signal.signal(signal.SIGTERM, self._handle_stop_signal) signal.signal(signal.SIGINT, self._handle_stop_signal) @@ -85,17 +76,6 @@ class QueueManager: logger.info("Connection to RabbitMQ established, channel open.") - resource = Resource(attributes={"service.name": self.service_name}) - trace.set_tracer_provider(TracerProvider(resource=resource)) - tracer = trace.get_tracer(__name__) - - otlp_exporter = OTLPSpanExporter(endpoint=self.tracing_endpoint) - span_processor = BatchSpanProcessor(otlp_exporter) - trace.get_tracer_provider().add_span_processor(span_processor) - - pika_instrumentation = PikaInstrumentor() - pika_instrumentation.instrument_channel(channel=self.channel) - def is_ready(self): self.establish_connection() return self.channel.is_open diff --git a/tests/conftest.py b/tests/conftest.py index 4939c83..796a508 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,9 @@ +import json + import pytest from pyinfra.config.loader import load_settings, pyinfra_config_path +from pyinfra.queue.manager import QueueManager from pyinfra.storage.connection import get_storage_from_settings @@ -18,3 +21,26 @@ def storage(storage_backend, settings): yield storage storage.clear_bucket() + + +@pytest.fixture(scope="session") +def queue_manager(settings): + settings.rabbitmq_heartbeat = 10 + settings.connection_sleep = 5 + queue_manager = QueueManager(settings) + yield queue_manager + + +@pytest.fixture +def input_message(): + return json.dumps( + { + "targetFilePath": "test/target.json.gz", + "responseFilePath": "test/response.json.gz", + } + ) + + +@pytest.fixture +def stop_message(): + return "STOP" diff --git 
a/tests/unit_test/opentelemetry_test.py b/tests/unit_test/opentelemetry_test.py new file mode 100644 index 0000000..a21c563 --- /dev/null +++ b/tests/unit_test/opentelemetry_test.py @@ -0,0 +1,58 @@ +import json +from time import sleep + +from opentelemetry.instrumentation.pika import PikaInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter, SpanExportResult +from opentelemetry import trace + + +class MySpanExporter(SpanExporter): + def __init__(self): + self.traces = [] + + def export(self, spans): + for span in spans: + self.traces.append(span.to_json()) + return SpanExportResult.SUCCESS + + def shutdown(self): + pass + + +class TestOpenTelemetry: + def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message): + service_name = "deine-mutter-serivce" + + resource = Resource(attributes={"service.name": service_name}) + provider = TracerProvider(resource=resource) + # processor = BatchSpanProcessor(ConsoleSpanExporter()) + exporter = MySpanExporter() + processor = BatchSpanProcessor(exporter) + provider.add_span_processor(processor) + # otlp_exporter = OTLPSpanExporter(endpoint=self.tracing_endpoint) + # span_processor = BatchSpanProcessor(otlp_exporter) + # trace.get_tracer_provider().add_span_processor(span_processor) + + # Sets the global default tracer provider + trace.set_tracer_provider(provider) + + # Creates a tracer from the global tracer provider + tracer = trace.get_tracer("my.tracer.name") + + PikaInstrumentor().instrument() + + queue_manager.purge_queues() + queue_manager.publish_message_to_input_queue(input_message) + queue_manager.publish_message_to_input_queue(stop_message) + + def callback(_): + sleep(2) + return {"flat": "earth"} + + queue_manager.start_consuming(callback) + + for exported_trace in exporter.traces: + exported_trace = json.loads(exported_trace) + assert 
exported_trace["resource"]["attributes"]["service.name"] == service_name \ No newline at end of file diff --git a/tests/unit_test/queue_test.py b/tests/unit_test/queue_test.py index 720c6c3..492b24f 100644 --- a/tests/unit_test/queue_test.py +++ b/tests/unit_test/queue_test.py @@ -6,8 +6,6 @@ import pika import pytest from kn_utils.logging import logger -from pyinfra.queue.manager import QueueManager - logger.remove() logger.add(sink=stdout, level="DEBUG") @@ -20,27 +18,6 @@ def make_callback(process_time): return callback -@pytest.fixture(scope="session") -def queue_manager(settings): - settings.rabbitmq_heartbeat = 10 - settings.connection_sleep = 5 - queue_manager = QueueManager(settings) - yield queue_manager - - -@pytest.fixture -def input_message(): - return json.dumps({ - "targetFilePath": "test/target.json.gz", - "responseFilePath": "test/response.json.gz", - }) - - -@pytest.fixture -def stop_message(): - return "STOP" - - class TestQueueManager: def test_processing_of_several_messages(self, queue_manager, input_message, stop_message): queue_manager.purge_queues() From a4156668307ad0ee01edd445d3f6ae9030107b4d Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 24 Jan 2024 14:00:11 +0100 Subject: [PATCH 25/39] feat(opentelemetry): put logic in own module --- pyinfra/utils/opentelemetry.py | 64 +++++++++++++++++++++++++++ tests/unit_test/opentelemetry_test.py | 48 ++++---------------- 2 files changed, 73 insertions(+), 39 deletions(-) create mode 100644 pyinfra/utils/opentelemetry.py diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py new file mode 100644 index 0000000..3c8f46e --- /dev/null +++ b/pyinfra/utils/opentelemetry.py @@ -0,0 +1,64 @@ +import json + +from dynaconf import Dynaconf +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.pika import PikaInstrumentor +from opentelemetry.sdk.resources import Resource +from 
opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult + +from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import opentelemetry_validators + + +class JsonSpanExporter(SpanExporter): + def __init__(self): + self.traces = [] + + def export(self, spans): + for span in spans: + self.traces.append(json.loads(span.to_json())) + return SpanExportResult.SUCCESS + + def shutdown(self): + pass + + +def setup_trace(settings: Dynaconf, service_name: str = None, exporter: SpanExporter = None): + service_name = service_name or settings.tracing.opentelemetry.service_name + exporter = exporter or get_exporter(settings) + + resource = Resource(attributes={"service.name": service_name}) + provider = TracerProvider(resource=resource) + + processor = BatchSpanProcessor(exporter) + + provider.add_span_processor(processor) + + trace.set_tracer_provider(provider) + + +def get_exporter(settings: Dynaconf): + validate_settings(settings, validators=opentelemetry_validators) + + if settings.tracing.opentelemetry.exporter == "json": + return JsonSpanExporter() + elif settings.tracing.opentelemetry.exporter == "otlp": + return OTLPSpanExporter(endpoint=settings.metrics.opentelemetry.endpoint) + elif settings.tracing.opentelemetry.exporter == "console": + return ConsoleSpanExporter() + else: + raise ValueError( + f"Invalid OpenTelemetry exporter {settings.tracing.opentelemetry.exporter}. " + f"Valid values are 'json', 'otlp' and 'console'." 
+ ) + + +def instrument_pika(): + PikaInstrumentor().instrument() + + +# def instrument_app(app: FastAPI): +# FastAPIInstrumentor().instrument_app(app) diff --git a/tests/unit_test/opentelemetry_test.py b/tests/unit_test/opentelemetry_test.py index a21c563..a1c2599 100644 --- a/tests/unit_test/opentelemetry_test.py +++ b/tests/unit_test/opentelemetry_test.py @@ -1,47 +1,16 @@ -import json from time import sleep -from opentelemetry.instrumentation.pika import PikaInstrumentor -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter, SpanExportResult -from opentelemetry import trace - - -class MySpanExporter(SpanExporter): - def __init__(self): - self.traces = [] - - def export(self, spans): - for span in spans: - self.traces.append(span.to_json()) - return SpanExportResult.SUCCESS - - def shutdown(self): - pass +from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika class TestOpenTelemetry: - def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message): - service_name = "deine-mutter-serivce" + def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message, settings): + settings.tracing.opentelemetry.exporter = "json" - resource = Resource(attributes={"service.name": service_name}) - provider = TracerProvider(resource=resource) - # processor = BatchSpanProcessor(ConsoleSpanExporter()) - exporter = MySpanExporter() - processor = BatchSpanProcessor(exporter) - provider.add_span_processor(processor) - # otlp_exporter = OTLPSpanExporter(endpoint=self.tracing_endpoint) - # span_processor = BatchSpanProcessor(otlp_exporter) - # trace.get_tracer_provider().add_span_processor(span_processor) + exporter = get_exporter(settings) + setup_trace(settings, exporter=exporter) - # Sets the global default tracer provider - trace.set_tracer_provider(provider) - - # Creates a tracer from the 
global tracer provider - tracer = trace.get_tracer("my.tracer.name") - - PikaInstrumentor().instrument() + instrument_pika() queue_manager.purge_queues() queue_manager.publish_message_to_input_queue(input_message) @@ -54,5 +23,6 @@ class TestOpenTelemetry: queue_manager.start_consuming(callback) for exported_trace in exporter.traces: - exported_trace = json.loads(exported_trace) - assert exported_trace["resource"]["attributes"]["service.name"] == service_name \ No newline at end of file + assert ( + exported_trace["resource"]["attributes"]["service.name"] == settings.tracing.opentelemetry.service_name + ) From da163897c46c0720dd5714f46ef8c7b42f037edb Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 24 Jan 2024 14:26:10 +0100 Subject: [PATCH 26/39] feat(opentelemetry): add fastapi instumentation --- poetry.lock | 63 ++++++++++++++++++++++++++- pyinfra/utils/opentelemetry.py | 9 +++- pyproject.toml | 1 + tests/unit_test/opentelemetry_test.py | 43 ++++++++++++++++-- 4 files changed, 109 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d9b64d..8efbb41 100644 --- a/poetry.lock +++ b/poetry.lock @@ -101,6 +101,23 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + [[package]] name = "astroid" version = "3.0.2" @@ -1287,6 +1304,50 @@ opentelemetry-api = ">=1.4,<2.0" setuptools = ">=16.0" wrapt = ">=1.0.0,<2.0.0" 
+[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.43b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.43b0-py3-none-any.whl", hash = "sha256:1f593829fa039e9367820736fb063e92acd15c25b53d7bcb5d319971b8e93fd7"}, + {file = "opentelemetry_instrumentation_asgi-0.43b0.tar.gz", hash = "sha256:3f6f19333dca31ef696672e4e36cb1c2613c71dc7e847c11ff36a37e1130dadc"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] +test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.43b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.43b0-py3-none-any.whl", hash = "sha256:b79c044df68a52e07b35fa12a424e7cc0dd27ff0a171c5fdcc41dea9de8fc938"}, + {file = "opentelemetry_instrumentation_fastapi-0.43b0.tar.gz", hash = "sha256:2afaaf470622e1a2732182c68f6d2431ffe5e026a7edacd0f83605632b66347f"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-instrumentation-asgi = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] +test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.43b0)", "requests (>=2.23,<3.0)"] + [[package]] name = "opentelemetry-instrumentation-flask" version = "0.43b0" @@ -2470,4 +2531,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p 
[metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = "1b4d543363bcda2ec51a2e29e849ceac983fa0e7933de58ec6bf881614bb939f" +content-hash = "2e56e0e3c159f5d8fcc95b8623d56b1dbf94318230f5550a4c5aa15c288dfc84" diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index 3c8f46e..c96d8e5 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -1,8 +1,10 @@ import json from dynaconf import Dynaconf +from fastapi import FastAPI from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from opentelemetry.instrumentation.pika import PikaInstrumentor from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider @@ -37,6 +39,9 @@ def setup_trace(settings: Dynaconf, service_name: str = None, exporter: SpanExpo provider.add_span_processor(processor) + # TODO: This produces a warning if trying to set the provider twice. + # "WARNING opentelemetry.trace:__init__.py:521 Overriding of current TracerProvider is not allowed" + # This doesn't affect our current usage but should be fixed eventually. 
trace.set_tracer_provider(provider) @@ -60,5 +65,5 @@ def instrument_pika(): PikaInstrumentor().instrument() -# def instrument_app(app: FastAPI): -# FastAPIInstrumentor().instrument_app(app) +def instrument_app(app: FastAPI): + FastAPIInstrumentor().instrument_app(app) diff --git a/pyproject.toml b/pyproject.toml index 46761c0..55644d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ opentelemetry-sdk = "^1.22.0" opentelemetry-exporter-otlp-proto-http = "^1.22.0" opentelemetry-instrumentation-flask = "^0.43b0" opentelemetry-instrumentation-requests = "^0.43b0" +opentelemetry-instrumentation-fastapi = "^0.43b0" [tool.pytest.ini_options] minversion = "6.0" diff --git a/tests/unit_test/opentelemetry_test.py b/tests/unit_test/opentelemetry_test.py index a1c2599..a38dbbc 100644 --- a/tests/unit_test/opentelemetry_test.py +++ b/tests/unit_test/opentelemetry_test.py @@ -1,13 +1,39 @@ from time import sleep -from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika +import pytest +import requests +from fastapi import FastAPI + +from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika, instrument_app +from pyinfra.webserver.utils import create_webserver_thread_from_settings + + +@pytest.fixture(scope="class") +def app_with_tracing(settings): + app = FastAPI() + + @app.get("/test") + def test(): + return {"flat": "earth"} + + instrument_app(app) + + thread = create_webserver_thread_from_settings(app, settings) + thread.start() + sleep(1) + yield + thread.join(timeout=1) + + +@pytest.fixture(scope="session") +def exporter(settings): + settings.tracing.opentelemetry.exporter = "json" + return get_exporter(settings) class TestOpenTelemetry: - def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message, settings): - settings.tracing.opentelemetry.exporter = "json" + def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message, settings, exporter): - exporter = 
get_exporter(settings) setup_trace(settings, exporter=exporter) instrument_pika() @@ -26,3 +52,12 @@ class TestOpenTelemetry: assert ( exported_trace["resource"]["attributes"]["service.name"] == settings.tracing.opentelemetry.service_name ) + + def test_webserver_requests_are_traced(self, settings, app_with_tracing, exporter): + settings.tracing.opentelemetry.exporter = "json" + + setup_trace(settings, exporter=exporter) + + requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/test") + + print(exporter.traces) From e0b32fa4488b919c87746737abc13e9993419831 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 24 Jan 2024 15:52:33 +0100 Subject: [PATCH 27/39] feat(opentelemetry): fastAPI tracing The tests don't work yet since the webserver has to run in a thread and the traces don't get exported from the thread with local json exporting. However, with an export to an external server this should still work. WIP --- pyinfra/examples.py | 26 +++++++++++---- pyinfra/utils/opentelemetry.py | 6 ++-- pyinfra/webserver/prometheus.py | 9 ++--- pyinfra/webserver/utils.py | 17 ++++++++++ tests/unit_test/opentelemetry_test.py | 48 ++++++++++----------------- 5 files changed, 60 insertions(+), 46 deletions(-) diff --git a/pyinfra/examples.py b/pyinfra/examples.py index e147135..ab00c4f 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -2,17 +2,22 @@ from dynaconf import Dynaconf from fastapi import FastAPI from kn_utils.logging import logger +from pyinfra.config.loader import validate_settings, get_all_validators from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor from pyinfra.queue.manager import QueueManager -from pyinfra.webserver.prometheus import add_prometheus_endpoint, \ - make_prometheus_processing_time_decorator_from_settings +from pyinfra.utils.opentelemetry import setup_trace, instrument_pika, instrument_app +from pyinfra.webserver.prometheus import ( + add_prometheus_endpoint, + 
make_prometheus_processing_time_decorator_from_settings, +) from pyinfra.webserver.utils import add_health_check_endpoint, create_webserver_thread_from_settings def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataProcessor, settings: Dynaconf): """Default serving logic for research services. - Supplies /health, /ready and /prometheus endpoints. The process_fn is monitored for processing time per call. + Supplies /health, /ready and /prometheus endpoints (if enabled). The process_fn is monitored for processing time per + call. Also traces the queue messages (if enabled). Workload is only received via queue messages. The message contains a file path to the data to be processed, which gets downloaded from the storage. The data and the message are then passed to the process_fn. The process_fn should return a json serializable object. This object is then uploaded to the storage. The response message is just the @@ -20,15 +25,24 @@ def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataPr Adapt as needed. 
""" + validate_settings(settings, get_all_validators()) + logger.info(f"Starting webserver and queue consumer...") app = FastAPI() - app = add_prometheus_endpoint(app) - process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) - queue_manager = QueueManager(settings) + if settings.metrics.prometheus.enabled: + logger.info(f"Prometheus metrics enabled.") + app = add_prometheus_endpoint(app) + process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) + + if settings.tracing.opentelemetry.enabled: + logger.info(f"OpenTelemetry tracing enabled.") + setup_trace(settings) + instrument_pika() + app = add_health_check_endpoint(app, queue_manager.is_ready) webserver_thread = create_webserver_thread_from_settings(app, settings) diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index c96d8e5..c994ea1 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -2,6 +2,7 @@ import json from dynaconf import Dynaconf from fastapi import FastAPI +from kn_utils.logging import logger from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor @@ -33,10 +34,9 @@ def setup_trace(settings: Dynaconf, service_name: str = None, exporter: SpanExpo exporter = exporter or get_exporter(settings) resource = Resource(attributes={"service.name": service_name}) - provider = TracerProvider(resource=resource) + provider = TracerProvider(resource=resource, shutdown_on_exit=True) processor = BatchSpanProcessor(exporter) - provider.add_span_processor(processor) # TODO: This produces a warning if trying to set the provider twice. 
@@ -51,7 +51,7 @@ def get_exporter(settings: Dynaconf): if settings.tracing.opentelemetry.exporter == "json": return JsonSpanExporter() elif settings.tracing.opentelemetry.exporter == "otlp": - return OTLPSpanExporter(endpoint=settings.metrics.opentelemetry.endpoint) + return OTLPSpanExporter(endpoint=settings.tracing.opentelemetry.endpoint) elif settings.tracing.opentelemetry.exporter == "console": return ConsoleSpanExporter() else: diff --git a/pyinfra/webserver/prometheus.py b/pyinfra/webserver/prometheus.py index a274dbf..4bd52dc 100644 --- a/pyinfra/webserver/prometheus.py +++ b/pyinfra/webserver/prometheus.py @@ -36,16 +36,11 @@ def make_prometheus_processing_time_decorator_from_settings( postfix: str = "processing_time", registry: CollectorRegistry = REGISTRY, ) -> Decorator: - """Make a decorator for monitoring the processing time of a function. The decorator is only applied if the - prometheus metrics are enabled in the settings. - This, and other metrics should follow the convention - {product name}_{service name}_{processing step / parameter to monitor}. + """Make a decorator for monitoring the processing time of a function. This, and other metrics should follow the + convention {product name}_{service name}_{processing step / parameter to monitor}. 
""" validate_settings(settings, validators=prometheus_validators) - if not settings.metrics.prometheus.enabled: - return identity - processing_time_sum = Summary( f"{settings.metrics.prometheus.prefix}_{postfix}", "Summed up processing time per call.", diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index fc8a16c..ffe72c5 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -8,11 +8,15 @@ from fastapi import FastAPI from pyinfra.config.validators import webserver_validators from pyinfra.config.loader import validate_settings +from pyinfra.utils.opentelemetry import setup_trace, instrument_app def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: validate_settings(settings, validators=webserver_validators) + if settings.tracing.opentelemetry.enabled: + return create_webserver_thread_with_tracing(app, settings) + return create_webserver_thread(app=app, port=settings.webserver.port, host=settings.webserver.host) @@ -25,6 +29,19 @@ def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thr return thread +def create_webserver_thread_with_tracing(app: FastAPI, settings: Dynaconf) -> threading.Thread: + + def inner(): + setup_trace(settings) + instrument_app(app) + uvicorn.run(app, port=settings.webserver.port, host=settings.webserver.host, log_level=logging.WARNING) + + thread = threading.Thread(target=inner) + thread.daemon = True + + return thread + + HealthFunction = Callable[[], bool] diff --git a/tests/unit_test/opentelemetry_test.py b/tests/unit_test/opentelemetry_test.py index a38dbbc..e624b18 100644 --- a/tests/unit_test/opentelemetry_test.py +++ b/tests/unit_test/opentelemetry_test.py @@ -1,28 +1,8 @@ from time import sleep import pytest -import requests -from fastapi import FastAPI -from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika, instrument_app -from pyinfra.webserver.utils import create_webserver_thread_from_settings 
- - -@pytest.fixture(scope="class") -def app_with_tracing(settings): - app = FastAPI() - - @app.get("/test") - def test(): - return {"flat": "earth"} - - instrument_app(app) - - thread = create_webserver_thread_from_settings(app, settings) - thread.start() - sleep(1) - yield - thread.join(timeout=1) +from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika @pytest.fixture(scope="session") @@ -33,7 +13,6 @@ def exporter(settings): class TestOpenTelemetry: def test_queue_messages_are_traced(self, queue_manager, input_message, stop_message, settings, exporter): - setup_trace(settings, exporter=exporter) instrument_pika() @@ -53,11 +32,20 @@ class TestOpenTelemetry: exported_trace["resource"]["attributes"]["service.name"] == settings.tracing.opentelemetry.service_name ) - def test_webserver_requests_are_traced(self, settings, app_with_tracing, exporter): - settings.tracing.opentelemetry.exporter = "json" - - setup_trace(settings, exporter=exporter) - - requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/test") - - print(exporter.traces) + # def test_webserver_requests_are_traced(self, settings): + # settings.tracing.opentelemetry.exporter = "console" + # settings.tracing.opentelemetry.enabled = True + # + # app = FastAPI() + # + # @app.get("/test") + # def test(): + # return {"test": "test"} + # + # thread = create_webserver_thread_from_settings(app, settings) + # thread.start() + # sleep(1) + # + # requests.get(f"http://{settings.webserver.host}:{settings.webserver.port}/test") + # + # thread.join(timeout=1) From c18475a77d71303c866a9df826703c74f48edecc Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 24 Jan 2024 17:46:54 +0100 Subject: [PATCH 28/39] feat(opentelemetry): improve readability --- pyinfra/examples.py | 12 ++++++++---- pyinfra/queue/manager.py | 3 +-- scripts/start_pyinfra.py | 9 ++++++--- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/pyinfra/examples.py b/pyinfra/examples.py 
index ab00c4f..21a82a7 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -5,7 +5,7 @@ from kn_utils.logging import logger from pyinfra.config.loader import validate_settings, get_all_validators from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor from pyinfra.queue.manager import QueueManager -from pyinfra.utils.opentelemetry import setup_trace, instrument_pika, instrument_app +from pyinfra.utils.opentelemetry import setup_trace, instrument_pika from pyinfra.webserver.prometheus import ( add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings, @@ -13,11 +13,15 @@ from pyinfra.webserver.prometheus import ( from pyinfra.webserver.utils import add_health_check_endpoint, create_webserver_thread_from_settings -def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataProcessor, settings: Dynaconf): +def start_standard_queue_consumer( + process_fn: DataProcessor, + settings: Dynaconf, + app: FastAPI = None, +): """Default serving logic for research services. Supplies /health, /ready and /prometheus endpoints (if enabled). The process_fn is monitored for processing time per - call. Also traces the queue messages (if enabled). + call. Also traces the queue messages via openTelemetry (if enabled). Workload is only received via queue messages. The message contains a file path to the data to be processed, which gets downloaded from the storage. The data and the message are then passed to the process_fn. The process_fn should return a json serializable object. This object is then uploaded to the storage. 
The response message is just the @@ -29,7 +33,7 @@ def start_queue_consumer_with_prometheus_and_health_endpoints(process_fn: DataPr logger.info(f"Starting webserver and queue consumer...") - app = FastAPI() + app = app or FastAPI() queue_manager = QueueManager(settings) diff --git a/pyinfra/queue/manager.py b/pyinfra/queue/manager.py index e284438..b71f0bd 100644 --- a/pyinfra/queue/manager.py +++ b/pyinfra/queue/manager.py @@ -141,11 +141,10 @@ class QueueManager: logger.info("Processing payload in separate thread.") future = thread_pool_executor.submit(message_processor, unpacked_message_body) - # FIXME: This block is probably not necessary, but kept since the implications of removing it are + # TODO: This block is probably not necessary, but kept since the implications of removing it are # unclear. Remove it in a future iteration where less changes are being made to the code base. while future.running(): logger.debug("Waiting for payload processing to finish...") - self.connection.process_data_events() self.connection.sleep(self.connection_sleep) return future.result() diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 4cda348..69fe2d9 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,7 +1,9 @@ import time +from dynaconf import Dynaconf + from pyinfra.config.loader import load_settings, parse_args -from pyinfra.examples import start_queue_consumer_with_prometheus_and_health_endpoints +from pyinfra.examples import start_standard_queue_consumer def processor_mock(_data: dict, _message: dict) -> dict: @@ -10,5 +12,6 @@ def processor_mock(_data: dict, _message: dict) -> dict: if __name__ == "__main__": - settings = load_settings(parse_args().settings_path) - start_queue_consumer_with_prometheus_and_health_endpoints(processor_mock, settings) + arguments = parse_args() + settings = load_settings(arguments.settings_path) + start_standard_queue_consumer(processor_mock, settings) From 8ff637d6baa0d1a1279253f4433a228386d91e17 
Mon Sep 17 00:00:00 2001 From: Isaac Riley Date: Thu, 25 Jan 2024 08:25:19 +0100 Subject: [PATCH 29/39] chore: add opentelemetry subsection to README.md; formatting --- README.md | 7 +++++++ pyinfra/examples.py | 11 +++++++---- pyinfra/queue/callback.py | 5 ++++- pyinfra/storage/connection.py | 7 +++++-- pyinfra/storage/storages/azure.py | 4 ++-- pyinfra/storage/storages/s3.py | 4 ++-- pyinfra/utils/opentelemetry.py | 8 ++++++-- pyinfra/webserver/prometheus.py | 6 +++--- pyinfra/webserver/utils.py | 5 ++--- tests/unit_test/knutils_logger_test.py | 3 ++- tests/unit_test/opentelemetry_test.py | 2 +- tests/unit_test/prometheus_monitoring_test.py | 5 ++++- tests/unit_test/storage_test.py | 5 ++++- 13 files changed, 49 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index 5f4b5e3..98ac178 100755 --- a/README.md +++ b/README.md @@ -61,6 +61,13 @@ the [complete example](pyinfra/examples.py). | TRACING__ENDPOINT | tracing.endpoint | Endpoint to which OpenTelemetry traces are exported | TRACING__SERVER_NAME | tracing.server_name | Name of the service as displayed in the traces collected +### OpenTelemetry + +Open telemetry (vis its Python SDK) is set up to be as unobtrusive as possible; for typical use cases it can be configured +from environment variables, without additional work in the microservice app, although additional confiuration is possible. + +`TRACING_ENDPOINT` should typically be set to `http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces`. 
+ ## Queue Manager The queue manager is responsible for consuming messages from the input queue, processing them and sending the response diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 21a82a7..4e62e69 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -2,15 +2,18 @@ from dynaconf import Dynaconf from fastapi import FastAPI from kn_utils.logging import logger -from pyinfra.config.loader import validate_settings, get_all_validators -from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor +from pyinfra.config.loader import get_all_validators, validate_settings +from pyinfra.queue.callback import DataProcessor, make_download_process_upload_callback from pyinfra.queue.manager import QueueManager -from pyinfra.utils.opentelemetry import setup_trace, instrument_pika +from pyinfra.utils.opentelemetry import instrument_pika, setup_trace from pyinfra.webserver.prometheus import ( add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings, ) -from pyinfra.webserver.utils import add_health_check_endpoint, create_webserver_thread_from_settings +from pyinfra.webserver.utils import ( + add_health_check_endpoint, + create_webserver_thread_from_settings, +) def start_standard_queue_consumer( diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py index 2b39a45..997c3f4 100644 --- a/pyinfra/queue/callback.py +++ b/pyinfra/queue/callback.py @@ -4,7 +4,10 @@ from dynaconf import Dynaconf from kn_utils.logging import logger from pyinfra.storage.connection import get_storage -from pyinfra.storage.utils import download_data_as_specified_in_message, upload_data_as_specified_in_message +from pyinfra.storage.utils import ( + download_data_as_specified_in_message, + upload_data_as_specified_in_message, +) DataProcessor = Callable[[Union[dict, bytes], dict], dict] diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index 75d278a..442463c 100644 --- a/pyinfra/storage/connection.py 
+++ b/pyinfra/storage/connection.py @@ -4,12 +4,15 @@ import requests from dynaconf import Dynaconf from kn_utils.logging import logger +from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import ( + multi_tenant_storage_validators, + storage_validators, +) from pyinfra.storage.storages.azure import get_azure_storage_from_settings from pyinfra.storage.storages.s3 import get_s3_storage_from_settings from pyinfra.storage.storages.storage import Storage from pyinfra.utils.cipher import decrypt -from pyinfra.config.validators import storage_validators, multi_tenant_storage_validators -from pyinfra.config.loader import validate_settings def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: diff --git a/pyinfra/storage/storages/azure.py b/pyinfra/storage/storages/azure.py index 1207d9a..4ffd2d4 100644 --- a/pyinfra/storage/storages/azure.py +++ b/pyinfra/storage/storages/azure.py @@ -7,9 +7,9 @@ from dynaconf import Dynaconf from kn_utils.logging import logger from retry import retry -from pyinfra.storage.storages.storage import Storage -from pyinfra.config.validators import azure_storage_validators from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import azure_storage_validators +from pyinfra.storage.storages.storage import Storage logging.getLogger("azure").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) diff --git a/pyinfra/storage/storages/s3.py b/pyinfra/storage/storages/s3.py index 0a6636e..d84601f 100644 --- a/pyinfra/storage/storages/s3.py +++ b/pyinfra/storage/storages/s3.py @@ -7,9 +7,9 @@ from kn_utils.logging import logger from minio import Minio from retry import retry -from pyinfra.storage.storages.storage import Storage -from pyinfra.config.validators import s3_storage_validators from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import s3_storage_validators +from pyinfra.storage.storages.storage import 
Storage from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index c994ea1..b3909af 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -9,8 +9,12 @@ from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from opentelemetry.instrumentation.pika import PikaInstrumentor from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter -from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult +from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + ConsoleSpanExporter, + SpanExporter, + SpanExportResult, +) from pyinfra.config.loader import validate_settings from pyinfra.config.validators import opentelemetry_validators diff --git a/pyinfra/webserver/prometheus.py b/pyinfra/webserver/prometheus.py index 4bd52dc..6a9f045 100644 --- a/pyinfra/webserver/prometheus.py +++ b/pyinfra/webserver/prometheus.py @@ -4,11 +4,11 @@ from typing import Callable, TypeVar from dynaconf import Dynaconf from fastapi import FastAPI from funcy import identity -from prometheus_client import generate_latest, CollectorRegistry, REGISTRY, Summary +from prometheus_client import REGISTRY, CollectorRegistry, Summary, generate_latest from starlette.responses import Response -from pyinfra.config.validators import prometheus_validators from pyinfra.config.loader import validate_settings +from pyinfra.config.validators import prometheus_validators def add_prometheus_endpoint(app: FastAPI, registry: CollectorRegistry = REGISTRY) -> FastAPI: @@ -37,7 +37,7 @@ def make_prometheus_processing_time_decorator_from_settings( registry: CollectorRegistry = REGISTRY, ) -> Decorator: """Make a decorator for monitoring the processing time of a function. 
This, and other metrics should follow the - convention {product name}_{service name}_{processing step / parameter to monitor}. + convention {product name}_{service name}_{processing step / parameter to monitor}. """ validate_settings(settings, validators=prometheus_validators) diff --git a/pyinfra/webserver/utils.py b/pyinfra/webserver/utils.py index ffe72c5..bbf5ac5 100644 --- a/pyinfra/webserver/utils.py +++ b/pyinfra/webserver/utils.py @@ -6,9 +6,9 @@ import uvicorn from dynaconf import Dynaconf from fastapi import FastAPI -from pyinfra.config.validators import webserver_validators from pyinfra.config.loader import validate_settings -from pyinfra.utils.opentelemetry import setup_trace, instrument_app +from pyinfra.config.validators import webserver_validators +from pyinfra.utils.opentelemetry import instrument_app, setup_trace def create_webserver_thread_from_settings(app: FastAPI, settings: Dynaconf) -> threading.Thread: @@ -30,7 +30,6 @@ def create_webserver_thread(app: FastAPI, port: int, host: str) -> threading.Thr def create_webserver_thread_with_tracing(app: FastAPI, settings: Dynaconf) -> threading.Thread: - def inner(): setup_trace(settings) instrument_app(app) diff --git a/tests/unit_test/knutils_logger_test.py b/tests/unit_test/knutils_logger_test.py index f082428..1248ce2 100644 --- a/tests/unit_test/knutils_logger_test.py +++ b/tests/unit_test/knutils_logger_test.py @@ -4,7 +4,7 @@ from kn_utils.logging import logger def test_necessary_log_levels_are_supported_by_kn_utils(): logger.setLevel("TRACE") - + logger.trace("trace") logger.debug("debug") logger.info("info") @@ -13,6 +13,7 @@ def test_necessary_log_levels_are_supported_by_kn_utils(): logger.exception("exception", exc_info="this is an exception") logger.error("error", exc_info="this is an error") + def test_setlevel_warn(): logger.setLevel("WARN") logger.warning("warn") diff --git a/tests/unit_test/opentelemetry_test.py b/tests/unit_test/opentelemetry_test.py index e624b18..3a68ab9 100644 --- 
a/tests/unit_test/opentelemetry_test.py +++ b/tests/unit_test/opentelemetry_test.py @@ -2,7 +2,7 @@ from time import sleep import pytest -from pyinfra.utils.opentelemetry import get_exporter, setup_trace, instrument_pika +from pyinfra.utils.opentelemetry import get_exporter, instrument_pika, setup_trace @pytest.fixture(scope="session") diff --git a/tests/unit_test/prometheus_monitoring_test.py b/tests/unit_test/prometheus_monitoring_test.py index 3ced056..37bf458 100644 --- a/tests/unit_test/prometheus_monitoring_test.py +++ b/tests/unit_test/prometheus_monitoring_test.py @@ -5,7 +5,10 @@ import pytest import requests from fastapi import FastAPI -from pyinfra.webserver.prometheus import add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings +from pyinfra.webserver.prometheus import ( + add_prometheus_endpoint, + make_prometheus_processing_time_decorator_from_settings, +) from pyinfra.webserver.utils import create_webserver_thread_from_settings diff --git a/tests/unit_test/storage_test.py b/tests/unit_test/storage_test.py index 365d066..6f69be9 100644 --- a/tests/unit_test/storage_test.py +++ b/tests/unit_test/storage_test.py @@ -6,7 +6,10 @@ import pytest from fastapi import FastAPI from pyinfra.storage.connection import get_storage_from_tenant_id -from pyinfra.storage.utils import download_data_as_specified_in_message, upload_data_as_specified_in_message +from pyinfra.storage.utils import ( + download_data_as_specified_in_message, + upload_data_as_specified_in_message, +) from pyinfra.utils.cipher import encrypt from pyinfra.webserver.utils import create_webserver_thread From f6f56b8d8c0681b7a3c8ba199867877141071ed4 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 25 Jan 2024 09:08:51 +0100 Subject: [PATCH 30/39] refactor: simplify storage connection logic --- pyinfra/storage/connection.py | 121 +++++++++++++++----------------- tests/conftest.py | 4 +- tests/unit_test/storage_test.py | 8 ++- 3 files changed, 64 insertions(+), 
69 deletions(-) diff --git a/pyinfra/storage/connection.py b/pyinfra/storage/connection.py index 442463c..08cb343 100644 --- a/pyinfra/storage/connection.py +++ b/pyinfra/storage/connection.py @@ -16,83 +16,74 @@ from pyinfra.utils.cipher import decrypt def get_storage(settings: Dynaconf, tenant_id: str = None) -> Storage: - """Get storage connection based on settings. - If tenant_id is provided, gets storage connection information from tenant server instead. - The connections are cached based on the settings.cache_size value. - - In the future, when the default storage from config is no longer needed (only multi-tenant storage will be used), - get_storage_from_tenant_id can replace this function directly. + """Establishes a storage connection. + If tenant_id is provided, gets storage connection information from tenant server. These connections are cached. + Otherwise, gets storage connection information from settings. """ logger.info("Establishing storage connection...") if tenant_id: logger.info(f"Using tenant storage for {tenant_id}.") - return get_storage_from_tenant_id(tenant_id, settings) - else: - logger.info("Using default storage.") - return get_storage_from_settings(settings) + validate_settings(settings, multi_tenant_storage_validators) + return get_storage_for_tenant( + tenant_id, + settings.storage.tenant_server.endpoint, + settings.storage.tenant_server.public_key, + ) -def get_storage_from_settings(settings: Dynaconf) -> Storage: + logger.info("Using default storage.") validate_settings(settings, storage_validators) - @lru_cache(maxsize=settings.storage.cache_size) - def _get_storage(backend: str) -> Storage: - return storage_dispatcher[backend](settings) - - return _get_storage(settings.storage.backend) - - -def get_storage_from_tenant_id(tenant_id: str, settings: Dynaconf) -> Storage: - validate_settings(settings, multi_tenant_storage_validators) - - @lru_cache(maxsize=settings.storage.cache_size) - def _get_storage(tenant: str, endpoint: str, 
public_key: str) -> Storage: - response = requests.get(f"{endpoint}/{tenant}").json() - - maybe_azure = response.get("azureStorageConnection") - maybe_s3 = response.get("s3StorageConnection") - assert (maybe_azure or maybe_s3) and not (maybe_azure and maybe_s3), "Only one storage backend can be used." - - if maybe_azure: - connection_string = decrypt(public_key, maybe_azure["connectionString"]) - backend = "azure" - storage_info = { - "storage": { - "azure": { - "connection_string": connection_string, - "container": maybe_azure["containerName"], - }, - } - } - elif maybe_s3: - secret = decrypt(public_key, maybe_s3["secret"]) - backend = "s3" - storage_info = { - "storage": { - "s3": { - "endpoint": maybe_s3["endpoint"], - "key": maybe_s3["key"], - "secret": secret, - "region": maybe_s3["region"], - "bucket": maybe_s3["bucketName"], - }, - } - } - else: - raise Exception(f"Unknown storage backend in {response}.") - - storage_settings = Dynaconf() - storage_settings.update(storage_info) - - storage = storage_dispatcher[backend](storage_settings) - - return storage - - return _get_storage(tenant_id, settings.storage.tenant_server.endpoint, settings.storage.tenant_server.public_key) + return storage_dispatcher[settings.storage.backend](settings) storage_dispatcher = { "azure": get_azure_storage_from_settings, "s3": get_s3_storage_from_settings, } + + +@lru_cache(maxsize=10) +def get_storage_for_tenant(tenant: str, endpoint: str, public_key: str) -> Storage: + response = requests.get(f"{endpoint}/{tenant}").json() + + maybe_azure = response.get("azureStorageConnection") + maybe_s3 = response.get("s3StorageConnection") + + assert (maybe_azure or maybe_s3) and not (maybe_azure and maybe_s3), "Only one storage backend can be used." 
+ + if maybe_azure: + connection_string = decrypt(public_key, maybe_azure["connectionString"]) + backend = "azure" + storage_info = { + "storage": { + "azure": { + "connection_string": connection_string, + "container": maybe_azure["containerName"], + }, + } + } + elif maybe_s3: + secret = decrypt(public_key, maybe_s3["secret"]) + backend = "s3" + storage_info = { + "storage": { + "s3": { + "endpoint": maybe_s3["endpoint"], + "key": maybe_s3["key"], + "secret": secret, + "region": maybe_s3["region"], + "bucket": maybe_s3["bucketName"], + }, + } + } + else: + raise Exception(f"Unknown storage backend in {response}.") + + storage_settings = Dynaconf() + storage_settings.update(storage_info) + + storage = storage_dispatcher[backend](storage_settings) + + return storage diff --git a/tests/conftest.py b/tests/conftest.py index 796a508..76b4495 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,7 +4,7 @@ import pytest from pyinfra.config.loader import load_settings, pyinfra_config_path from pyinfra.queue.manager import QueueManager -from pyinfra.storage.connection import get_storage_from_settings +from pyinfra.storage.connection import get_storage @pytest.fixture(scope="session") @@ -16,7 +16,7 @@ def settings(): def storage(storage_backend, settings): settings.storage.backend = storage_backend - storage = get_storage_from_settings(settings) + storage = get_storage(settings) storage.make_bucket() yield storage diff --git a/tests/unit_test/storage_test.py b/tests/unit_test/storage_test.py index 6f69be9..dd3ea25 100644 --- a/tests/unit_test/storage_test.py +++ b/tests/unit_test/storage_test.py @@ -5,7 +5,7 @@ from time import sleep import pytest from fastapi import FastAPI -from pyinfra.storage.connection import get_storage_from_tenant_id +from pyinfra.storage.connection import get_storage_for_tenant from pyinfra.storage.utils import ( download_data_as_specified_in_message, upload_data_as_specified_in_message, @@ -106,7 +106,11 @@ class TestMultiTenantStorage: 
self, tenant_id, tenant_server_mock, settings, tenant_server_host, tenant_server_port ): settings["storage"]["tenant_server"]["endpoint"] = f"http://{tenant_server_host}:{tenant_server_port}" - storage = get_storage_from_tenant_id(tenant_id, settings) + storage = get_storage_for_tenant( + tenant_id, + settings["storage"]["tenant_server"]["endpoint"], + settings["storage"]["tenant_server"]["public_key"], + ) storage.put_object("file", b"content") data_received = storage.get_object("file") From b2f073e0c5cf22cf8a4331ceb9f76b404463578e Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 25 Jan 2024 10:41:48 +0100 Subject: [PATCH 31/39] refactor: IoC for callback, update readme --- README.md | 98 ++++++++++++++++++++++------------ pyinfra/examples.py | 19 +++---- pyinfra/queue/callback.py | 12 ++--- pyinfra/utils/opentelemetry.py | 10 ++-- scripts/start_pyinfra.py | 7 +-- 5 files changed, 85 insertions(+), 61 deletions(-) diff --git a/README.md b/README.md index 98ac178..dec3e7c 100755 --- a/README.md +++ b/README.md @@ -7,7 +7,6 @@ 5. [ Scripts ](#scripts) 6. [ Tests ](#tests) - ## About Shared library for the research team, containing code related to infrastructure and communication with other services. @@ -31,42 +30,44 @@ The following table shows all necessary settings. You can find a preconfigured s bitbucket. These are the complete settings, you only need all if using all features of the service as described in the [complete example](pyinfra/examples.py). 
-| Environment Variable | Internal / .toml Name | Description | -|------------------------------------|----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| LOGGING__LEVEL | logging.level | Log level | -| METRICS__PROMETHEUS__ENABLED | metrics.prometheus.enabled | Enable Prometheus metrics collection | -| METRICS__PROMETHEUS__PREFIX | metrics.prometheus.prefix | Prefix for Prometheus metrics (e.g. {product}-{service}) | -| WEBSERVER__HOST | webserver.host | Host of the webserver (offering e.g. /prometheus, /ready and /health endpoints) | -| WEBSERVER__PORT | webserver.port | Port of the webserver | -| RABBITMQ__HOST | rabbitmq.host | Host of the RabbitMQ server | -| RABBITMQ__PORT | rabbitmq.port | Port of the RabbitMQ server | -| RABBITMQ__USERNAME | rabbitmq.username | Username for the RabbitMQ server | -| RABBITMQ__PASSWORD | rabbitmq.password | Password for the RabbitMQ server | -| RABBITMQ__HEARTBEAT | rabbitmq.heartbeat | Heartbeat for the RabbitMQ server | -| RABBITMQ__CONNECTION_SLEEP | rabbitmq.connection_sleep | Sleep time intervals during message processing. Has to be a divider of heartbeat, and shouldn't be too big, since only in these intervals queue interactions happen (like receiving new messages) This is also the minimum time the service needs to process a message. 
| -| RABBITMQ__INPUT_QUEUE | rabbitmq.input_queue | Name of the input queue | -| RABBITMQ__OUTPUT_QUEUE | rabbitmq.output_queue | Name of the output queue | -| RABBITMQ__DEAD_LETTER_QUEUE | rabbitmq.dead_letter_queue | Name of the dead letter queue | -| STORAGE__BACKEND | storage.backend | Storage backend to use (currently only "s3" and "azure" are supported) | -| STORAGE__CACHE_SIZE | storage.cache_size | Number of cached storage connection (to reduce connection stops and reconnects for multi tenancy). | -| STORAGE__S3__BUCKET_NAME | storage.s3.bucket_name | Name of the S3 bucket | -| STORAGE__S3__ENDPOINT | storage.s3.endpoint | Endpoint of the S3 server | -| STORAGE__S3__KEY | storage.s3.key | Access key for the S3 server | -| STORAGE__S3__SECRET | storage.s3.secret | Secret key for the S3 server | -| STORAGE__S3__REGION | storage.s3.region | Region of the S3 server | -| STORAGE__AZURE__CONTAINER | storage.azure.container_name | Name of the Azure container | -| STORAGE__AZURE__CONNECTION_STRING | storage.azure.connection_string | Connection string for the Azure server | -| STORAGE__TENANT_SERVER__PUBLIC_KEY | storage.tenant_server.public_key | Public key of the tenant server | -| STORAGE__TENANT_SERVER__ENDPOINT | storage.tenant_server.endpoint | Endpoint of the tenant server | -| TRACING__ENDPOINT | tracing.endpoint | Endpoint to which OpenTelemetry traces are exported -| TRACING__SERVER_NAME | tracing.server_name | Name of the service as displayed in the traces collected +| Environment Variable | Internal / .toml Name | Description | +|--------------------------------------|------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| LOGGING__LEVEL | logging.level | Log level | +| METRICS__PROMETHEUS__ENABLED | 
metrics.prometheus.enabled | Enable Prometheus metrics collection | +| METRICS__PROMETHEUS__PREFIX | metrics.prometheus.prefix | Prefix for Prometheus metrics (e.g. {product}-{service}) | +| WEBSERVER__HOST | webserver.host | Host of the webserver (offering e.g. /prometheus, /ready and /health endpoints) | +| WEBSERVER__PORT | webserver.port | Port of the webserver | +| RABBITMQ__HOST | rabbitmq.host | Host of the RabbitMQ server | +| RABBITMQ__PORT | rabbitmq.port | Port of the RabbitMQ server | +| RABBITMQ__USERNAME | rabbitmq.username | Username for the RabbitMQ server | +| RABBITMQ__PASSWORD | rabbitmq.password | Password for the RabbitMQ server | +| RABBITMQ__HEARTBEAT | rabbitmq.heartbeat | Heartbeat for the RabbitMQ server | +| RABBITMQ__CONNECTION_SLEEP | rabbitmq.connection_sleep | Sleep time intervals during message processing. Has to be a divider of heartbeat, and shouldn't be too big, since only in these intervals queue interactions happen (like receiving new messages) This is also the minimum time the service needs to process a message. 
| +| RABBITMQ__INPUT_QUEUE | rabbitmq.input_queue | Name of the input queue | +| RABBITMQ__OUTPUT_QUEUE | rabbitmq.output_queue | Name of the output queue | +| RABBITMQ__DEAD_LETTER_QUEUE | rabbitmq.dead_letter_queue | Name of the dead letter queue | +| STORAGE__BACKEND | storage.backend | Storage backend to use (currently only "s3" and "azure" are supported) | +| STORAGE__S3__BUCKET | storage.s3.bucket | Name of the S3 bucket | +| STORAGE__S3__ENDPOINT | storage.s3.endpoint | Endpoint of the S3 server | +| STORAGE__S3__KEY | storage.s3.key | Access key for the S3 server | +| STORAGE__S3__SECRET | storage.s3.secret | Secret key for the S3 server | +| STORAGE__S3__REGION | storage.s3.region | Region of the S3 server | +| STORAGE__AZURE__CONTAINER | storage.azure.container_name | Name of the Azure container | +| STORAGE__AZURE__CONNECTION_STRING | storage.azure.connection_string | Connection string for the Azure server | +| STORAGE__TENANT_SERVER__PUBLIC_KEY | storage.tenant_server.public_key | Public key of the tenant server | +| STORAGE__TENANT_SERVER__ENDPOINT | storage.tenant_server.endpoint | Endpoint of the tenant server | +| TRACING__OPENTELEMETRY__ENDPOINT | tracing.opentelemetry.endpoint | Endpoint to which OpenTelemetry traces are exported +| TRACING__OPENTELEMETRY__SERVICE_NAME | tracing.opentelemetry.service_name | Name of the service as displayed in the traces collected ### OpenTelemetry -Open telemetry (vis its Python SDK) is set up to be as unobtrusive as possible; for typical use cases it can be configured -from environment variables, without additional work in the microservice app, although additional confiuration is possible. +Open telemetry (vis its Python SDK) is set up to be as unobtrusive as possible; for typical use cases it can be +configured +from environment variables, without additional work in the microservice app, although additional confiuration is +possible. 
-`TRACING_ENDPOINT` should typically be set to `http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces`. +`TRACING_ENDPOINT` should typically be set +to `http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces`. ## Queue Manager @@ -75,7 +76,7 @@ to the output queue. The default callback also downloads data from the storage a The response message does not contain the data itself, but the identifiers from the input message (including headers beginning with "X-"). -Usage: +### Standalone Usage ```python from pyinfra.queue.manager import QueueManager @@ -86,11 +87,38 @@ settings = load_settings("path/to/settings") processing_function: DataProcessor # function should expect a dict (json) or bytes (pdf) as input and should return a json serializable object. queue_manager = QueueManager(settings) -queue_manager.start_consuming(make_download_process_upload_callback(processing_function, settings)) +callback = make_download_process_upload_callback(processing_function, settings) +queue_manager.start_consuming(make_download_process_upload_callback(callback, settings)) ``` +### Usage in a Service + +This is the recommended way to use the module. This includes the webserver, Prometheus metrics and health endpoints. +Custom endpoints can be added by adding a new route to the `app` object beforehand. Settings are loaded from files +specified as CLI arguments (e.g. `--settings-path path/to/settings.toml`). The values can also be set or overriden via +environment variables (e.g. `LOGGING__LEVEL=DEBUG`). + +The callback can be replaced with a custom one, for example if the data to process is contained in the message itself +and not on the storage. 
+ +```python +from pyinfra.config.loader import load_settings, parse_args +from pyinfra.examples import start_standard_queue_consumer +from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor + +processing_function: DataProcessor + +arguments = parse_args() +settings = load_settings(arguments.settings_path) + +callback = make_download_process_upload_callback(processing_function, settings) +start_standard_queue_consumer(callback, settings) # optionally also pass a fastAPI app object with preconfigured routes +``` + + ### AMQP input message: + Either use the legacy format with dossierId and fileId as strings or the new format where absolute paths are used. All headers beginning with "X-" are forwarded to the message processor, and returned in the response message (e.g. "X-TENANT-ID" is used to acquire storage information for the tenant). diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 4e62e69..810542d 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -3,7 +3,7 @@ from fastapi import FastAPI from kn_utils.logging import logger from pyinfra.config.loader import get_all_validators, validate_settings -from pyinfra.queue.callback import DataProcessor, make_download_process_upload_callback +from pyinfra.queue.callback import Callback from pyinfra.queue.manager import QueueManager from pyinfra.utils.opentelemetry import instrument_pika, setup_trace from pyinfra.webserver.prometheus import ( @@ -17,20 +17,16 @@ from pyinfra.webserver.utils import ( def start_standard_queue_consumer( - process_fn: DataProcessor, + callback: Callback, settings: Dynaconf, app: FastAPI = None, ): """Default serving logic for research services. - Supplies /health, /ready and /prometheus endpoints (if enabled). The process_fn is monitored for processing time per - call. Also traces the queue messages via openTelemetry (if enabled). - Workload is only received via queue messages. 
The message contains a file path to the data to be processed, which - gets downloaded from the storage. The data and the message are then passed to the process_fn. The process_fn should - return a json serializable object. This object is then uploaded to the storage. The response message is just the - original message. - - Adapt as needed. + Supplies /health, /ready and /prometheus endpoints (if enabled). The callback is monitored for processing time per + message. Also traces the queue messages via openTelemetry (if enabled). + Workload is received via queue messages and processed by the callback function (see pyinfra.queue.callback for + callbacks). """ validate_settings(settings, get_all_validators()) @@ -43,7 +39,7 @@ def start_standard_queue_consumer( if settings.metrics.prometheus.enabled: logger.info(f"Prometheus metrics enabled.") app = add_prometheus_endpoint(app) - process_fn = make_prometheus_processing_time_decorator_from_settings(settings)(process_fn) + callback = make_prometheus_processing_time_decorator_from_settings(settings)(callback) if settings.tracing.opentelemetry.enabled: logger.info(f"OpenTelemetry tracing enabled.") @@ -55,5 +51,4 @@ def start_standard_queue_consumer( webserver_thread = create_webserver_thread_from_settings(app, settings) webserver_thread.start() - callback = make_download_process_upload_callback(process_fn, settings) queue_manager.start_consuming(callback) diff --git a/pyinfra/queue/callback.py b/pyinfra/queue/callback.py index 997c3f4..8389660 100644 --- a/pyinfra/queue/callback.py +++ b/pyinfra/queue/callback.py @@ -9,18 +9,18 @@ from pyinfra.storage.utils import ( upload_data_as_specified_in_message, ) -DataProcessor = Callable[[Union[dict, bytes], dict], dict] +DataProcessor = Callable[[Union[dict, bytes], dict], Union[dict, list, str]] +Callback = Callable[[dict], dict] -def make_download_process_upload_callback(data_processor: DataProcessor, settings: Dynaconf): +def 
make_download_process_upload_callback(data_processor: DataProcessor, settings: Dynaconf) -> Callback: """Default callback for processing queue messages. + Data will be downloaded from the storage as specified in the message. If a tenant id is specified, the storage will be configured to use that tenant id, otherwise the storage is configured as specified in the settings. The data is the passed to the dataprocessor, together with the message. The dataprocessor should return a - json serializable object. This object is then uploaded to the storage as specified in the message. - - The response message is just the original message. - Adapt as needed. + json serializable object. This object is then uploaded to the storage as specified in the message. The response + message is just the original message. """ def inner(queue_message_payload: dict) -> dict: diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index b3909af..20341cd 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -2,7 +2,6 @@ import json from dynaconf import Dynaconf from fastapi import FastAPI -from kn_utils.logging import logger from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor @@ -43,10 +42,11 @@ def setup_trace(settings: Dynaconf, service_name: str = None, exporter: SpanExpo processor = BatchSpanProcessor(exporter) provider.add_span_processor(processor) - # TODO: This produces a warning if trying to set the provider twice. - # "WARNING opentelemetry.trace:__init__.py:521 Overriding of current TracerProvider is not allowed" - # This doesn't affect our current usage but should be fixed eventually. - trace.set_tracer_provider(provider) + # TODO: trace.set_tracer_provider produces a warning if trying to set the provider twice. 
+ # "WARNING opentelemetry.trace:__init__.py:521 Overriding of current TracerProvider is not allowed" + # This doesn't seem to affect the functionality since we only want to use the tracer provided set in the beginning. + # We work around the log message by using the protected method with log=False. + trace._set_tracer_provider(provider, log=False) def get_exporter(settings: Dynaconf): diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 69fe2d9..3494294 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,9 +1,8 @@ import time -from dynaconf import Dynaconf - from pyinfra.config.loader import load_settings, parse_args from pyinfra.examples import start_standard_queue_consumer +from pyinfra.queue.callback import make_download_process_upload_callback def processor_mock(_data: dict, _message: dict) -> dict: @@ -14,4 +13,6 @@ def processor_mock(_data: dict, _message: dict) -> dict: if __name__ == "__main__": arguments = parse_args() settings = load_settings(arguments.settings_path) - start_standard_queue_consumer(processor_mock, settings) + + callback = make_download_process_upload_callback(processor_mock, settings) + start_standard_queue_consumer(callback, settings) From ec9ab211989eb1b9a01eda1c9ca888815d99a81f Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Thu, 25 Jan 2024 11:08:50 +0100 Subject: [PATCH 32/39] package: increment major version and update kn-utils --- poetry.lock | 74 +++++++++++++++++++++++++------------------------- pyproject.toml | 4 +-- 2 files changed, 39 insertions(+), 39 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8efbb41..9b97766 100644 --- a/poetry.lock +++ b/poetry.lock @@ -539,43 +539,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.0" +version = "42.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, - {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, - {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, - {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, - {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, - {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, - {file = 
"cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, - {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, + {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, + {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, + {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, + {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, + {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, + {file = "cryptography-42.0.1.tar.gz", hash = 
"sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, ] [package.dependencies] @@ -1568,13 +1568,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2531,4 +2531,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = "2e56e0e3c159f5d8fcc95b8623d56b1dbf94318230f5550a4c5aa15c288dfc84" +content-hash = "3de2b41d81f76e392559fdad3472d1849534899a9c4ac8454a17c316c6af13ae" diff --git a/pyproject.toml b/pyproject.toml index 55644d9..92e3c41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pyinfra" -version = "1.10.0" +version = "2.0.0" description = "" authors = ["Team Research "] license = "All rights reseverd" @@ -19,7 +19,7 @@ azure-storage-blob = "^12.13" funcy = "^2" pycryptodome = "^3.19" # research shared packages -kn-utils = { version = "^0.2.4.dev112", source = "gitlab-research" } +kn-utils = { version = "^0.2.7", source = "gitlab-research" } fastapi = "^0.109.0" uvicorn = "^0.26.0" From fff5be2e508c492d64c56472b353374ab69cf510 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Mon, 29 Jan 2024 16:01:49 +0100 Subject: [PATCH 33/39] feat(settings): improve config loading logic Load 
settings from .toml files, .env and environment variables. Also ensures a ROOT_PATH environment variable is set. If ROOT_PATH is not set and no root_path argument is passed, the current working directory is used as root. Settings paths can be a single .toml file, a folder containing .toml files or a list of .toml files and folders. If a folder is passed, all .toml files in the folder are loaded. If settings path is None, only .env and environment variables are loaded. If settings_path are relative paths, they are joined with the root_path argument. --- pyinfra/config/loader.py | 104 +++++++++++++++++++++++++++------ pyinfra/examples.py | 4 +- scripts/send_request.py | 4 +- tests/conftest.py | 4 +- tests/unit_test/config_test.py | 25 +++++++- 5 files changed, 113 insertions(+), 28 deletions(-) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index bfc728b..5da779c 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -1,5 +1,6 @@ import argparse import os +from functools import partial from pathlib import Path from typing import Union @@ -7,20 +8,27 @@ from dynaconf import Dynaconf, ValidationError, Validator from funcy import lflatten from kn_utils.logging import logger +# This path is ment for testing purposes and convenience. It probably won't reflect the actual root path when pyinfra is +# installed as a package, so don't use it in production code, but define your own root path as described in load config. 
+local_pyinfra_root_path = Path(__file__).parents[2] -def load_settings(settings_path: Union[str, Path] = None, validators: list[Validator] = None): - settings_path = Path(settings_path) if settings_path else None - validators = validators or get_all_validators() - if not settings_path: - logger.info("No settings path specified, only loading .env end ENVs.") - settings_files = [] - elif os.path.isdir(settings_path): - logger.info(f"Settings path is a directory, loading all .toml files in the directory: {settings_path}") - settings_files = list(settings_path.glob("*.toml")) - else: - logger.info(f"Settings path is a file, loading only the specified file: {settings_path}") - settings_files = [settings_path] +def load_settings( + settings_path: Union[str, Path, list] = "config/", + root_path: Union[str, Path] = None, + validators: list[Validator] = None, +): + """Load settings from .toml files, .env and environment variables. Also ensures a ROOT_PATH environment variable is + set. If ROOT_PATH is not set and no root_path argument is passed, the current working directory is used as root. + Settings paths can be a single .toml file, a folder containing .toml files or a list of .toml files and folders. + If a folder is passed, all .toml files in the folder are loaded. If settings path is None, only .env and + environment variables are loaded. If settings_path are relative paths, they are joined with the root_path argument. 
+ """ + + root_path = get_or_set_root_path(root_path) + validators = validators or get_pyinfra_validators() + + settings_files = normalize_to_settings_files(settings_path, root_path) settings = Dynaconf( load_dotenv=True, @@ -34,10 +42,70 @@ def load_settings(settings_path: Union[str, Path] = None, validators: list[Valid return settings -pyinfra_config_path = Path(__file__).resolve().parents[2] / "config/" +def normalize_to_settings_files(settings_path: Union[str, Path, list], root_path: Union[str, Path]): + if settings_path is None: + logger.info("No settings path specified, only loading .env end ENVs.") + settings_files = [] + elif isinstance(settings_path, str) or isinstance(settings_path, Path): + settings_files = [settings_path] + elif isinstance(settings_path, list): + settings_files = settings_path + else: + raise ValueError(f"Invalid settings path: {settings_path=}") + + settings_files = lflatten(map(partial(_normalize_and_verify, root_path=root_path), settings_files)) + + return settings_files -def get_all_validators(): +def _normalize_and_verify(settings_path: Path, root_path: Path): + settings_path = Path(settings_path) + root_path = Path(root_path) + + if not settings_path.is_absolute(): + logger.debug(f"Settings path is not absolute, joining with root path: {root_path}") + settings_path = root_path / settings_path + + if settings_path.is_dir(): + logger.debug(f"Settings path is a directory, loading all .toml files in the directory: {settings_path}") + settings_files = list(settings_path.glob("*.toml")) + elif settings_path.is_file(): + logger.debug(f"Settings path is a file, loading specified file: {settings_path}") + settings_files = [settings_path] + else: + raise ValueError(f"Invalid settings path: {settings_path=}, {root_path=}") + + return settings_files + + +def get_or_set_root_path(root_path: Union[str, Path] = None): + env_root_path = os.environ.get("ROOT_PATH") + + if env_root_path and root_path: + if Path(env_root_path) != Path(root_path): + 
logger.warning( + f"'ROOT_PATH' environment variable is set to {env_root_path}, but a different root_path argument was passed. " + f"Setting new value {root_path}." + ) + os.environ["ROOT_PATH"] = str(root_path) + + elif env_root_path: + root_path = env_root_path + logger.debug(f"'ROOT_PATH' environment variable is set to {root_path}.") + + elif root_path: + logger.info(f"'ROOT_PATH' environment variable is not set, setting to {root_path}.") + os.environ["ROOT_PATH"] = str(root_path) + + else: + root_path = Path.cwd() + logger.info(f"'ROOT_PATH' environment variable is not set, defaulting to working directory {root_path}.") + os.environ["ROOT_PATH"] = str(root_path) + + return root_path + + +def get_pyinfra_validators(): import pyinfra.config.validators return lflatten( @@ -64,10 +132,8 @@ def validate_settings(settings: Dynaconf, validators): def parse_args(): parser = argparse.ArgumentParser() parser.add_argument( - "--settings_path", - "-s", - type=Path, - default=pyinfra_config_path, - help="Path to settings file or folder. Must be a .toml file or a folder containing .toml files.", + "settings_path", + help="Path to settings file(s) or folder(s). 
Must be .toml file(s) or a folder(s) containing .toml files.", + nargs="+", ) return parser.parse_args() diff --git a/pyinfra/examples.py b/pyinfra/examples.py index 810542d..cec9132 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -2,7 +2,7 @@ from dynaconf import Dynaconf from fastapi import FastAPI from kn_utils.logging import logger -from pyinfra.config.loader import get_all_validators, validate_settings +from pyinfra.config.loader import get_pyinfra_validators, validate_settings from pyinfra.queue.callback import Callback from pyinfra.queue.manager import QueueManager from pyinfra.utils.opentelemetry import instrument_pika, setup_trace @@ -28,7 +28,7 @@ def start_standard_queue_consumer( Workload is received via queue messages and processed by the callback function (see pyinfra.queue.callback for callbacks). """ - validate_settings(settings, get_all_validators()) + validate_settings(settings, get_pyinfra_validators()) logger.info(f"Starting webserver and queue consumer...") diff --git a/scripts/send_request.py b/scripts/send_request.py index b33f9a1..d1f1fda 100644 --- a/scripts/send_request.py +++ b/scripts/send_request.py @@ -4,11 +4,11 @@ from operator import itemgetter from kn_utils.logging import logger -from pyinfra.config.loader import load_settings, pyinfra_config_path +from pyinfra.config.loader import load_settings, local_pyinfra_root_path from pyinfra.queue.manager import QueueManager from pyinfra.storage.storages.s3 import get_s3_storage_from_settings -settings = load_settings(pyinfra_config_path) +settings = load_settings(local_pyinfra_root_path / "config/") def upload_json_and_make_message_body(): diff --git a/tests/conftest.py b/tests/conftest.py index 76b4495..cf4e811 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,14 +2,14 @@ import json import pytest -from pyinfra.config.loader import load_settings, pyinfra_config_path +from pyinfra.config.loader import load_settings, local_pyinfra_root_path from 
pyinfra.queue.manager import QueueManager from pyinfra.storage.connection import get_storage @pytest.fixture(scope="session") def settings(): - return load_settings(pyinfra_config_path) + return load_settings(local_pyinfra_root_path / "config/") @pytest.fixture(scope="class") diff --git a/tests/unit_test/config_test.py b/tests/unit_test/config_test.py index 363dbac..e21b798 100644 --- a/tests/unit_test/config_test.py +++ b/tests/unit_test/config_test.py @@ -1,9 +1,10 @@ import os +from pathlib import Path import pytest from dynaconf import Validator -from pyinfra.config.loader import load_settings +from pyinfra.config.loader import load_settings, local_pyinfra_root_path, normalize_to_settings_files from pyinfra.config.validators import webserver_validators @@ -22,7 +23,7 @@ class TestConfig: validators = webserver_validators - test_settings = load_settings(validators=validators) + test_settings = load_settings(root_path=local_pyinfra_root_path, validators=validators) assert test_settings.webserver.host == "localhost" @@ -30,7 +31,25 @@ class TestConfig: os.environ["TEST__VALUE__INT"] = "1" os.environ["TEST__VALUE__STR"] = "test" - test_settings = load_settings(validators=test_validators) + test_settings = load_settings(root_path=local_pyinfra_root_path, validators=test_validators) assert test_settings.test.value.int == 1 assert test_settings.test.value.str == "test" + + @pytest.mark.parametrize( + "settings_path,expected_file_paths", + [ + (None, []), + ("config", [f"{local_pyinfra_root_path}/config/settings.toml"]), + ("config/settings.toml", [f"{local_pyinfra_root_path}/config/settings.toml"]), + (f"{local_pyinfra_root_path}/config", [f"{local_pyinfra_root_path}/config/settings.toml"]), + ], + ) + def test_normalize_settings_files(self, settings_path, expected_file_paths): + files = normalize_to_settings_files(settings_path, local_pyinfra_root_path) + print(files) + + assert len(files) == len(expected_file_paths) + + for path, expected in zip(files, 
expected_file_paths): + assert path == Path(expected).absolute() From f1b8e5a25f81d48a51595db2ba5a45098db934a7 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 30 Jan 2024 13:27:19 +0100 Subject: [PATCH 34/39] refac(arg parse): rename settings parsing fn for clarity --- README.md | 6 +++--- pyinfra/config/loader.py | 2 +- scripts/start_pyinfra.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index dec3e7c..189ad7d 100755 --- a/README.md +++ b/README.md @@ -102,17 +102,17 @@ The callback can be replaced with a custom one, for example if the data to proce and not on the storage. ```python -from pyinfra.config.loader import load_settings, parse_args +from pyinfra.config.loader import load_settings, parse_settings_path from pyinfra.examples import start_standard_queue_consumer from pyinfra.queue.callback import make_download_process_upload_callback, DataProcessor processing_function: DataProcessor -arguments = parse_args() +arguments = parse_settings_path() settings = load_settings(arguments.settings_path) callback = make_download_process_upload_callback(processing_function, settings) -start_standard_queue_consumer(callback, settings) # optionally also pass a fastAPI app object with preconfigured routes +start_standard_queue_consumer(callback, settings) # optionally also pass a fastAPI app object with preconfigured routes ``` diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index 5da779c..8937be9 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -129,7 +129,7 @@ def validate_settings(settings: Dynaconf, validators): logger.debug("Settings validated.") -def parse_args(): +def parse_settings_path(): parser = argparse.ArgumentParser() parser.add_argument( "settings_path", diff --git a/scripts/start_pyinfra.py b/scripts/start_pyinfra.py index 3494294..963b1c9 100644 --- a/scripts/start_pyinfra.py +++ b/scripts/start_pyinfra.py @@ -1,6 +1,6 @@ import time -from 
pyinfra.config.loader import load_settings, parse_args +from pyinfra.config.loader import load_settings, parse_settings_path from pyinfra.examples import start_standard_queue_consumer from pyinfra.queue.callback import make_download_process_upload_callback @@ -11,7 +11,7 @@ def processor_mock(_data: dict, _message: dict) -> dict: if __name__ == "__main__": - arguments = parse_args() + arguments = parse_settings_path() settings = load_settings(arguments.settings_path) callback = make_download_process_upload_callback(processor_mock, settings) From c1301d287fab8b57f303f5b2397bbee9710e6c6f Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 30 Jan 2024 14:31:08 +0100 Subject: [PATCH 35/39] fix(dependencies): move opentel deps to main since groups are not packaged with CI script --- poetry.lock | 315 +++++++++++++++++++++++-------------------------- pyproject.toml | 22 ++-- 2 files changed, 155 insertions(+), 182 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9b97766..7d3235c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -475,63 +475,63 @@ test = ["pytest"] [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = 
"coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", 
hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = 
"coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = 
"coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = 
"coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.extras] @@ -648,17 +648,18 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "distlib" @@ -1765,18 +1766,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.0" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.1" typing-extensions = ">=4.6.1" [package.extras] @@ -1784,116 +1785,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = 
"sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = 
"sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - 
{file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash 
= "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = 
"pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = 
"pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = 
"pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = 
"pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = 
"pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = 
"pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = 
"pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", 
hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, ] [package.dependencies] @@ -2531,4 +2506,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.11" -content-hash = "3de2b41d81f76e392559fdad3472d1849534899a9c4ac8454a17c316c6af13ae" +content-hash = "af2b1911ad0272fa16a2927f50cfd0649a8eb7c03278769f8ee6e77fe8ef4f7f" diff --git a/pyproject.toml b/pyproject.toml index 92e3c41..864aaef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,18 +22,7 @@ pycryptodome = "^3.19" kn-utils = { version = "^0.2.7", source = "gitlab-research" } fastapi = "^0.109.0" uvicorn = "^0.26.0" - -[tool.poetry.group.dev.dependencies] -pytest = "^7" -ipykernel = "^6.26.0" -black = "^23.10" -pylint = "^3" -coverage = "^7.3" -requests = "^2.31" -pre-commit = "^3.6.0" - - -[tool.poetry.group.telemetry.dependencies] +# [tool.poetry.group.telemetry.dependencies] opentelemetry-instrumentation-pika = "^0.43b0" opentelemetry-exporter-otlp = "^1.22.0" opentelemetry-instrumentation = "^0.43b0" @@ -44,6 +33,15 @@ opentelemetry-instrumentation-flask = "^0.43b0" opentelemetry-instrumentation-requests = "^0.43b0" opentelemetry-instrumentation-fastapi = "^0.43b0" +[tool.poetry.group.dev.dependencies] +pytest = "^7" +ipykernel = "^6.26.0" +black = "^23.10" +pylint = "^3" +coverage = "^7.3" +requests = "^2.31" +pre-commit = "^3.6.0" + [tool.pytest.ini_options] minversion = "6.0" addopts = "-ra -q" From 88cfb2b1c1a45fee0a8334bb087748f81e63d111 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Tue, 30 Jan 2024 14:52:35 +0100 Subject: [PATCH 36/39] fix(settings): add debug log --- pyinfra/config/loader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index 8937be9..2477add 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -54,6 +54,7 @@ def normalize_to_settings_files(settings_path: Union[str, Path, 
list], root_path raise ValueError(f"Invalid settings path: {settings_path=}") settings_files = lflatten(map(partial(_normalize_and_verify, root_path=root_path), settings_files)) + logger.debug(f"Normalized settings files: {settings_files}") return settings_files From 7093e019252802c0b709624bec69b4901db22628 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 31 Jan 2024 09:09:13 +0100 Subject: [PATCH 37/39] feat(opentelemetry): add webserver tracing to default pipeline --- README.md | 4 ++-- pyinfra/examples.py | 3 ++- pyinfra/utils/opentelemetry.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 189ad7d..f497131 100755 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ Shared library for the research team, containing code related to infrastructure and communication with other services. Offers a simple interface for processing data and sending responses via AMQP, monitoring via Prometheus and storage -access via S3 or Azure. +access via S3 or Azure. Also export traces via OpenTelemetry for queue messages and webserver requests. To start, see the [complete example](pyinfra/examples.py) which shows how to use all features of the service and can be imported and used directly for default research service pipelines (data ID in message, download data from storage, @@ -66,7 +66,7 @@ configured from environment variables, without additional work in the microservice app, although additional confiuration is possible. -`TRACING_ENDPOINT` should typically be set +`TRACING__OPENTELEMETRY__ENDPOINT` should typically be set to `http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces`. 
## Queue Manager diff --git a/pyinfra/examples.py b/pyinfra/examples.py index cec9132..de25bae 100644 --- a/pyinfra/examples.py +++ b/pyinfra/examples.py @@ -5,7 +5,7 @@ from kn_utils.logging import logger from pyinfra.config.loader import get_pyinfra_validators, validate_settings from pyinfra.queue.callback import Callback from pyinfra.queue.manager import QueueManager -from pyinfra.utils.opentelemetry import instrument_pika, setup_trace +from pyinfra.utils.opentelemetry import instrument_pika, setup_trace, instrument_app from pyinfra.webserver.prometheus import ( add_prometheus_endpoint, make_prometheus_processing_time_decorator_from_settings, @@ -45,6 +45,7 @@ def start_standard_queue_consumer( logger.info(f"OpenTelemetry tracing enabled.") setup_trace(settings) instrument_pika() + instrument_app(app) app = add_health_check_endpoint(app, queue_manager.is_ready) diff --git a/pyinfra/utils/opentelemetry.py b/pyinfra/utils/opentelemetry.py index 20341cd..f23f6f5 100644 --- a/pyinfra/utils/opentelemetry.py +++ b/pyinfra/utils/opentelemetry.py @@ -69,5 +69,5 @@ def instrument_pika(): PikaInstrumentor().instrument() -def instrument_app(app: FastAPI): - FastAPIInstrumentor().instrument_app(app) +def instrument_app(app: FastAPI, excluded_urls: str = "/health,/ready,/prometheus"): + FastAPIInstrumentor().instrument_app(app, excluded_urls=excluded_urls) From af914ab3ae9e19ab1a4f3e06f51929115cee9d72 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 31 Jan 2024 10:12:32 +0100 Subject: [PATCH 38/39] fix(argparse): automatically output settings path --- pyinfra/config/loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index 2477add..c7335d6 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -137,4 +137,4 @@ def parse_settings_path(): help="Path to settings file(s) or folder(s). 
Must be .toml file(s) or a folder(s) containing .toml files.", nargs="+", ) - return parser.parse_args() + return parser.parse_args().settings_path From bfb27383e429819ac2d31afe53c4379e53e29151 Mon Sep 17 00:00:00 2001 From: Julius Unverfehrt Date: Wed, 31 Jan 2024 10:24:29 +0100 Subject: [PATCH 39/39] fix(settings): change precedence to ENV ROOT_PATH > root_path arg --- pyinfra/config/loader.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/pyinfra/config/loader.py b/pyinfra/config/loader.py index c7335d6..6c07641 100644 --- a/pyinfra/config/loader.py +++ b/pyinfra/config/loader.py @@ -21,6 +21,7 @@ def load_settings( """Load settings from .toml files, .env and environment variables. Also ensures a ROOT_PATH environment variable is set. If ROOT_PATH is not set and no root_path argument is passed, the current working directory is used as root. Settings paths can be a single .toml file, a folder containing .toml files or a list of .toml files and folders. + If a ROOT_PATH environment variable is set, it is not overwritten by the root_path argument. If a folder is passed, all .toml files in the folder are loaded. If settings path is None, only .env and environment variables are loaded. If settings_path are relative paths, they are joined with the root_path argument. """ @@ -82,15 +83,7 @@ def _normalize_and_verify(settings_path: Path, root_path: Path): def get_or_set_root_path(root_path: Union[str, Path] = None): env_root_path = os.environ.get("ROOT_PATH") - if env_root_path and root_path: - if Path(env_root_path) != Path(root_path): - logger.warning( - f"'ROOT_PATH' environment variable is set to {env_root_path}, but a different root_path argument was passed. " - f"Setting new value {root_path}." - ) - os.environ["ROOT_PATH"] = str(root_path) - - elif env_root_path: + if env_root_path: root_path = env_root_path logger.debug(f"'ROOT_PATH' environment variable is set to {root_path}.")