"""Smoke-test script: upload a gzipped JSON payload to S3-compatible storage,
publish a processing request to RabbitMQ, then consume the response queue and
fetch the produced result object from storage."""

import gzip
import json
import logging
from operator import itemgetter

import pika

from pyinfra.config import get_config
from pyinfra.storage.adapters.s3 import get_s3_storage

CONFIG = get_config()

logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)


def read_connection_params() -> pika.ConnectionParameters:
    """Build RabbitMQ connection parameters from the shared config."""
    credentials = pika.PlainCredentials(CONFIG.rabbitmq_username, CONFIG.rabbitmq_password)
    parameters = pika.ConnectionParameters(
        host=CONFIG.rabbitmq_host,
        port=CONFIG.rabbitmq_port,
        heartbeat=int(CONFIG.rabbitmq_heartbeat),
        credentials=credentials,
    )
    return parameters


def make_channel(connection) -> pika.adapters.blocking_connection.BlockingChannel:
    # prefetch_count=1 limits delivery to one unacknowledged message at a time.
    channel = connection.channel()
    channel.basic_qos(prefetch_count=1)
    return channel


def declare_queue(channel, queue: str):
    # Rejected or expired messages are routed to the configured dead-letter
    # queue via the default ("") exchange.
    args = {"x-dead-letter-exchange": "", "x-dead-letter-routing-key": CONFIG.dead_letter_queue}
    return channel.queue_declare(queue=queue, auto_delete=False, durable=True, arguments=args)


def make_connection() -> pika.BlockingConnection:
    parameters = read_connection_params()
    connection = pika.BlockingConnection(parameters)
    return connection


def upload_and_make_message_body() -> dict:
    """Upload a gzipped JSON test object and return the matching request message."""
    bucket = CONFIG.storage_bucket
    dossier_id, file_id, suffix = "dossier", "file", "json.gz"
    content = {"key": "value"}
    object_name = f"{dossier_id}/{file_id}.{suffix}"
    data = gzip.compress(json.dumps(content).encode("utf-8"))

    storage = get_s3_storage(CONFIG)
    if not storage.has_bucket(bucket):
        storage.make_bucket(bucket)
    storage.put_object(bucket, object_name, data)

    message_body = {
        "dossierId": dossier_id,
        "fileId": file_id,
        "targetFileExtension": suffix,
        "responseFileExtension": f"result.{suffix}",
    }
    return message_body


def main():
    connection = make_connection()
    channel = make_channel(connection)
    declare_queue(channel, CONFIG.request_queue)
    declare_queue(channel, CONFIG.response_queue)

    # Publish the request to the request queue via the default ("") exchange.
    message = upload_and_make_message_body()
    message_encoded = json.dumps(message).encode("utf-8")
    channel.basic_publish(
        "",
        CONFIG.request_queue,
        # properties=pika.BasicProperties(headers=None),
        properties=pika.BasicProperties(headers={"x-tenant-id": "redaction"}),
        body=message_encoded,
    )
    logger.info(f"Put {message} on {CONFIG.request_queue}")

    # Consume the response queue; with inactivity_timeout set, consume()
    # yields (None, None, None) once no message arrives within 10 seconds,
    # which the empty-body check below uses to stop consuming.
    storage = get_s3_storage(CONFIG)
    for method_frame, properties, body in channel.consume(queue=CONFIG.response_queue, inactivity_timeout=10):
        if not body:
            break
        response = json.loads(body)
        logger.info(f"Received {response}")
        logger.info(f"Message headers: {properties.headers}")
        channel.basic_ack(method_frame.delivery_tag)

        # Fetch and decode the result object the worker wrote back to storage.
        dossier_id, file_id = itemgetter("dossierId", "fileId")(response)
        suffix = message["responseFileExtension"]
        result = storage.get_object(CONFIG.storage_bucket, f"{dossier_id}/{file_id}.{suffix}")
        result = json.loads(gzip.decompress(result))
        logger.info(f"Contents of result on storage: {result}")

    channel.close()
    connection.close()


if __name__ == "__main__":
    main()