pyinfra/scripts/mock_client.py
Julius Unverfehrt 391bcc482f Pull request #21: refactoring of prometheus tunneling
Merge in RR/pyinfra from add-prometheus-metrics to master

Squashed commit of the following:

commit 3736e867bfb105f2c2601f6d25343c996027cc5f
Author: Matthias Bisping <matthias.bisping@iqser.com>
Date:   Wed Mar 16 11:35:25 2022 +0100

    removed obsolete config entry

commit dc191b17d863ec4f8009fb130c2c3a78d4116969
Author: Matthias Bisping <matthias.bisping@iqser.com>
Date:   Wed Mar 16 11:34:12 2022 +0100

    removed obsolete dependency

commit 5ba9765e88da7dd15700b211794f433a6f7ea0df
Author: Matthias Bisping <matthias.bisping@iqser.com>
Date:   Wed Mar 16 11:32:37 2022 +0100

    changed error handling for prometheus endpoint

commit 894a6b5d4c7026b9a703a8b2cd70641e7ed7323b
Author: Matthias Bisping <matthias.bisping@iqser.com>
Date:   Wed Mar 16 11:16:39 2022 +0100

    fixed definition order broken by auto-refac; reduced prometheus code to only forwarding to analysis endpoint

commit 9f3c884c75289c7b558e8cc8fb0154b5ddd3a323
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date:   Wed Mar 16 08:59:45 2022 +0100

    black is back

commit 5950799e03f3578ff58f19430494c6b0c223c0f6
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date:   Wed Mar 16 08:59:11 2022 +0100

    add prometheus memory peak monitoring, combine report with analysis report
2022-03-16 11:37:37 +01:00

66 lines
2.1 KiB
Python

import json
import pika
from pyinfra.config import CONFIG
from pyinfra.storage.storages import get_s3_storage
def read_connection_params():
    """Build pika connection parameters from the RabbitMQ section of CONFIG.

    Returns:
        pika.ConnectionParameters with host, port, heartbeat and plain
        user/password credentials taken from ``CONFIG.rabbitmq``.
    """
    return pika.ConnectionParameters(
        host=CONFIG.rabbitmq.host,
        port=CONFIG.rabbitmq.port,
        heartbeat=CONFIG.rabbitmq.heartbeat,
        credentials=pika.PlainCredentials(
            CONFIG.rabbitmq.user, CONFIG.rabbitmq.password
        ),
    )
def make_channel(connection) -> pika.adapters.blocking_connection.BlockingChannel:
    """Open a channel on *connection*, limited to one unacknowledged message.

    ``prefetch_count=1`` makes the broker deliver at most one message at a
    time until the previous one is acked — fair dispatch for workers.
    """
    chan = connection.channel()
    chan.basic_qos(prefetch_count=1)
    return chan
def declare_queue(channel, queue: str):
    """Declare *queue* on *channel*, wired to the configured dead-letter queue.

    Rejected/expired messages are re-routed via the default exchange ("")
    to ``CONFIG.rabbitmq.queues.dead_letter``.
    """
    dead_letter_args = {
        "x-dead-letter-exchange": "",
        "x-dead-letter-routing-key": CONFIG.rabbitmq.queues.dead_letter,
    }
    return channel.queue_declare(
        queue=queue, auto_delete=False, arguments=dead_letter_args
    )
def make_connection() -> pika.BlockingConnection:
    """Create a blocking RabbitMQ connection from the CONFIG-derived parameters."""
    return pika.BlockingConnection(read_connection_params())
def build_message_bodies():
    """Yield one JSON-encoded request body per object in the configured bucket.

    Object names are expected to look like ``<dossierId>/<fileId>.<ext...>``
    (everything after the first dot is discarded) — TODO confirm against the
    storage layout.
    """
    storage = get_s3_storage()
    for _bucket, object_name in storage.get_all_object_names(CONFIG.storage.bucket):
        dossier_id, file_id = object_name.split(".")[0].split("/")
        payload = {
            "dossierId": dossier_id,
            "fileId": file_id,
            "targetFileExtension": "ORIGIN.pdf.gz",
            "responseFileExtension": "detr.json.gz",
        }
        yield json.dumps(payload).encode()
if __name__ == "__main__":
    # Publish one analysis request per stored PDF, then print responses as
    # they arrive.  The consume loop blocks indefinitely, so wrap everything
    # in try/finally: the original leaked the TCP connection on error/Ctrl-C.
    connection = make_connection()
    try:
        channel = make_channel(connection)
        declare_queue(channel, CONFIG.rabbitmq.queues.input)
        declare_queue(channel, CONFIG.rabbitmq.queues.output)
        for body in build_message_bodies():
            channel.basic_publish("", CONFIG.rabbitmq.queues.input, body)
            print(f"Put {body} on {CONFIG.rabbitmq.queues.input}")
        # Ack each response individually so prefetch_count=1 keeps flowing.
        for method_frame, _, body in channel.consume(queue=CONFIG.rabbitmq.queues.output):
            print(f"Received {json.loads(body)}")
            channel.basic_ack(method_frame.delivery_tag)
    finally:
        connection.close()