Merge in RR/pyinfra from RED-6205-monitoring to master
Squashed commit of the following:
commit 529cedfd7c065a3f7364e4596b923f25f0af76b5
Author: Matthias Bisping <matthias.bisping@axbit.com>
Date: Thu Mar 16 14:57:26 2023 +0100
Remove unnecessary default argument to dict.get
commit b718531f568e89df77cc05039e5e7afe7111b9a4
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Thu Mar 16 14:56:50 2023 +0100
refactor
commit c039b0c25a6cd2ad2a72d237d0930c484c8e427c
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Thu Mar 16 13:22:17 2023 +0100
increase package version to reflect the recent changes
commit 0a983a4113f25cd692b68869e1f33ffbf7efc6f0
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Thu Mar 16 13:16:39 2023 +0100
remove processing result conversion to a list, since the ner-prediction service actually returns a dictionary. It is now expected that the result is sized (to perform the monitoring) and JSON-dumpable (to upload it).
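In other words, downstream code only relies on len() and json.dumps() working on the result. A minimal illustration of that contract follows; the function and argument names are made up for this sketch and are not taken from the package.

import json
from collections.abc import Sized


def record_and_serialize(result: Sized) -> tuple:
    # The ner-prediction result may be a dict or a list; all that matters is
    # that len() works (for the monitoring metric) and json.dumps() works
    # (for the upload).
    element_count = len(result)
    serialized = json.dumps(result)
    return element_count, serialized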
commit 541bf321410471dc09a354669b2778402286c09f
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Thu Mar 16 12:48:07 2023 +0100
remove no longer needed requirements
commit cfa182985d989a5b92a9a069a603daee72f37d49
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Thu Mar 16 11:14:58 2023 +0100
refactor payload formatting
- introduce a PayloadFormatter class for better type hinting and bundling
of functionality (a rough sketch follows this list)
- parametrize payload formatting so the PayloadProcesser can adapt
better to different services/products
- move file extension parsing to its own module
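Roughly, the direction seems to be something like the sketch below. Apart from the PayloadFormatter name, the attribute and key names are assumptions inferred from the test at the end of this page, not the actual implementation.

from dataclasses import dataclass
from typing import Any, Dict


@dataclass
class PayloadFormatter:
    # Parametrized via config so the processor can adapt to different
    # services/products; "data" matches the key asserted in the test below.
    result_key: str = "data"

    def format(self, request_payload: Dict[str, Any], result: Any) -> Dict[str, Any]:
        # Echo the incoming payload and attach the processing result.
        return {**request_payload, self.result_key: result}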
commit f57663b86954b7164eeb6db013d862af88ec4584
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Wed Mar 15 12:22:08 2023 +0100
refactor payload parsing
- introduce QueueMessagePayloadParser for generality
and type hinting (see the sketch after this list)
- refactor file extension parsing algorithm
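Presumably something along these lines; the field and key names are taken from the request payload fixture in the test further down, while the class shapes themselves are assumptions rather than the package's actual API.

from dataclasses import dataclass
from typing import Any, Dict


@dataclass(frozen=True)
class QueueMessagePayload:
    # Value bag: parsing happens once, up front, and downstream code
    # works with typed fields instead of a raw dict.
    dossier_id: str
    file_id: str
    target_file_extension: str
    response_file_extension: str


class QueueMessagePayloadParser:
    def parse(self, raw: Dict[str, Any]) -> QueueMessagePayload:
        # Keys mirror the request payload used in the test fixtures below.
        return QueueMessagePayload(
            dossier_id=raw["dossierId"],
            file_id=raw["fileId"],
            target_file_extension=raw["targetFileExtension"],
            response_file_extension=raw["responseFileExtension"],
        )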
commit 713fb4a0dddecf5442ceda3988444d9887869dcf
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 14 17:07:02 2023 +0100
fix tests
commit a22ecf7ae93bc0bec235fba3fd9cbf6c1778aa13
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 14 16:31:26 2023 +0100
refactor payload parsing
- parameterize the file and compression types allowed for files to download
and upload via config (see the sketch after this list)
- make a real value bag out of QueueMessagePayload and do the parsing
beforehand
- refactor the file extension parser to be more robust
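The config part of this might look roughly like the following. The class name, field names, and default values are assumptions made for illustration; only the idea of whitelisting file and compression types via config comes from the commit message.

from dataclasses import dataclass
from typing import FrozenSet, Optional


@dataclass(frozen=True)
class TransferConfig:
    # Which file and compression types may be downloaded or uploaded;
    # the concrete values would come from the service configuration.
    allowed_file_types: FrozenSet[str] = frozenset({"json"})
    allowed_compression_types: FrozenSet[str] = frozenset({"gz"})

    def is_allowed(self, file_type: str, compression: Optional[str]) -> bool:
        return file_type in self.allowed_file_types and (
            compression is None or compression in self.allowed_compression_types
        )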
commit 50b578d054ca47a94c907f5f8b585eca7ed626ac
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 14 13:21:32 2023 +0100
add monitoring
- add an optional Prometheus monitor that tracks the average processing
time of a service per relevant parameter, which is at this point defined
via the number of resulting elements (see the sketch after this item)
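A hedged sketch of what such a monitor could look like with prometheus_client; the metric name, label, and port are illustrative assumptions, not the package's actual implementation. The average processing time per label can then be derived from the histogram's _sum and _count series.

import time

from prometheus_client import Histogram, start_http_server

# Processing time, labelled by the "relevant parameter", here the number
# of resulting elements (metric and label names are illustrative).
PROCESSING_SECONDS = Histogram(
    "payload_processing_seconds",
    "Time spent processing a single payload",
    labelnames=("result_size",),
)


def monitored(process, payload):
    start = time.perf_counter()
    result = process(payload)
    PROCESSING_SECONDS.labels(result_size=str(len(result))).observe(
        time.perf_counter() - start
    )
    return result


if __name__ == "__main__":
    start_http_server(9090)  # illustrative port; exposes the metrics over HTTP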
commit de525e7fa2f846f7fde5b9a4b466039238da10cd
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 14 12:57:24 2023 +0100
fix bug where the file extension parser did not work if the file endings have prefixes
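One robust way to handle endings with prefixes is to match the known endings from the end of the name instead of splitting on the first dot. This is only an illustration of the problem, not the parser the commit actually changed; the allowed endings below are made up.

from typing import Iterable


def parse_file_ending(file_name: str, allowed_endings: Iterable[str] = ("json", "json.gz")) -> str:
    # Try the longest endings first so "report.2023.json.gz" resolves to
    # "json.gz" even though the name has extra dot-separated prefixes.
    for ending in sorted(allowed_endings, key=len, reverse=True):
        if file_name.endswith("." + ending):
            return ending
    raise ValueError(f"Unsupported file ending: {file_name!r}")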
76 lines, 2.3 KiB, Python
import gzip
import json
from operator import itemgetter

import pytest
import requests

from pyinfra.payload_processing.processor import make_payload_processor


@pytest.fixture(scope="session")
def file_processor_mock():
    # Stand-in for a real file processor: wraps the decoded JSON document in a list.
    def inner(json_file: dict):
        return [json_file]

    return inner


@pytest.fixture
def target_file():
    # Gzipped JSON document that the processor is expected to download from storage.
    contents = {"numberOfPages": 10, "content1": "value1", "content2": "value2"}
    return gzip.compress(json.dumps(contents).encode("utf-8"))


@pytest.fixture
def file_names(request_payload):
    # Object keys of the file to process and of the uploaded result, derived from the request payload.
    dossier_id, file_id, target_suffix, response_suffix = itemgetter(
        "dossierId",
        "fileId",
        "targetFileExtension",
        "responseFileExtension",
    )(request_payload)
    return f"{dossier_id}/{file_id}.{target_suffix}", f"{dossier_id}/{file_id}.{response_suffix}"


@pytest.fixture(scope="session")
def payload_processor(file_processor_mock, processing_config):
    yield make_payload_processor(file_processor_mock, processing_config)


@pytest.mark.parametrize("client_name", ["s3"], scope="session")
@pytest.mark.parametrize("monitoring_enabled", [True, False], scope="session")
class TestPayloadProcessor:
    def test_payload_processor_yields_correct_response_and_uploads_result(
        self,
        payload_processor,
        storage,
        bucket_name,
        request_payload,
        response_payload,
        target_file,
        file_names,
    ):
        # Given the target file in the bucket, the processor must return the
        # expected response payload and upload the request payload enriched
        # with the processing result.
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, file_names[0], target_file)
        response = payload_processor(request_payload)

        assert response == response_payload

        data_received = storage.get_object(bucket_name, file_names[1])

        assert json.loads(gzip.decompress(data_received).decode("utf-8")) == {
            **request_payload,
            "data": [json.loads(gzip.decompress(target_file).decode("utf-8"))],
        }

    def test_catching_of_processing_failure(self, payload_processor, storage, bucket_name, request_payload):
        # Without the target file in the bucket, processing is expected to fail.
        storage.clear_bucket(bucket_name)
        with pytest.raises(Exception):
            payload_processor(request_payload)

    def test_prometheus_endpoint_is_available(self, processing_config):
        # The metrics endpoint exposed by the optional Prometheus monitor must be reachable.
        resp = requests.get(
            f"http://{processing_config.prometheus_host}:{processing_config.prometheus_port}/prometheus"
        )
        assert resp.status_code == 200