Update tests
All components from payload processing downwards are now tested. Tests that depend on docker compose are disabled by default because the containers take too long to start during development. The queue manager tests are not stable; a refactoring towards inversion of control is urgently needed to make those components properly testable. The storage tests are stable and should be run once before each release, which should be implemented via the CI script. Also adds the tenant ID and operation kwargs, if present, to the storage and queue response bodies.
parent e580a66347
commit c09476cfae
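As a rough sketch of the inversion-of-control refactoring called for above (all names below are hypothetical and not part of this commit): instead of the queue manager building its broker connection internally, the connection factory is injected, so a unit test can substitute a fake and never needs a running broker.

    from typing import Callable, List, Tuple


    class Connection:
        # Stand-in for a real broker connection; only the interface matters here.
        def publish(self, queue: str, body: bytes) -> None:
            raise NotImplementedError


    class QueueManager:
        # The connection factory is injected instead of being built internally,
        # which is what makes the class testable without docker compose.
        def __init__(self, connection_factory: Callable[[], Connection]):
            self._connection_factory = connection_factory

        def publish(self, queue: str, body: bytes) -> None:
            self._connection_factory().publish(queue, body)


    class FakeConnection(Connection):
        def __init__(self) -> None:
            self.published: List[Tuple[str, bytes]] = []

        def publish(self, queue: str, body: bytes) -> None:
            self.published.append((queue, body))


    def test_publish_uses_injected_connection():
        fake = FakeConnection()
        manager = QueueManager(lambda: fake)
        manager.publish("jobs", b"payload")
        assert fake.published == [("jobs", b"payload")]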
@@ -149,20 +149,28 @@ def format_service_processing_result_for_storage(payload: QueueMessagePayload, r

 @format_service_processing_result_for_storage.register(LegacyQueueMessagePayload)
 def _(payload: LegacyQueueMessagePayload, result: Sized) -> dict:
+    processing_kwargs = payload.processing_kwargs or {}
+    x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {}
     return {
         "dossierId": payload.dossier_id,
         "fileId": payload.file_id,
         "targetFileExtension": payload.target_file_extension,
         "responseFileExtension": payload.response_file_extension,
+        **x_tenant_id,
+        **processing_kwargs,
         "data": result,
     }


 @format_service_processing_result_for_storage.register(QueueMessagePayload)
 def _(payload: QueueMessagePayload, result: Sized) -> dict:
+    processing_kwargs = payload.processing_kwargs or {}
+    x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {}
     return {
         "targetFilePath": payload.target_file_path,
         "responseFilePath": payload.response_file_path,
+        **x_tenant_id,
+        **processing_kwargs,
         "data": result,
     }
@@ -173,13 +181,19 @@ def format_to_queue_message_response_body(queue_message_payload: QueueMessagePay

 @format_to_queue_message_response_body.register(LegacyQueueMessagePayload)
-def _(queue_message_payload: LegacyQueueMessagePayload) -> dict:
-    return {"dossierId": queue_message_payload.dossier_id, "fileId": queue_message_payload.file_id}
+def _(payload: LegacyQueueMessagePayload) -> dict:
+    processing_kwargs = payload.processing_kwargs or {}
+    x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {}
+    return {"dossierId": payload.dossier_id, "fileId": payload.file_id, **x_tenant_id, **processing_kwargs}


 @format_to_queue_message_response_body.register(QueueMessagePayload)
-def _(queue_message_payload: QueueMessagePayload) -> dict:
+def _(payload: QueueMessagePayload) -> dict:
+    processing_kwargs = payload.processing_kwargs or {}
+    x_tenant_id = {"X-TENANT-ID": payload.x_tenant_id} if payload.x_tenant_id else {}
     return {
-        "targetFilePath": queue_message_payload.target_file_path,
-        "responseFilePath": queue_message_payload.response_file_path,
+        "targetFilePath": payload.target_file_path,
+        "responseFilePath": payload.response_file_path,
+        **x_tenant_id,
+        **processing_kwargs,
     }
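For illustration, using the same values as the parametrized tests further down (this snippet itself is not part of the diff and assumes the imports of the module above), the reworked formatter now folds the tenant header and any processing kwargs into the queue response body:

    payload = QueueMessagePayload(
        x_tenant_id="klaus",
        target_file_type="json",
        target_compression_type="gz",
        response_file_type="json",
        response_compression_type="gz",
        target_file_path="test/test.target.json.gz",
        response_file_path="test/test.response.json.gz",
        processing_kwargs={"operation": "test"},
    )
    format_to_queue_message_response_body(payload)
    # -> {
    #        "targetFilePath": "test/test.target.json.gz",
    #        "responseFilePath": "test/test.response.json.gz",
    #        "X-TENANT-ID": "klaus",
    #        "operation": "test",
    #    }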
@@ -4,8 +4,12 @@ from kn_utils.logging import logger
 from typing import Tuple

 from pyinfra.config import Config
-from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_info_from_endpoint, StorageInfo, \
-    get_storage_from_storage_info
+from pyinfra.storage.storage_info import (
+    get_storage_info_from_config,
+    get_storage_info_from_endpoint,
+    StorageInfo,
+    get_storage_from_storage_info,
+)
 from pyinfra.storage.storages.interface import Storage
@@ -21,10 +25,10 @@ class StorageProvider:
         )

     def __call__(self, *args, **kwargs):
-        return self.connect(*args, **kwargs)
+        return self._connect(*args, **kwargs)

     @lru_cache(maxsize=32)
-    def connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]:
+    def _connect(self, x_tenant_id=None) -> Tuple[Storage, StorageInfo]:
         storage_info = self._get_storage_info(x_tenant_id)
         storage_connection = get_storage_from_storage_info(storage_info)
         return storage_connection, storage_info
@@ -40,3 +44,12 @@ class StorageProvider:
         logger.trace(f"{asdict(storage_info)}")

         return storage_info
+
+
+class StorageProviderMock(StorageProvider):
+    def __init__(self, storage, storage_info):
+        self.storage = storage
+        self.storage_info = storage_info
+
+    def __call__(self, *args, **kwargs):
+        return self.storage, self.storage_info
pyinfra/storage/storages/mock.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from pyinfra.storage.storages.interface import Storage


class StorageMock(Storage):
    def __init__(self, data: bytes = None, file_name: str = None, bucket: str = None):
        self.data = data
        self.file_name = file_name
        self.bucket = bucket

    def make_bucket(self, bucket_name):
        self.bucket = bucket_name

    def has_bucket(self, bucket_name):
        return self.bucket == bucket_name

    def put_object(self, bucket_name, object_name, data):
        self.bucket = bucket_name
        self.file_name = object_name
        self.data = data

    def exists(self, bucket_name, object_name):
        return self.bucket == bucket_name and self.file_name == object_name

    def get_object(self, bucket_name, object_name):
        return self.data

    def get_all_objects(self, bucket_name):
        raise NotImplementedError

    def clear_bucket(self, bucket_name):
        self.bucket = None
        self.file_name = None
        self.data = None

    def get_all_object_names(self, bucket_name):
        raise NotImplementedError
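Together with StorageProviderMock above, this mock lets the payload processor tests run entirely in memory; a minimal wiring, mirroring the fixtures in tests/unit_tests/processor_test.py further down (the data bytes here are a placeholder):

    from pyinfra.storage.storage_info import StorageInfo
    from pyinfra.storage.storage_provider import StorageProviderMock
    from pyinfra.storage.storages.mock import StorageMock

    storage = StorageMock(data=b"...", file_name="test/test.target.json.gz", bucket="test_bucket")
    provider = StorageProviderMock(storage, StorageInfo("test_bucket"))
    storage_connection, storage_info = provider()  # no network, no docker compose needed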
pytest.ini (new file, 5 lines)
@@ -0,0 +1,5 @@
[pytest]
norecursedirs = tests/tests_with_docker_compose
; The storage and queue manager tests under this directory are disabled, since docker compose takes too long to start for regular test runs.
; They should still be run once before a release.
; The queue manager tests are additionally disabled because they are not stable.
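With norecursedirs set, pytest simply skips these suites during normal collection; they can still be run explicitly, for example with pytest tests/tests_with_docker_compose, which is presumably what the pre-release CI step mentioned in the commit message would invoke.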
@@ -1,83 +1,142 @@
-import logging
-import time
-from pathlib import Path
-
 import gzip
 import json
 import pytest
-import testcontainers.compose
-
-from pyinfra.config import get_config
-from pyinfra.storage import get_storage_from_config
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-TESTS_DIR = Path(__file__).resolve().parents[0]
-
-
-@pytest.fixture(scope="session", autouse=True)
-def docker_compose(sleep_seconds=30):
-    """Note: `autouse` can be set to `False` while working on the code to speed up the testing. In that case, run
-    `docker-compose up` in the tests directory manually before running the tests.
-    """
-    logger.info(f"Starting docker containers with {TESTS_DIR}/docker-compose.yml...")
-    compose = testcontainers.compose.DockerCompose(TESTS_DIR, compose_file_name="docker-compose.yml")
-    compose.start()
-    logger.info(f"Sleeping for {sleep_seconds} seconds to wait for containers to finish startup... ")
-    time.sleep(sleep_seconds)
-    yield compose
-    compose.stop()
-
-
-@pytest.fixture(scope="session")
-def test_storage_config(storage_backend, bucket_name, monitoring_enabled):
-    config = get_config()
-    config.storage_backend = storage_backend
-    config.storage_bucket = bucket_name
-    config.storage_azureconnectionstring = "DefaultEndpointsProtocol=https;AccountName=iqserdevelopment;AccountKey=4imAbV9PYXaztSOMpIyAClg88bAZCXuXMGJG0GA1eIBpdh2PlnFGoRBnKqLy2YZUSTmZ3wJfC7tzfHtuC6FEhQ==;EndpointSuffix=core.windows.net"
-    config.monitoring_enabled = monitoring_enabled
-    config.prometheus_metric_prefix = "test"
-    config.prometheus_port = 8080
-    config.prometheus_host = "0.0.0.0"
-    return config
-
-
-@pytest.fixture(scope="session")
-def test_queue_config():
-    config = get_config()
-    config.rabbitmq_connection_sleep = 2
-    config.rabbitmq_heartbeat = 4
-    return config
+from pyinfra.payload_processing.payload import LegacyQueueMessagePayload, QueueMessagePayload


 @pytest.fixture
-def payload(x_tenant_id):
+def legacy_payload(x_tenant_id, optional_processing_kwargs):
     x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
     return {
         "dossierId": "test",
         "fileId": "test",
-        "targetFileExtension": "json.gz",
-        "responseFileExtension": "json.gz",
+        "targetFileExtension": "target.json.gz",
+        "responseFileExtension": "response.json.gz",
         **x_tenant_entry,
+        **optional_processing_kwargs,
     }


-@pytest.fixture(scope="session")
-def response_payload():
+@pytest.fixture
+def target_file_path():
+    return "test/test.target.json.gz"
+
+
+@pytest.fixture
+def response_file_path():
+    return "test/test.response.json.gz"
+
+
+@pytest.fixture
+def payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path):
+    x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
+    return {
+        "targetFilePath": target_file_path,
+        "responseFilePath": response_file_path,
+        **x_tenant_entry,
+        **optional_processing_kwargs,
+    }
+
+
+@pytest.fixture
+def legacy_queue_response_payload(x_tenant_id, optional_processing_kwargs):
+    x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
+    return {
+        "dossierId": "test",
+        "fileId": "test",
+        **x_tenant_entry,
+        **optional_processing_kwargs,
+    }


-@pytest.fixture(scope="session")
-def storage(test_storage_config):
-    logger.debug("Setup for storage")
-    storage = get_storage_from_config(test_storage_config)
-    storage.make_bucket(test_storage_config.storage_bucket)
-    storage.clear_bucket(test_storage_config.storage_bucket)
-    yield storage
-    logger.debug("Teardown for storage")
-    try:
-        storage.clear_bucket(test_storage_config.storage_bucket)
-    except:
-        pass
+@pytest.fixture
+def queue_response_payload(x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path):
+    x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
+    return {
+        "targetFilePath": target_file_path,
+        "responseFilePath": response_file_path,
+        **x_tenant_entry,
+        **optional_processing_kwargs,
+    }
+
+
+@pytest.fixture
+def legacy_storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json):
+    x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
+    return {
+        "dossierId": "test",
+        "fileId": "test",
+        "targetFileExtension": "target.json.gz",
+        "responseFileExtension": "response.json.gz",
+        **x_tenant_entry,
+        **optional_processing_kwargs,
+        "data": processing_result_json,
+    }
+
+
+@pytest.fixture
+def storage_payload(x_tenant_id, optional_processing_kwargs, processing_result_json, target_file_path, response_file_path):
+    x_tenant_entry = {"X-TENANT-ID": x_tenant_id} if x_tenant_id else {}
+    optional_processing_kwargs = optional_processing_kwargs or {}
+    return {
+        "targetFilePath": target_file_path,
+        "responseFilePath": response_file_path,
+        **x_tenant_entry,
+        **optional_processing_kwargs,
+        "data": processing_result_json,
+    }
+
+
+@pytest.fixture
+def legacy_parsed_payload(
+    x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path
+) -> LegacyQueueMessagePayload:
+    return LegacyQueueMessagePayload(
+        dossier_id="test",
+        file_id="test",
+        x_tenant_id=x_tenant_id,
+        target_file_extension="target.json.gz",
+        response_file_extension="response.json.gz",
+        target_file_type="json",
+        target_compression_type="gz",
+        response_file_type="json",
+        response_compression_type="gz",
+        target_file_path=target_file_path,
+        response_file_path=response_file_path,
+        processing_kwargs=optional_processing_kwargs or {},
+    )
+
+
+@pytest.fixture
+def parsed_payload(
+    x_tenant_id, optional_processing_kwargs, target_file_path, response_file_path
+) -> QueueMessagePayload:
+    return QueueMessagePayload(
+        x_tenant_id=x_tenant_id,
+        target_file_type="json",
+        target_compression_type="gz",
+        response_file_type="json",
+        response_compression_type="gz",
+        target_file_path=target_file_path,
+        response_file_path=response_file_path,
+        processing_kwargs=optional_processing_kwargs or {},
+    )
+
+
+@pytest.fixture
+def target_json_file() -> bytes:
+    data = {"target": "test"}
+    enc_data = json.dumps(data).encode("utf-8")
+    compr_data = gzip.compress(enc_data)
+    return compr_data
+
+
+@pytest.fixture
+def processing_result_json() -> dict:
+    return {"response": "test"}
@@ -1,48 +0,0 @@
from functools import lru_cache

import pytest


def func(callback):
    return callback()


@pytest.fixture()
def fn(maxsize):
    return lru_cache(maxsize)(func)


@pytest.fixture(params=[1, 2, 5])
def maxsize(request):
    return request.param


class Callback:
    def __init__(self, x):
        self.initial_x = x
        self.x = x

    def __call__(self, *args, **kwargs):
        self.x += 1
        return self.x

    def __hash__(self):
        return hash(self.initial_x)


def test_adding_to_cache_within_maxsize_does_not_overwrite(fn, maxsize):
    c = Callback(0)
    for i in range(maxsize):
        assert fn(c) == 1
    assert fn(c) == 1


def test_adding_to_cache_more_than_maxsize_does_overwrite(fn, maxsize):

    callbacks = [Callback(i) for i in range(maxsize)]

    for i in range(maxsize):
        assert fn(callbacks[i]) == i + 1

    assert fn(Callback(maxsize)) == maxsize + 1
    assert fn(callbacks[0]) == 2
@@ -1,54 +0,0 @@
import pytest

from pyinfra.payload_processing.payload import (
    QueueMessagePayloadParser,
    LegacyQueueMessagePayload,
)
from pyinfra.utils.file_extension_parsing import make_file_extension_parser


@pytest.fixture
def expected_parsed_payload(x_tenant_id):
    return LegacyQueueMessagePayload(
        dossier_id="test",
        file_id="test",
        x_tenant_id=x_tenant_id,
        target_file_extension="json.gz",
        response_file_extension="json.gz",
        target_file_type="json",
        target_compression_type="gz",
        response_file_type="json",
        response_compression_type="gz",
        target_file_path="test/test.json.gz",
        response_file_path="test/test.json.gz",
        processing_kwargs={},
    )


@pytest.fixture
def file_extension_parser(allowed_file_types, allowed_compression_types):
    return make_file_extension_parser(allowed_file_types, allowed_compression_types)


@pytest.fixture
def payload_parser(file_extension_parser):
    return QueueMessagePayloadParser(file_extension_parser, allowed_processing_parameters=["operation"])


@pytest.mark.parametrize("allowed_file_types,allowed_compression_types", [(["json", "pdf"], ["gz"])])
class TestPayload:
    @pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
    def test_payload_is_parsed_correctly(self, payload_parser, payload, expected_parsed_payload):
        payload = payload_parser(payload)
        assert payload == expected_parsed_payload

    @pytest.mark.parametrize(
        "extension,expected",
        [
            ("json.gz", ("json", "gz")),
            ("json", ("json", None)),
            ("prefix.json.gz", ("json", "gz")),
        ],
    )
    def test_parse_file_extension(self, file_extension_parser, extension, expected):
        assert file_extension_parser(extension) == expected
@@ -1,76 +0,0 @@
import gzip
import json
from operator import itemgetter

import pytest
import requests

from pyinfra.payload_processing.processor import make_payload_processor


@pytest.fixture
def target_file():
    contents = {"numberOfPages": 10, "content1": "value1", "content2": "value2"}
    return gzip.compress(json.dumps(contents).encode("utf-8"))


@pytest.fixture
def file_names(payload):
    dossier_id, file_id, target_suffix, response_suffix = itemgetter(
        "dossierId",
        "fileId",
        "targetFileExtension",
        "responseFileExtension",
    )(payload)
    return f"{dossier_id}/{file_id}.{target_suffix}", f"{dossier_id}/{file_id}.{response_suffix}"


@pytest.fixture(scope="session")
def payload_processor(test_storage_config):
    def file_processor_mock(json_file: dict):
        return [json_file]

    yield make_payload_processor(file_processor_mock, test_storage_config)


@pytest.mark.parametrize("storage_backend", ["s3"], scope="session")
@pytest.mark.parametrize("bucket_name", ["testbucket"], scope="session")
@pytest.mark.parametrize("monitoring_enabled", [True, False], scope="session")
@pytest.mark.parametrize("x_tenant_id", [None])
class TestPayloadProcessor:
    def test_payload_processor_yields_correct_response_and_uploads_result(
        self,
        payload_processor,
        storage,
        bucket_name,
        payload,
        response_payload,
        target_file,
        file_names,
    ):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, file_names[0], target_file)
        response = payload_processor(payload)

        assert response == response_payload

        data_received = storage.get_object(bucket_name, file_names[1])

        assert json.loads((gzip.decompress(data_received)).decode("utf-8")) == {
            **payload,
            "data": [json.loads(gzip.decompress(target_file).decode("utf-8"))],
        }

    def test_catching_of_processing_failure(self, payload_processor, storage, bucket_name, payload):
        storage.clear_bucket(bucket_name)
        with pytest.raises(Exception):
            payload_processor(payload)

    def test_prometheus_endpoint_is_available(
        self, test_storage_config, monitoring_enabled, storage_backend, x_tenant_id
    ):
        if monitoring_enabled:
            resp = requests.get(
                f"http://{test_storage_config.prometheus_host}:{test_storage_config.prometheus_port}/prometheus"
            )
            assert resp.status_code == 200
tests/unit_tests/file_extension_parsing_test.py (new file, 32 lines)
@@ -0,0 +1,32 @@
import pytest

from pyinfra.utils.file_extension_parsing import make_file_extension_parser


@pytest.fixture
def file_extension_parser(file_types, compression_types):
    return make_file_extension_parser(file_types, compression_types)


@pytest.mark.parametrize(
    "file_path,file_types,compression_types,expected_file_extension,expected_compression_extension",
    [
        ("test.txt", ["txt"], ["gz"], "txt", None),
        ("test.txt.gz", ["txt"], ["gz"], "txt", "gz"),
        ("test.txt.gz", [], [], None, None),
        ("test.txt.gz", ["txt"], [], "txt", None),
        ("test.txt.gz", [], ["gz"], None, "gz"),
        ("test", ["txt"], ["gz"], None, None),
    ],
)
def test_file_extension_parsing(
    file_extension_parser,
    file_path,
    file_types,
    compression_types,
    expected_file_extension,
    expected_compression_extension,
):
    file_extension, compression_extension = file_extension_parser(file_path)
    assert file_extension == expected_file_extension
    assert compression_extension == expected_compression_extension
tests/unit_tests/payload_test.py (new file, 48 lines)
@@ -0,0 +1,48 @@
import pytest

from pyinfra.config import get_config
from pyinfra.payload_processing.payload import (
    get_queue_message_payload_parser,
    format_to_queue_message_response_body,
    format_service_processing_result_for_storage,
)


@pytest.fixture
def payload_parser():
    config = get_config()
    return get_queue_message_payload_parser(config)


@pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}])
class TestPayloadParsing:
    def test_legacy_payload_parsing(self, payload_parser, legacy_payload, legacy_parsed_payload):
        parsed_payload = payload_parser(legacy_payload)
        assert parsed_payload == legacy_parsed_payload

    def test_payload_parsing(self, payload_parser, payload, parsed_payload):
        assert payload_parser(payload) == parsed_payload


@pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}])
class TestPayloadFormatting:
    def test_legacy_payload_formatting_for_response(self, legacy_parsed_payload, legacy_queue_response_payload):
        formatted_payload = format_to_queue_message_response_body(legacy_parsed_payload)
        assert formatted_payload == legacy_queue_response_payload

    def test_payload_formatting_for_response(self, parsed_payload, queue_response_payload):
        formatted_payload = format_to_queue_message_response_body(parsed_payload)
        assert formatted_payload == queue_response_payload

    def test_legacy_payload_formatting_for_storage(
        self, legacy_parsed_payload, processing_result_json, legacy_storage_payload
    ):
        formatted_payload = format_service_processing_result_for_storage(legacy_parsed_payload, processing_result_json)
        assert formatted_payload == legacy_storage_payload

    def test_payload_formatting_for_storage(self, parsed_payload, processing_result_json, storage_payload):
        formatted_payload = format_service_processing_result_for_storage(parsed_payload, processing_result_json)
        assert formatted_payload == storage_payload
tests/unit_tests/processor_test.py (new file, 81 lines)
@@ -0,0 +1,81 @@
import gzip
import json
import pytest

from pyinfra.config import get_config
from pyinfra.payload_processing.payload import get_queue_message_payload_parser
from pyinfra.payload_processing.processor import PayloadProcessor
from pyinfra.storage.storage_info import StorageInfo
from pyinfra.storage.storage_provider import StorageProviderMock
from pyinfra.storage.storages.mock import StorageMock


@pytest.fixture
def bucket_name():
    return "test_bucket"


@pytest.fixture
def storage_mock(target_json_file, target_file_path, bucket_name):
    storage = StorageMock(target_json_file, target_file_path, bucket_name)
    return storage


@pytest.fixture
def storage_info_mock(bucket_name):
    return StorageInfo(bucket_name)


@pytest.fixture
def data_processor_mock(processing_result_json):
    def inner(data, **kwargs):
        return processing_result_json

    return inner


@pytest.fixture
def payload_processor(storage_mock, storage_info_mock, data_processor_mock):
    storage_provider = StorageProviderMock(storage_mock, storage_info_mock)
    payload_parser = get_queue_message_payload_parser(get_config())
    return PayloadProcessor(storage_provider, payload_parser, data_processor_mock)


@pytest.mark.parametrize("x_tenant_id", [None, "klaus"])
@pytest.mark.parametrize("optional_processing_kwargs", [{}, {"operation": "test"}])
class TestPayloadProcessor:
    def test_payload_processor_yields_correct_response_and_uploads_result_for_legacy_message(
        self,
        payload_processor,
        storage_mock,
        bucket_name,
        response_file_path,
        legacy_payload,
        legacy_queue_response_payload,
        legacy_storage_payload,
    ):
        response = payload_processor(legacy_payload)

        assert response == legacy_queue_response_payload

        data_stored = storage_mock.get_object(bucket_name, response_file_path)

        assert json.loads(gzip.decompress(data_stored).decode()) == legacy_storage_payload

    def test_payload_processor_yields_correct_response_and_uploads_result(
        self,
        payload_processor,
        storage_mock,
        bucket_name,
        response_file_path,
        payload,
        queue_response_payload,
        storage_payload,
    ):
        response = payload_processor(payload)

        assert response == queue_response_payload

        data_stored = storage_mock.get_object(bucket_name, response_file_path)

        assert json.loads(gzip.decompress(data_stored).decode()) == storage_payload