52 lines
1.7 KiB
Python

import gzip
import json
import logging
import pytest
from funcy import notnone
from pyinfra.default_objects import get_visitor, get_queue_manager, get_storage, get_consumer, get_callback
from pyinfra.server.packing import bytes_to_string, string_to_bytes
from pyinfra.visitor import get_object_descriptor
from test.utils.input import adorn_data_with_storage_info
# Module-level logger named after this test module (standard logging idiom).
logger = logging.getLogger(__name__)
@pytest.mark.parametrize("item_type", ["string"])
@pytest.mark.parametrize("one_to_many", [True])
def test_serving(server_process, input_data_items, bucket_name, endpoint):
    """End-to-end serving round-trip: upload gzip-compressed payloads to
    storage, publish a request message per item, consume the requests, publish
    responses, then decode and log every object left in the bucket.

    All parameters are pytest fixtures supplied by the project's test setup.

    NOTE(review): this test currently makes no assertions — it only logs the
    decoded storage objects. TODO: assert on the expected response contents.
    """
    callback = get_callback(endpoint)
    visitor = get_visitor(callback)
    queue_manager = get_queue_manager()
    storage = get_storage()
    consumer = get_consumer(callback)

    # Start from a clean slate so residue from earlier runs cannot leak in.
    queue_manager.clear()
    storage.clear_bucket(bucket_name)

    # Upload each payload (gzip-compressed) and enqueue its request message.
    items = adorn_data_with_storage_info(input_data_items)
    for data, message in items:
        storage.put_object(**get_object_descriptor(message), data=gzip.compress(data))
        queue_manager.publish_request(message)

    # Drain the request queue; give up after 5s with no new messages.
    reqs = consumer.consume(inactivity_timeout=5)
    for item, req in zip(items, reqs):
        # Lazy %-style args so formatting is skipped when DEBUG is off.
        logger.debug("Processing item %s", item)
        queue_manager.publish_response(req, visitor)

    def decode(storage_item):
        # Decompress and normalize quoting so the payload parses as JSON:
        # escaped double quotes -> single quotes, remaining double quotes
        # stripped, then single quotes -> JSON double quotes. (Was bound to
        # `repr`, shadowing the builtin — renamed.)
        decoded = gzip.decompress(storage_item).decode().replace(r'\"', "'").replace('"', "").replace("'", '"')
        try:
            return json.loads(decoded)
        except json.decoder.JSONDecodeError:
            # Undecodable objects are dropped by the notnone filter below.
            return None

    # Iterate the filter directly — no need to materialize a throwaway list.
    for storage_item in filter(notnone, map(decode, storage.get_all_objects(bucket_name))):
        storage_item["data"] = string_to_bytes(storage_item["data"])
        # Replaces leftover debug print()s; keeps output under logging control.
        logger.debug("Decoded storage item: %s", storage_item)