refactoring; extended partial posting with image payload data

Matthias Bisping 2022-04-27 16:29:52 +02:00
parent fa4e5e5e0e
commit ab5839a126
11 changed files with 83 additions and 24 deletions

View File

@@ -6,8 +6,7 @@ from waitress import serve
 from pyinfra.config import CONFIG
-logger = logging.getLogger(__file__)
-logger.setLevel(CONFIG.service.logging_level)
+logger = logging.getLogger()
 def run_probing_webserver(app, host=None, port=None, mode=None):
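Throughout the touched modules, the per-module setup of `logging.getLogger(__name__)` plus `setLevel(CONFIG.service.logging_level)` is replaced by a bare `logging.getLogger()`, i.e. the root logger. A minimal sketch of the kind of one-time, central configuration this pattern relies on is shown below; the `configure_logging` helper and its placement are assumptions, not part of this commit:

```python
import logging

from pyinfra.config import CONFIG


def configure_logging():
    # Hypothetical one-time setup: configure the root logger so that every
    # module calling logging.getLogger() inherits its level and handlers.
    logging.basicConfig(
        level=CONFIG.service.logging_level,
        format="%(asctime)s %(name)s %(levelname)s %(message)s",
    )
    # Keep chatty third-party loggers quiet, mirroring the adapters below.
    logging.getLogger("pika").setLevel(logging.WARNING)
    logging.getLogger("azure").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
```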

View File

@@ -10,8 +10,7 @@ from pyinfra.queue.queue_manager.queue_manager import QueueHandle, QueueManager
 logger = logging.getLogger("pika")
 logger.setLevel(logging.WARNING)
-logger = logging.getLogger(__name__)
-logger.setLevel(CONFIG.service.logging_level)
+logger = logging.getLogger()
 def monkey_patch_queue_handle(channel, queue) -> QueueHandle:
@ -129,14 +128,14 @@ class PikaQueueManager(QueueManager):
     def consume_and_publish(self, visitor):
-        logger.info(f"Consuming with callback {visitor.callback.__name__}")
+        logger.info(f"Consuming input queue.")
         for message in self.consume():
             self.publish_response(message, visitor)
     def basic_consume_and_publish(self, visitor):
-        logger.info(f"Basic consuming with callback {visitor.callback.__name__}")
+        logger.info(f"Basic consuming input queue.")
         def callback(channel, frame, properties, body):
             message = (frame, properties, body)

View File

@@ -6,7 +6,7 @@ from azure.storage.blob import ContainerClient, BlobServiceClient
 from pyinfra.storage.adapters.adapter import StorageAdapter
-logger = logging.getLogger(__name__)
+logger = logging.getLogger()
 logging.getLogger("azure").setLevel(logging.WARNING)
 logging.getLogger("urllib3").setLevel(logging.WARNING)

View File

@@ -7,7 +7,7 @@ from minio import Minio
 from pyinfra.storage.adapters.adapter import StorageAdapter
-logger = logging.getLogger(__name__)
+logger = logging.getLogger()
 class S3StorageAdapter(StorageAdapter):

View File

@@ -20,8 +20,8 @@ from test.queue.queue_manager_mock import QueueManagerMock
 from test.storage.adapter_mock import StorageAdapterMock
 from test.storage.client_mock import StorageClientMock
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
+logging.basicConfig()
+logger = logging.getLogger()
 pytest_plugins = [
@@ -31,6 +31,11 @@ pytest_plugins = [
 ]
+@pytest.fixture(autouse=True)
+def mute_logger():
+    logger.setLevel(logging.CRITICAL + 1)
 @pytest.fixture(scope="session")
 def bucket_name():
     return "pyinfra-test-bucket"
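The new autouse `mute_logger` fixture raises the root logger's threshold above `CRITICAL`, so tests run silently by default. If the previous level ever needed to be restored after each test, a yield-based variant along these lines would do it (a sketch, not part of the commit):

```python
import logging

import pytest


@pytest.fixture(autouse=True)
def mute_logger():
    # Silence the root logger for the duration of a test, then restore it.
    root = logging.getLogger()
    previous_level = root.level
    root.setLevel(logging.CRITICAL + 1)
    yield
    root.setLevel(previous_level)
```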

View File

@@ -1,4 +1,5 @@
 import json
+import logging
 from itertools import chain
 from operator import methodcaller, itemgetter
 from typing import Iterable
@@ -9,12 +10,15 @@ from funcy import curry, rcompose, compose, lmap
 from test.utils.server import bytes_to_string, string_to_bytes
+logger = logging.getLogger("PIL.PngImagePlugin")
+logger.setLevel(logging.INFO)
 def lift(fn):
     return curry(map)(fn)
-def send_partial_request(url, input_data: Iterable[bytes], metadata):
+def post_partial(url, input_data: Iterable[bytes], metadata):
     def pack(data: bytes):
         package = {"data": bytes_to_string(data), "metadata": metadata}
         package = json.dumps(package).encode()
@@ -40,9 +44,12 @@ def send_partial_request(url, input_data: Iterable[bytes], metadata):
     return input_data_to_result_data((*input_data, b""))
-@pytest.mark.parametrize("item_type", ["string"])
-def test_sending_partial_request(url, data_items, metadata, operation):
-    assert list(send_partial_request(f"{url}/process", data_items, metadata)) == lmap(operation, data_items)
+@pytest.mark.parametrize("item_type", ["string", "image"])
+def test_sending_partial_request(url, data_items, metadata, operation, server_process):
+    expected = lmap(operation, data_items)
+    output = list(post_partial(f"{url}/process", data_items, metadata))
+    assert output == expected
 @pytest.fixture
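`post_partial` (renamed from `send_partial_request`) wraps every chunk in a JSON envelope via `pack`, using `bytes_to_string` from `test/utils/server.py`, whose body is not part of this diff. A plausible base64-based pair it could correspond to, shown only to make the envelope format concrete (an assumption, as is the example metadata):

```python
import base64
import json


def bytes_to_string(data: bytes) -> str:
    # Assumed encoding: raw bytes -> base64 -> ASCII text, so payloads survive JSON.
    return base64.b64encode(data).decode("ascii")


def string_to_bytes(text: str) -> bytes:
    # Assumed inverse of bytes_to_string.
    return base64.b64decode(text.encode("ascii"))


# Envelope built by pack() for a single chunk (metadata value is illustrative):
package = {"data": bytes_to_string(b"content0"), "metadata": {"source": "test"}}
payload = json.dumps(package).encode()
```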

View File

@@ -1,6 +1,12 @@
 from itertools import repeat
+import numpy as np
 import pytest
+from PIL import Image
+from funcy import lmap, compose
+from test.utils.image import image_to_bytes
 @pytest.fixture
@@ -14,4 +20,20 @@ def data(data_type, pdf):
 @pytest.fixture
 def data_items(item_type):
-    return list(repeat(b"content", 7))
+    if item_type == "string":
+        return [bytes(f"content{i}", encoding="utf8") for i in range(7)]
+    elif item_type == "image":
+        return images()
+    else:
+        raise ValueError(f"Unknown item type {item_type}")
+def array_to_image(array) -> Image.Image:
+    return Image.fromarray(np.uint8(array * 255), mode="RGB")
+def input_batch():
+    return np.random.random_sample(size=(7, 3, 30, 30))
+def images():
+    return lmap(compose(image_to_bytes, array_to_image), input_batch())
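The image branch of `data_items` turns a random `(7, 3, 30, 30)` float batch into seven PNG-encoded byte strings. A short usage sketch of the helpers added above, assuming they are importable from this conftest:

```python
import io

from PIL import Image

items = images()  # seven PNG payloads produced by the conftest helpers above
assert len(items) == 7
assert all(isinstance(item, bytes) for item in items)

# Decode one item to confirm it round-trips through PIL (illustration only).
first = Image.open(io.BytesIO(items[0]))
print(first.format, first.mode)  # PNG, RGB
```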

View File

@@ -1,3 +1,4 @@
+import io
 import json
 import socket
 from collections import deque
@@ -7,10 +8,12 @@ from operator import itemgetter
 import flask
 import pytest
 import requests
+from PIL import Image
 from funcy import retry, compose, repeatedly, lmap
 from waitress import serve
 from test.server import set_up_processing_server
+from test.utils.image import image_to_bytes
 from test.utils.server import bytes_to_string, string_to_bytes
@@ -41,9 +44,27 @@ def server(processor_fn):
 @pytest.fixture
-def processor_fn(item_type, data_items):
-    return make_processor(operation=lambda x: x.decode().upper().encode())
+def processor_fn(operation):
+    return make_processor(operation)
+@pytest.fixture
+def operation(item_type):
+    def rotate(im: bytes):
+        im = Image.open(io.BytesIO(im))
+        return image_to_bytes(im.rotate(90))
+    if item_type == "string":
+        return lambda s: s.decode().upper().encode()
+    elif item_type == "image":
+        return rotate
+    else:
+        raise ValueError(f"No operation specified for item type {item_type}")
 @pytest.fixture(params=["string"])
 def item_type(request):
     return request.param
 def make_processor(operation, buffer_size=3):
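Both branches of the new `operation` fixture return a `bytes -> bytes` callable, which is what `make_processor` applies to each buffered item. A standalone check of the image branch, using a synthetic 30x30 image rather than the fixtures (illustrative only):

```python
import io

from PIL import Image

from test.utils.image import image_to_bytes


def rotate(im: bytes) -> bytes:
    # Same shape as the fixture's image operation: decode, rotate 90 degrees, re-encode.
    image = Image.open(io.BytesIO(im))
    return image_to_bytes(image.rotate(90))


square = image_to_bytes(Image.new("RGB", (30, 30), color=(0, 128, 255)))
rotated = rotate(square)
assert isinstance(rotated, bytes)
assert Image.open(io.BytesIO(rotated)).size == (30, 30)
```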
@@ -51,7 +72,7 @@ def make_processor(operation, buffer_size=3):
         data = get_data_from_request(request)
-        items = repeatedly(buffer.pop, len(buffer)) if consume_buffer_now(buffer, data) else []
+        items = repeatedly(buffer.popleft, len(buffer)) if consume_buffer_now(buffer, data) else []
         result = lmap(compose(bytes_to_string, operation), items)
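Switching from `buffer.pop` to `buffer.popleft` drains the `collections.deque` buffer FIFO instead of LIFO, so chunks are processed in arrival order. That presumably matters now that the test data is position-sensitive (distinct `content{i}` strings and rotated images) rather than seven identical items. A quick illustration:

```python
from collections import deque

from funcy import repeatedly

chunks = [b"content0", b"content1", b"content2"]

# popleft() drains from the left (FIFO), preserving arrival order.
assert list(repeatedly(deque(chunks).popleft, 3)) == chunks

# pop() drains from the right (LIFO), reversing it.
assert list(repeatedly(deque(chunks).pop, 3)) == chunks[::-1]
```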
@@ -84,7 +105,7 @@ def server_ready(url):
     return response.status_code == 200
-@pytest.fixture(autouse=True, scope="function")
+@pytest.fixture(autouse=False, scope="function")
 def server_process(server, host_and_port, url):
     def get_server_process():
         return Process(target=serve, kwargs={"app": server, **host_and_port})

View File

@@ -1,5 +1,4 @@
-import gzip
 import json
 import logging
 from operator import itemgetter
@@ -9,8 +8,7 @@ from pyinfra.exceptions import ProcessingFailure
 from pyinfra.queue.consumer import Consumer
 from pyinfra.visitor import get_object_descriptor, ForwardingStrategy
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
+logger = logging.getLogger()
 @pytest.fixture(scope="session")

View File

@@ -2,8 +2,7 @@ import logging
 import pytest
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
+logger = logging.getLogger()
 @pytest.mark.parametrize("client_name", ["mock", "azure", "s3"], scope="session")

test/utils/image.py (new file, +9 lines)
View File

@@ -0,0 +1,9 @@
+import io
+from PIL import Image
+def image_to_bytes(image: Image.Image):
+    bio = io.BytesIO()
+    image.save(bio, format="png")
+    return bio.getvalue()
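For reference, a round trip through the new helper could look like this; the `bytes_to_image` inverse is an assumption for illustration and is not part of the added file:

```python
import io

from PIL import Image

from test.utils.image import image_to_bytes


def bytes_to_image(data: bytes) -> Image.Image:
    # Hypothetical inverse of image_to_bytes: decode PNG bytes back into a PIL image.
    return Image.open(io.BytesIO(data))


original = Image.new("RGB", (30, 30), color=(255, 0, 0))
restored = bytes_to_image(image_to_bytes(original))
assert restored.size == original.size
assert restored.mode == original.mode
```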