pyinfra/test/unit_tests/server/pipeline_test.py
Matthias Bisping 6cb13051eb fixed following bugs:
- upper() used yield instead of return
- metadata was not repeated when zipping with the results generator
- since the test metadata was an empty dict, the target data was always empty, because results were zipped with {}
- hence added a check that target lengths are > 0
- fixed the return value of the queued stream function dispatcher; it only returned the first item of 1 -> n results
2022-05-17 21:48:16 +02:00

100 lines
3.5 KiB
Python

import pytest
from funcy import rcompose, compose
from pyinfra.server.buffering.stream import FlatStreamBuffer
from pyinfra.server.client_pipeline import ClientPipeline
from pyinfra.server.dispatcher.dispatcher import Nothing
from pyinfra.server.dispatcher.dispatchers.queue import QueuedStreamFunctionDispatcher
from pyinfra.server.dispatcher.dispatchers.rest import RestDispatcher
from pyinfra.server.interpreter.interpreters.identity import IdentityInterpreter
from pyinfra.server.interpreter.interpreters.rest_callback import RestPickupStreamer
from pyinfra.server.packer.packers.rest import RestPacker
from pyinfra.server.packing import unpack
from pyinfra.server.receiver.receivers.identity import QueuedStreamFunctionReceiver
from pyinfra.server.receiver.receivers.rest import RestReceiver
from pyinfra.server.stream.queued_stream_function import QueuedStreamFunction
from pyinfra.server.utils import make_streamable_and_wrap_in_packing_logic
from pyinfra.utils.func import lift, llift
def test_mock_pipeline():
    """A ClientPipeline applies its stages left-to-right, matching funcy's rcompose."""
    square, add_two, halve, identity = (
        lift(fn) for fn in (lambda x: x ** 2, lambda x: x + 2, lambda x: x / 2, lambda x: x)
    )
    pipeline = ClientPipeline(square, add_two, halve, identity)
    items = [1, 2, 3]
    expected = list(rcompose(square, add_two, halve, identity)(items))
    assert list(pipeline(items)) == expected
@pytest.mark.parametrize("client_pipeline_type", ["rest", "basic"])
def test_pipeline(
    core_operation, client_pipeline, input_data_items, metadata, targets, item_type, one_to_many, n_items
):
    """Run the full client pipeline and compare the unpacked output against the targets."""
    if core_operation is Nothing:
        pytest.skip(f"No operation defined for parameter combination: {item_type=}, {one_to_many=}")
    run_and_unpack = compose(llift(unpack), client_pipeline)
    output = run_and_unpack(input_data_items, metadata)
    if n_items > 0:
        # Guards against the regression where empty test metadata made the
        # targets (and hence this comparison) trivially empty.
        assert len(output) > 0
    assert output == targets
@pytest.mark.parametrize("item_type", ["string"])
@pytest.mark.parametrize("n_items", [1])
def test_pipeline_is_lazy(input_data_items, metadata, basic_client_pipeline, buffer_size):
    """The pipeline must defer executing the stream function until output is consumed."""
    # NOTE(review): the `basic_client_pipeline` fixture is requested but not used
    # directly — confirm whether it is needed only for its setup side effects.
    probe = {"executed": False}

    def probed_stream_fn(*args, **kwargs):
        probe["executed"] = True
        return b"null", {}

    stream_function = make_streamable_and_wrap_in_packing_logic(probed_stream_fn, batched=False)
    client_pipeline = get_basic_client_pipeline(stream_function, buffer_size=buffer_size)
    output = client_pipeline(input_data_items, metadata)
    # Creating the output must not trigger the stream function ...
    assert not probe["executed"]
    # ... but draining it must.
    list(output)
    assert probe["executed"]
@pytest.fixture
def client_pipeline(rest_client_pipeline, basic_client_pipeline, client_pipeline_type):
    """Select the pipeline variant matching the `client_pipeline_type` parameter."""
    pipelines = {
        "rest": rest_client_pipeline,
        "basic": basic_client_pipeline,
    }
    # .get preserves the original implicit-None result for unknown types
    return pipelines.get(client_pipeline_type)
@pytest.fixture
def rest_client_pipeline(server_process, endpoint, rest_interpreter):
    """A pipeline talking over HTTP; requires a webserver to listen on `endpoint`."""
    packer = RestPacker()
    dispatcher = RestDispatcher(endpoint)
    receiver = RestReceiver()
    return ClientPipeline(packer, dispatcher, receiver, rest_interpreter)
@pytest.fixture
def basic_client_pipeline(endpoint, rest_interpreter, server_stream_function, buffer_size):
    """An in-process pipeline built around the server's stream function (no webserver)."""
    return get_basic_client_pipeline(server_stream_function, buffer_size=buffer_size)
def get_basic_client_pipeline(stream_function, buffer_size=3):
    """Build a ClientPipeline that dispatches through an in-process queued stream function.

    `stream_function` is wrapped in a FlatStreamBuffer (bounded by `buffer_size`)
    so that dispatching stays lazy and back-pressured.
    """
    buffered = FlatStreamBuffer(stream_function, buffer_size=buffer_size)
    dispatcher = QueuedStreamFunctionDispatcher(QueuedStreamFunction(buffered))
    return ClientPipeline(
        RestPacker(),
        dispatcher,
        QueuedStreamFunctionReceiver(),
        IdentityInterpreter(),
    )
@pytest.fixture
def rest_interpreter():
    """Interpreter that streams REST pickups and then feeds them to a RestReceiver."""
    # compose(g, f)(x) == g(f(x)), i.e. the same pipeline rcompose(f, g) expresses
    # left-to-right: RestPickupStreamer first, RestReceiver second.
    return compose(RestReceiver(), RestPickupStreamer())