import pytest
from funcy import rcompose, compose, project, second, merge

from pyinfra.server.buffering.stream import FlatStreamBuffer
from pyinfra.server.client_pipeline import ClientPipeline
from pyinfra.server.dispatcher.dispatcher import Nothing
from pyinfra.server.dispatcher.dispatchers.queue import QueuedStreamFunctionDispatcher
from pyinfra.server.dispatcher.dispatchers.rest import RestDispatcher
from pyinfra.server.interpreter.interpreters.identity import IdentityInterpreter
from pyinfra.server.interpreter.interpreters.rest_callback import RestPickupStreamer
from pyinfra.server.packer.packers.rest import RestPacker
from pyinfra.server.packing import unpack
from pyinfra.server.receiver.receivers.identity import QueuedStreamFunctionReceiver
from pyinfra.server.receiver.receivers.rest import RestReceiver
from pyinfra.server.stream.queued_stream_function import QueuedStreamFunction
from pyinfra.server.utils import make_streamable_and_wrap_in_packing_logic
from pyinfra.utils.func import lift, llift
from test.utils.input import pair_data_with_queue_message


def test_mock_pipeline():
    """A ClientPipeline of four lifted stages is equivalent to the
    left-to-right composition (rcompose) of those same stages."""
    items = [1, 2, 3]
    stages = [
        lift(fn)
        for fn in (lambda x: x**2, lambda x: x + 2, lambda x: x / 2, lambda x: x)
    ]
    pipeline = ClientPipeline(*stages)
    expected = rcompose(*stages)(items)
    assert list(pipeline(items)) == list(expected)


@pytest.mark.parametrize("client_pipeline_type", ["basic", "rest"])
@pytest.mark.parametrize("server_side_test", [True])
def test_pipeline(core_operation, client_pipeline, input_data_items, metadata, targets, n_items):
    """End-to-end check: running the parametrized pipeline and unpacking
    every item of its output reproduces the expected targets."""
    assert core_operation is not Nothing
    enriched_metadata = add_queue_metadata_to_server_side_metadata(metadata)
    raw_output = client_pipeline(input_data_items, enriched_metadata)
    output = llift(unpack)(raw_output)
    # With at least one input item the pipeline must produce something.
    assert n_items == 0 or len(output) > 0
    assert output == targets


def add_queue_metadata_to_server_side_metadata(metadata):
    """Merge queue-message fields into each server-side metadata dict.

    For every metadata dict, pick from its paired queue message the keys
    the dict already has plus "pages", then overlay the original dict on
    top (original values win on conflicts).
    """
    enriched = []
    for queued, original in zip(pair_data_with_queue_message(metadata), metadata):
        queue_fields = project(second(queued), [*original.keys(), "pages"])
        enriched.append(merge(queue_fields, original))
    return enriched
@pytest.mark.parametrize("item_type", ["string"])
@pytest.mark.parametrize("n_items", [1])
def test_pipeline_is_lazy(input_data_items, metadata, basic_client_pipeline, buffer_size):
    """Constructing the pipeline output must not execute the server-side
    stream function; only consuming the output may trigger it."""
    calls = {"executed": False}

    def lazy_test_fn(*args, **kwargs):
        # Flip the probe so the test can detect the function actually ran.
        calls["executed"] = True
        return b"null", {}

    stream_function = make_streamable_and_wrap_in_packing_logic(lazy_test_fn, batched=False)
    pipeline = get_basic_client_pipeline(stream_function, buffer_size=buffer_size)

    output = pipeline(input_data_items, metadata)
    assert not calls["executed"]  # nothing consumed yet, nothing executed
    list(output)  # force evaluation of the lazy stream
    assert calls["executed"]


@pytest.fixture
def client_pipeline(rest_client_pipeline, basic_client_pipeline, client_pipeline_type):
    """Select the pipeline implementation under test by parametrized type."""
    pipelines = {
        "rest": rest_client_pipeline,
        "basic": basic_client_pipeline,
    }
    return pipelines.get(client_pipeline_type)


@pytest.fixture
def rest_client_pipeline(server_process, endpoint, rest_interpreter):
    """REST-backed pipeline; requires a webserver listening on `endpoint`."""
    packer = RestPacker()
    dispatcher = RestDispatcher(endpoint)
    receiver = RestReceiver()
    return ClientPipeline(packer, dispatcher, receiver, rest_interpreter)


@pytest.fixture
def basic_client_pipeline(endpoint, rest_interpreter, server_stream_function, buffer_size):
    """In-process (queue-backed) pipeline around the server stream function."""
    return get_basic_client_pipeline(server_stream_function, buffer_size)


def get_basic_client_pipeline(stream_function, buffer_size=3):
    """Assemble a queue-backed ClientPipeline around `stream_function`.

    The stream function is wrapped in a FlatStreamBuffer (bounded by
    `buffer_size`) and dispatched through a queued stream function, so
    no webserver is required.
    """
    buffered = FlatStreamBuffer(stream_function, buffer_size=buffer_size)
    dispatcher = QueuedStreamFunctionDispatcher(QueuedStreamFunction(buffered))
    return ClientPipeline(
        RestPacker(),
        dispatcher,
        QueuedStreamFunctionReceiver(),
        IdentityInterpreter(),
    )


@pytest.fixture
def rest_interpreter():
    """Interpreter that streams pickups first, then receives via REST."""
    return rcompose(RestPickupStreamer(), RestReceiver())