refactoring

This commit is contained in:
Matthias Bisping 2022-04-28 18:28:35 +02:00
parent f47d458217
commit 087b5af929
3 changed files with 35 additions and 32 deletions

View File

@@ -1,9 +1,8 @@
import json
from _operator import itemgetter
from collections import deque
from operator import itemgetter
import flask
from funcy import repeatedly, lmap, compose
from funcy import repeatedly, lmap, compose, zipdict, rcompose, identity
from test.utils.server import bytes_to_string, string_to_bytes

30
pyinfra/utils/func.py Normal file
View File

@@ -0,0 +1,30 @@
from itertools import starmap, tee
from funcy import curry, compose
def lift(fn):
    """Lift *fn* onto iterables: ``lift(f)(xs)`` behaves like ``map(f, xs)``.

    Returns a lazy mapper (a ``map`` object is produced on call).
    """
    # A plain closure over the builtin ``map`` replaces ``curry(map)(fn)``:
    # same behavior, no funcy.curry indirection needed.
    return lambda iterable: map(fn, iterable)
def starlift(fn):
    """Lift *fn* onto iterables of argument tuples:
    ``starlift(f)(pairs)`` behaves like ``itertools.starmap(f, pairs)`` (lazy).
    """
    # Closure instead of ``curry(starmap)(fn)`` — identical behavior,
    # drops the funcy.curry dependency for this helper.
    return lambda iterable: starmap(fn, iterable)
def parallel(*fs):
    """Pair each function with the positional argument at the same index and
    apply them element-wise, yielding the results lazily.
    """
    def apply_pairwise(*args):
        for fn, value in zip(fs, args):
            yield fn(value)
    return apply_pairwise
def star(f):
    """Adapt *f* to take a single iterable that is unpacked as its
    positional arguments: ``star(f)(args) == f(*args)``.
    """
    def unpacking(packed):
        return f(*packed)
    return unpacking
def duplicate_stream_and_apply(f1, f2):
    """Return a function that tees its input stream into two independent
    copies and lazily yields ``f1(copy1)`` followed by ``f2(copy2)``.
    """
    def split_and_apply(stream):
        first, second = tee(stream)
        return parallel(f1, f2)(first, second)
    return split_and_apply
def parallel_map(f1, f2):
    """Applies functions to a stream in parallel and yields a stream of tuples:

    parallel_map :: a -> b, a -> c -> [a] -> [(b, c)]
    """
    def mapped(stream):
        branches = duplicate_stream_and_apply(f1, f2)(stream)
        return zip(*branches)
    return mapped

View File

@@ -1,47 +1,21 @@
import logging
from itertools import chain, starmap, tee
from itertools import chain
from operator import methodcaller, itemgetter
from typing import Iterable
import pytest
import requests
from funcy import curry, rcompose, compose, lmap, rpartial, identity
from funcy import rcompose, compose, lmap, rpartial, identity
from more_itertools import peekable
from pyinfra.rest import pack
from pyinfra.utils.func import lift, starlift, parallel_map
from test.utils.server import string_to_bytes
# Raise the PIL PngImagePlugin logger threshold to INFO — presumably to
# silence its DEBUG output during test runs; confirm against logging config.
logger = logging.getLogger("PIL.PngImagePlugin")
logger.setLevel(logging.INFO)
def lift(fn):
    """Lift *fn* onto iterables: ``lift(f)(xs)`` behaves like ``map(f, xs)``.

    Returns a lazy mapper (a ``map`` object is produced on call).
    """
    # A plain closure over the builtin ``map`` replaces ``curry(map)(fn)``:
    # same behavior, no funcy.curry indirection needed.
    return lambda iterable: map(fn, iterable)
def starlift(fn):
    """Lift *fn* onto iterables of argument tuples:
    ``starlift(f)(pairs)`` behaves like ``itertools.starmap(f, pairs)`` (lazy).
    """
    # Closure instead of ``curry(starmap)(fn)`` — identical behavior,
    # drops the funcy.curry dependency for this helper.
    return lambda iterable: starmap(fn, iterable)
def parallel(*fs):
    """Apply each function in *fs* to the argument at the same position,
    producing the results as a lazy generator.
    """
    def zipped_application(*args):
        for fn, value in zip(fs, args):
            yield fn(value)
    return zipped_application
def star(f):
    """Wrap *f* so one iterable argument is unpacked into positional args:
    ``star(f)(args) == f(*args)``.
    """
    def splat(arguments):
        return f(*arguments)
    return splat
def duplicate_stream_and_apply(f1, f2):
    """Return a function that tees its input stream into two independent
    copies and lazily yields ``f1(copy1)`` followed by ``f2(copy2)``.
    """
    def teed(stream):
        left, right = tee(stream)
        return parallel(f1, f2)(left, right)
    return teed
def parallel_map(f1, f2):
    """Applies functions to a stream in parallel and yields a stream of tuples:

    parallel_map :: a -> b, a -> c -> [a] -> [(b, c)]
    """
    def tupled(stream):
        return zip(*duplicate_stream_and_apply(f1, f2)(stream))
    return tupled
def post_partial(url, input_data: Iterable[bytes], metadata):
def send(method, data):
return method(url, json=data)