Test that metadata stored as part of a storage item now works

This commit is contained in:
Matthias Bisping 2022-05-23 15:59:56 +02:00
parent 0842ec0d91
commit 298d8d3e2c

View File

@ -1,15 +1,13 @@
import gzip
import json
import logging
from itertools import starmap
from itertools import starmap, repeat
import pytest
from funcy import notnone, filter, lfilter, lmap, compose
from funcy import lfilter, lmap, compose, lzip
from pyinfra.default_objects import get_visitor, get_queue_manager, get_storage, get_consumer, get_callback
from pyinfra.server.dispatcher.dispatcher import Nothing
from pyinfra.server.packer.packers.identity import bundle
from pyinfra.server.packing import string_to_bytes, bytes_to_string, unpack, pack
from pyinfra.server.packing import bytes_to_string, unpack, pack
from pyinfra.visitor import get_object_descriptor
from test.utils.input import adorn_data_with_storage_info
@ -33,6 +31,7 @@ logger = logging.getLogger(__name__)
[
"string",
"image",
"pdf",
],
)
def test_serving(
@ -42,8 +41,9 @@ def test_serving(
bucket_name,
endpoint,
core_operation,
targets,
storage_item_has_metadata,
target_data_items,
targets,
):
print()
@ -60,6 +60,8 @@ def test_serving(
data_metadata_packs = starmap(compose(lambda s: s.encode(), json.dumps, pack), zip(input_data_items, metadata))
else:
data_metadata_packs = map(compose(lambda s: s.encode(), json.dumps, bytes_to_string), input_data_items)
metadata = repeat({})
targets = lzip(target_data_items, metadata)
adorned_data_metadata_packs = adorn_data_with_storage_info(data_metadata_packs)
@ -74,18 +76,14 @@ def test_serving(
def decode(storage_item):
repr = gzip.decompress(storage_item).decode().replace(r"\"", "'").replace('"', "").replace("'", '"')
try:
return json.loads(repr)
except json.decoder.JSONDecodeError:
return None
storage_item = json.loads(repr)
data, metadata = unpack(storage_item)
return data, metadata
# print(list(storage.get_all_object_names(bucket_name)))
names_of_uploaded_files = lfilter(".out", storage.get_all_object_names(bucket_name))
uploaded_files = [storage.get_object(bucket_name, fn) for fn in names_of_uploaded_files]
# print(names_of_uploaded_files)
for storage_item in [*map(decode, uploaded_files)]:
storage_item["data"] = string_to_bytes(storage_item["data"])
print("si", storage_item)
# print(targets)
outputs = lmap(decode, uploaded_files)
print(outputs)
print(targets)
assert outputs == targets