Merge in RR/pyinfra from RED-6273-multi-tenant-storage to master
Squashed commit of the following:
commit 0fead1f8b59c9187330879b4e48d48355885c27c
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 28 15:02:22 2023 +0200
fix typos
commit 892a803726946876f8b8cd7905a0e73c419b2fb1
Author: Matthias Bisping <matthias.bisping@axbit.com>
Date: Tue Mar 28 14:41:49 2023 +0200
Refactoring
Replace custom storage caching logic with LRU decorator
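The refactoring above swaps hand-rolled caching for functools.lru_cache. A minimal sketch of that pattern, assuming a hashable storage-info object and a placeholder connection factory (both hypothetical, not the repo's actual classes):

from dataclasses import dataclass
from functools import lru_cache

@dataclass(frozen=True)            # frozen -> hashable, usable as a cache key
class StorageInfo:
    backend: str                   # e.g. "azure" or "s3"
    connection_string: str
    bucket_name: str

@lru_cache(maxsize=None)
def get_storage_connection(info: StorageInfo):
    # Placeholder for the real connection factory: each unique StorageInfo
    # is connected exactly once; later calls return the cached object.
    return object()

conn_a = get_storage_connection(StorageInfo("s3", "cs", "testbucket"))
conn_b = get_storage_connection(StorageInfo("s3", "cs", "testbucket"))
assert conn_a is conn_b            # same info -> same cached connection

Freezing the dataclass is what makes the storage info hashable, so lru_cache can key the cache on it directly.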
commit eafcd90260731e3360ce960571f07dee8f521327
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Fri Mar 24 12:50:13 2023 +0100
fix bug in storage connection from endpoint
commit d0c9fb5b7d1c55ae2f90e8faa1efec9f7587c26a
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Fri Mar 24 11:49:34 2023 +0100
add logs to PayloadProcessor
- add log messages to determine whether the
x-tenant storage connection is working
commit 97309fe58037b90469cf7a3de342d4749a0edfde
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Fri Mar 24 10:41:59 2023 +0100
update PayloadProcessor
- introduce a storage cache so that each unique
storage connection is created only once
- add functionality to pass optional processing
kwargs (e.g. the operation key) from the queue
message to the processing function
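A sketch of how optional processing kwargs could travel with a queue message; the message layout and handler names here are assumptions, not the actual PayloadProcessor API:

def handle_message(message: dict, process) -> None:
    payload = message["payload"]
    # Optional kwargs (e.g. an operation key) ride along in the queue
    # message and are forwarded verbatim to the processing function.
    processing_kwargs = message.get("processing_kwargs", {})
    process(payload, **processing_kwargs)

handle_message(
    {"payload": b"data", "processing_kwargs": {"operation_key": "reindex"}},
    process=lambda payload, operation_key=None: print(operation_key),
)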
commit d48e8108fdc0d463c89aaa0d672061ab7dca83a0
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Wed Mar 22 13:34:43 2023 +0100
add multi-tenant storage connection 1st iteration
- forward x-tenant-id from queue message header to
payload processor
- add functions to retrieve storage infos from an
endpoint or the config; this enables hashing and
caching of connections created from these infos
- add function to initialize storage connections
from storage infos
- streamline and refactor tests to make them more
readable and robust, and to make it easier to add
new tests
- update the payload processor with a first iteration
of multi-tenant storage connection support,
including connection caching and backwards compatibility
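A sketch of the flow this commit describes, using hypothetical helper names: the x-tenant-id header selects per-tenant storage info from an endpoint, while messages without the header fall back to the static config for backwards compatibility:

def resolve_storage(headers, config, get_info_from_endpoint, connect):
    tenant_id = headers.get("x-tenant-id")
    if tenant_id is not None:
        # Multi-tenant path: per-tenant storage info comes from the endpoint.
        storage_info = get_info_from_endpoint(tenant_id)
    else:
        # Backwards-compatible path: no header -> fall back to the config.
        storage_info = config["storage"]
    return connect(storage_info)

conn = resolve_storage(
    headers={"x-tenant-id": "tenant-42"},
    config={"storage": {"backend": "s3"}},
    get_info_from_endpoint=lambda tid: {"backend": "azure", "tenant": tid},
    connect=lambda info: info,  # stand-in for the cached connection factory
)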
commit 52c047c47b98e62d0b834a9b9b6c0e2bb0db41e5
Author: Julius Unverfehrt <julius.unverfehrt@iqser.com>
Date: Tue Mar 21 15:35:57 2023 +0100
add AES/GCM cipher functions
- decrypt x-tenant storage connection strings
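The commit does not include the cipher code itself; an AES-GCM round trip with the cryptography package (the library choice is an assumption) looks roughly like this:

import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)   # tenant-store secret
nonce = os.urandom(12)                      # 96-bit nonce, unique per message
aesgcm = AESGCM(key)

ciphertext = aesgcm.encrypt(nonce, b"DefaultEndpointsProtocol=https;...", None)
connection_string = aesgcm.decrypt(nonce, ciphertext, None)

decrypt raises InvalidTag when the key or ciphertext does not match, so a tampered connection string fails loudly instead of connecting somewhere unexpected.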
import logging

import pytest

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session")
@pytest.mark.parametrize("bucket_name", ["testbucket"], scope="session")
@pytest.mark.parametrize("monitoring_enabled", [False], scope="session")
class TestStorage:
    def test_clearing_bucket_yields_empty_bucket(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        data_received = storage.get_all_objects(bucket_name)
        assert not {*data_received}

    def test_getting_object_put_in_bucket_is_object(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, "file", b"content")
        data_received = storage.get_object(bucket_name, "file")
        assert b"content" == data_received

    def test_object_put_in_bucket_exists_on_storage(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, "file", b"content")
        assert storage.exists(bucket_name, "file")

    def test_getting_nested_object_put_in_bucket_is_nested_object(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, "folder/file", b"content")
        data_received = storage.get_object(bucket_name, "folder/file")
        assert b"content" == data_received

    def test_getting_objects_put_in_bucket_are_objects(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, "file1", b"content 1")
        storage.put_object(bucket_name, "folder/file2", b"content 2")
        data_received = storage.get_all_objects(bucket_name)
        assert {b"content 1", b"content 2"} == {*data_received}

    def test_make_bucket_produces_bucket(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.make_bucket(bucket_name)
        assert storage.has_bucket(bucket_name)

    def test_listing_bucket_files_yields_all_files_in_bucket(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        storage.put_object(bucket_name, "file1", b"content 1")
        storage.put_object(bucket_name, "file2", b"content 2")
        full_names_received = storage.get_all_object_names(bucket_name)
        assert {(bucket_name, "file1"), (bucket_name, "file2")} == {*full_names_received}

    def test_data_loading_failure_raised_if_object_not_present(self, storage, bucket_name):
        storage.clear_bucket(bucket_name)
        with pytest.raises(Exception):
            storage.get_object(bucket_name, "folder/file")
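The storage, storage_backend, bucket_name, and monitoring_enabled fixtures are defined elsewhere in the repo. As an illustration of the interface the tests above exercise, here is a hypothetical in-memory stand-in, not the actual azure/s3-backed fixtures:

import pytest

class InMemoryStorage:
    # Dict-of-dicts fake: bucket name -> {object name -> bytes}.
    def __init__(self):
        self._buckets = {}

    def make_bucket(self, bucket):
        self._buckets.setdefault(bucket, {})

    def has_bucket(self, bucket):
        return bucket in self._buckets

    def clear_bucket(self, bucket):
        self._buckets[bucket] = {}

    def put_object(self, bucket, name, data):
        self._buckets.setdefault(bucket, {})[name] = data

    def get_object(self, bucket, name):
        return self._buckets[bucket][name]  # KeyError if the object is missing

    def exists(self, bucket, name):
        return name in self._buckets.get(bucket, {})

    def get_all_objects(self, bucket):
        return list(self._buckets.get(bucket, {}).values())

    def get_all_object_names(self, bucket):
        return [(bucket, name) for name in self._buckets.get(bucket, {})]

@pytest.fixture
def storage():
    # Simplified stand-in; the real fixture builds a backend-specific client.
    return InMemoryStorage()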