refactor: update storages with dynaconf logic, add validators, repair test
parent 27917863c9
commit e5c8a6e9f1
@@ -1,32 +1,31 @@
 from functools import lru_cache, partial
 from typing import Callable

+from dynaconf import Dynaconf
 from funcy import compose

-from pyinfra.config import Config
-from pyinfra.storage.storage_info import get_storage_info_from_config, get_storage_from_storage_info
+from pyinfra.storage.storages.azure import get_azure_storage_from_settings
 from pyinfra.storage.storages.interface import Storage
+from pyinfra.storage.storages.s3 import get_s3_storage_from_settings
 from pyinfra.utils.compressing import get_decompressor, get_compressor
+from pyinfra.utils.config_validation import validate_settings, storage_validators
 from pyinfra.utils.encoding import get_decoder, get_encoder


-def get_storage_from_config(config: Config) -> Storage:
-    storage_info = get_storage_info_from_config(config)
-    storage = get_storage_from_storage_info(storage_info)
-
-    return storage
-
-
-def verify_existence(storage: Storage, bucket: str, file_name: str) -> str:
-    if not storage.exists(bucket, file_name):
-        raise FileNotFoundError(f"{file_name=} not found on storage in {bucket=}.")
-    return file_name
+def get_storage_from_settings(settings: Dynaconf) -> Storage:
+    validate_settings(settings, storage_validators)
+
+    return storage_dispatcher[settings.storage.backend](settings)
+
+
+storage_dispatcher = {
+    "azure": get_azure_storage_from_settings,
+    "s3": get_s3_storage_from_settings,
+}


 @lru_cache(maxsize=10)
 def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable:

     verify = partial(verify_existence, storage, bucket)
     download = partial(storage.get_object, bucket)
     decompress = get_decompressor(compression_type)
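Note: a minimal usage sketch of the new entry point above. Only get_storage_from_settings, storage_dispatcher, and the storage.backend key come from this diff; the settings file name is an assumption.

    from dynaconf import Dynaconf

    from pyinfra.storage.storage import get_storage_from_settings

    # Any Dynaconf source works; a settings.toml providing storage.backend plus
    # the backend-specific keys checked by the validators is assumed here.
    settings = Dynaconf(settings_files=["settings.toml"])

    # storage.backend ("s3" or "azure") selects the factory from storage_dispatcher.
    storage = get_storage_from_settings(settings)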
@@ -37,7 +36,6 @@ def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable:

 @lru_cache(maxsize=10)
 def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable:

     upload = partial(storage.put_object, bucket)
     compress = get_compressor(compression_type)
     encode = get_encoder(file_type)
@@ -46,3 +44,9 @@ def make_uploader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable:
         upload(file_name, compose(compress, encode)(file_bytes))

     return inner
+
+
+def verify_existence(storage: Storage, bucket: str, file_name: str) -> str:
+    if not storage.exists(bucket, file_name):
+        raise FileNotFoundError(f"{file_name=} not found on storage in {bucket=}.")
+    return file_name
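Note: the downloader body is cut off by the hunk boundary above. From the pieces that are visible (verify, download, decompress, plus get_decoder in the imports), its shape is plausibly the following; the inner function and its composition order are assumptions, mirroring the uploader. Names are as imported in the module above.

    def make_downloader(storage: Storage, bucket: str, file_type: str, compression_type: str) -> Callable:
        verify = partial(verify_existence, storage, bucket)
        download = partial(storage.get_object, bucket)
        decompress = get_decompressor(compression_type)
        decode = get_decoder(file_type)

        def inner(file_name: str):
            # Assumed pipeline: check existence, fetch the object, decompress,
            # then decode back into a Python object. funcy.compose applies
            # right to left, so verify runs first.
            return compose(decode, decompress, download, verify)(file_name)

        return inner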
@@ -1,12 +1,14 @@
 import logging
-from azure.storage.blob import BlobServiceClient, ContainerClient
 from itertools import repeat
-from kn_utils.logging import logger
 from operator import attrgetter

+from azure.storage.blob import BlobServiceClient, ContainerClient
+from dynaconf import Dynaconf
+from kn_utils.logging import logger
 from retry import retry

-from pyinfra.config import Config
 from pyinfra.storage.storages.interface import Storage
+from pyinfra.utils.config_validation import azure_storage_validators, validate_settings

 logging.getLogger("azure").setLevel(logging.WARNING)
 logging.getLogger("urllib3").setLevel(logging.WARNING)
@@ -74,5 +76,6 @@ class AzureStorage(Storage):
         return zip(repeat(bucket_name), map(attrgetter("name"), blobs))


-def get_azure_storage_from_config(config: Config):
-    return AzureStorage(BlobServiceClient.from_connection_string(conn_str=config.storage_azureconnectionstring))
+def get_azure_storage_from_settings(settings: Dynaconf):
+    validate_settings(settings, azure_storage_validators)
+    return AzureStorage(BlobServiceClient.from_connection_string(conn_str=settings.storage.azure.connection_string))
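Note: a sketch of driving the new Azure factory. The storage.azure.connection_string key mirrors azure_storage_validators; the settings file name and the placeholder connection string are assumptions.

    from dynaconf import Dynaconf

    from pyinfra.storage.storages.azure import get_azure_storage_from_settings

    # Assumed settings.toml contents:
    #   [storage]
    #   backend = "azure"
    #   [storage.azure]
    #   connection_string = "DefaultEndpointsProtocol=https;AccountName=...;AccountKey=..."
    settings = Dynaconf(settings_files=["settings.toml"])

    # Validates the settings, then wraps a BlobServiceClient in AzureStorage.
    storage = get_azure_storage_from_settings(settings)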
@@ -1,12 +1,15 @@
 import io
 from itertools import repeat
+from operator import attrgetter

+from dynaconf import Dynaconf
 from kn_utils.logging import logger
 from minio import Minio
-from operator import attrgetter
 from retry import retry

-from pyinfra.config import Config
 from pyinfra.storage.storages.interface import Storage
+from pyinfra.utils.config_validation import validate_settings, s3_storage_validators
+from pyinfra.utils.url_parsing import validate_and_parse_s3_endpoint


 class S3Storage(Storage):
@@ -63,13 +66,17 @@ class S3Storage(Storage):
         return zip(repeat(bucket_name), map(attrgetter("object_name"), objs))


-def get_s3_storage_from_config(config: Config):
+def get_s3_storage_from_settings(settings: Dynaconf):
+    validate_settings(settings, s3_storage_validators)
+
+    secure, endpoint = validate_and_parse_s3_endpoint(settings.storage.s3.endpoint)
+
     return S3Storage(
         Minio(
-            secure=config.storage_secure_connection,
-            endpoint=config.storage_endpoint,
-            access_key=config.storage_key,
-            secret_key=config.storage_secret,
-            region=config.storage_region,
+            secure=secure,
+            endpoint=endpoint,
+            access_key=settings.storage.s3.key,
+            secret_key=settings.storage.s3.secret,
+            region=settings.storage.s3.region,
         )
     )
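Note: validate_and_parse_s3_endpoint is not part of this diff. Given that it feeds Minio a separate secure flag and a bare endpoint, a plausible sketch of the helper is below; this is a hypothetical reimplementation, not the code in pyinfra.utils.url_parsing.

    from urllib.parse import urlparse

    def validate_and_parse_s3_endpoint(endpoint_url: str) -> tuple[bool, str]:
        # Minio() expects host[:port] without a scheme, with TLS toggled via
        # secure=, so split the scheme off the configured URL.
        parsed = urlparse(endpoint_url)
        if parsed.scheme not in ("http", "https") or not parsed.netloc:
            raise ValueError(f"Invalid S3 endpoint: {endpoint_url!r}")
        return parsed.scheme == "https", parsed.netloc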
@@ -13,6 +13,21 @@ queue_manager_validators = [
     Validator("rabbitmq.dead_letter_queue", must_exist=True),
 ]

+azure_storage_validators = [
+    Validator("storage.azure.connection_string", must_exist=True),
+]
+
+s3_storage_validators = [
+    Validator("storage.s3.endpoint", must_exist=True),
+    Validator("storage.s3.key", must_exist=True),
+    Validator("storage.s3.secret", must_exist=True),
+    Validator("storage.s3.region", must_exist=True),
+]
+
+storage_validators = [
+    Validator("storage.backend", must_exist=True),
+]
+

 def validate_settings(settings: Dynaconf, validators):
     settings_valid = True
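Note: the body of validate_settings is cut off above. Underneath, Dynaconf validation is register-then-validate; a minimal standalone sketch (the settings file name is an assumption):

    from dynaconf import Dynaconf, Validator, ValidationError

    settings = Dynaconf(settings_files=["settings.toml"])
    settings.validators.register(Validator("storage.backend", must_exist=True))

    try:
        settings.validators.validate()
    except ValidationError as exc:
        # The helper above presumably collects failures like this one
        # instead of stopping at the first.
        print(f"invalid settings: {exc}")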
@@ -1,24 +1,20 @@
 import pytest

-from pyinfra.storage.storage import get_storage_from_config
+from pyinfra.storage.storage import get_storage_from_settings


 @pytest.fixture(scope="session")
 def storage(storage_backend, bucket_name, settings):
-    settings.storage_backend = storage_backend
-    settings.storage_bucket = bucket_name
+    settings.storage.backend = storage_backend

-    storage = get_storage_from_config(settings)
+    storage = get_storage_from_settings(settings)
     storage.make_bucket(bucket_name)

     yield storage
     storage.clear_bucket(bucket_name)


-# @pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session")
-# FIXME: Azure storage test needs the secret azure connection string
-# when the config is refactored as file, add this and provide file via bitwarden
-@pytest.mark.parametrize("storage_backend", ["s3"], scope="session")
+@pytest.mark.parametrize("storage_backend", ["azure", "s3"], scope="session")
 @pytest.mark.parametrize("bucket_name", ["bucket"], scope="session")
 class TestStorage:
     def test_clearing_bucket_yields_empty_bucket(self, storage, bucket_name):
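Note: the settings fixture requested above comes from conftest and is not shown in this diff. A hypothetical version targeting a local MinIO (all values are placeholders, and passing settings as keyword arguments relies on Dynaconf 3 treating extra kwargs as default values):

    import pytest
    from dynaconf import Dynaconf


    @pytest.fixture(scope="session")
    def settings():
        # Placeholder credentials for a local MinIO container; real values
        # would come from a settings file or the environment.
        return Dynaconf(
            storage={
                "backend": "s3",
                "s3": {
                    "endpoint": "http://localhost:9000",
                    "key": "minioadmin",
                    "secret": "minioadmin",
                    "region": "us-east-1",
                },
            }
        )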