Pull request #54: Feature/MLOPS-32 update pyinfra to use pypoetry.toml

Merge in RR/pyinfra from feature/MLOPS-32-update-pyinfra-to-use-pypoetry.toml to master

* commit '37d8ee49a22ab9ee81792217404ed0a7daea65c2': (34 commits)
  - add convenience command for version updates
  - testing version is ahead in project
  - test equal version number
  - echo latest git version tag
  - update tag fetching
  - rollback testing hardcoded
  - remove specific planRepository
  - remove parentheses
  - change project key
  - add planRepositories config
  - fix typo: licence -> license
  - ignore bamboo YAML configs
  - switch back to bamboo Java config
  - update version tag manually
  - remove superfulous `then`
  - isolate feature/bugfix/hotfix and dev tag setting
  - fix script `echo` was missing
  - add version update shortcut
  - show pyproject.toml file
  - ...

This commit is contained in:
commit edbe5fa4f0
.dockerignore — deleted (106 lines)
@@ -1,106 +0,0 @@
data
/build_venv/
/.venv/
/misc/
/incl/image_service/test/
/scratch/
/bamboo-specs/
README.md
Dockerfile
*idea
*misc
*egg-innfo
*pycache*

# Git
.git
.gitignore

# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml

# Docker
.docker

# Byte-compiled / optimized / DLL files
__pycache__/
*/__pycache__/
*/*/__pycache__/
*/*/*/__pycache__/
*.py[cod]
*/*.py[cod]
*/*/*.py[cod]
*/*/*/*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/**
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Virtual environment
.env/
.venv/
#venv/

# PyCharm
.idea

# Python mode for VIM
.ropeproject
*/.ropeproject
*/*/.ropeproject
*/*/*/.ropeproject

# Vim swap files
*.swp
*/*.swp
*/*/*.swp
*/*/*/*.swp
.gitignore (vendored) — 58 changed lines
@@ -1,10 +1,54 @@
# Environments
.env
.venv
__pycache__
data/
env/
venv/
.DS_Store

# Project folders
*.vscode/
.idea
*_app
*pytest_cache
*joblib
*tmp
*profiling
*logs
*docker
*drivers
*bamboo-specs/target
.coverage
data
build_venv
reports
pyinfra.egg-info
bamboo-specs/target
.pytest_cache
/.coverage
.idea

# Python specific files
__pycache__/
*.py[cod]
*.ipynb
*.ipynb_checkpoints

# file extensions
*.log
*.csv
*.json
*.pkl
*.profile
*.cbm
*.egg-info

# temp files
*.swp
*~
*.un~

# keep files
!notebooks/*.ipynb

# keep folders
!secrets
!data/*
!drivers

# ignore files
bamboo.yml
.pre-commit-config.yaml (new file, 35 lines)
@@ -0,0 +1,35 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: ^(docs/|notebooks/|data/|src/secrets/|src/static/|src/templates/|tests)
default_language_version:
  python: python3.8
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.3.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
        exclude: bamboo-specs/bamboo.yml

  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.2
    hooks:
      - id: flake8
        args:
          - "--max-line-length=120"
          - "--ignore=F401,W503"

  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v5.10.1
    hooks:
      - id: isort
        args: ["--profile", "black"]

  - repo: https://github.com/psf/black
    rev: 22.10.0
    hooks:
      - id: black
        # exclude: ^(docs/|notebooks/|data/|src/secrets/)
        args:
          - --line-length=120
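A quick way to exercise this hook configuration locally, assuming `pre-commit` itself is available on the PATH (it is not listed in the Poetry dev dependencies); the commands below are illustrative and not part of the diff:

```bash
pre-commit install          # install the git hook scripts defined by .pre-commit-config.yaml
pre-commit run --all-files  # run every hook against the whole repository, as the Makefile's pre-commit target does
```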
.python-version (new file, 1 line)
@@ -0,0 +1 @@
3.8.13
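The pinned interpreter matches `python = "~3.8"` in pyproject.toml and `PYTHON_VERSION=python3.8` in the Makefile. A `.python-version` file is typically consumed by pyenv; a minimal local sketch, assuming pyenv is indeed the intended consumer here:

```bash
pyenv install 3.8.13   # provide the pinned interpreter
pyenv local 3.8.13     # writes .python-version in the project root
```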
@@ -1,19 +0,0 @@
ARG BASE_ROOT="nexus.iqser.com:5001/red/"
ARG VERSION_TAG="dev"

FROM ${BASE_ROOT}pyinfra:${VERSION_TAG}

EXPOSE 5000
EXPOSE 8080

RUN python3 -m pip install coverage

# Make a directory for the service files and copy the service repo into the container.
WORKDIR /app/service
COPY . .

# Install module & dependencies
RUN python3 -m pip install -e .
RUN python3 -m pip install -r requirements.txt

CMD coverage run -m pytest test/ -x && coverage report -m && coverage xml
Makefile (new file, 85 lines)
@@ -0,0 +1,85 @@
.PHONY: \
	poetry in-project-venv dev-env use-env install install-dev tests \
	update-version sync-version-with-git \
	docker docker-build-run docker-build docker-run \
	docker-rm docker-rm-container docker-rm-image \
	pre-commit get-licenses prep-commit \
	docs sphinx_html sphinx_apidoc
.DEFAULT_GOAL := run

export DOCKER=docker
export DOCKERFILE=Dockerfile
export IMAGE_NAME=rule_engine-image
export CONTAINER_NAME=rule_engine-container
export HOST_PORT=9999
export CONTAINER_PORT=9999
export PYTHON_VERSION=python3.8

# all commands should be executed in the root dir or the project,
# specific environments should be deactivated

poetry: in-project-venv use-env dev-env

in-project-venv:
	poetry config virtualenvs.in-project true

use-env:
	poetry env use ${PYTHON_VERSION}

dev-env:
	poetry install --with dev

install:
	poetry add $(pkg)

install-dev:
	poetry add --dev $(pkg)

requirements:
	poetry export --without-hashes --output ./src/requirements.txt

update-version:
	poetry version prerelease

sync-version-with-git:
	git pull -p && poetry version $(git rev-list --tags --max-count=1 | git describe --tags --abbrev=0)

docker: docker-rm docker-build-run

docker-build-run: docker-build docker-run

docker-build:
	$(DOCKER) build \
		--no-cache --progress=plain \
		-t $(IMAGE_NAME) -f $(DOCKERFILE) .

docker-run:
	$(DOCKER) run -it --rm -p $(HOST_PORT):$(CONTAINER_PORT)/tcp --name $(CONTAINER_NAME) $(IMAGE_NAME) python app.py

docker-rm: docker-rm-container docker-rm-image

docker-rm-container:
	-$(DOCKER) rm $(CONTAINER_NAME)

docker-rm-image:
	-$(DOCKER) image rm $(IMAGE_NAME)

tests:
	poetry run pytest ./tests

prep-commit:
	docs get-license sync-version-with-git update-version pre-commit

pre-commit:
	pre-commit run --all-files

get-licenses:
	pip-licenses --format=json --order=license --with-urls > pkg-licenses.json

docs: sphinx_apidoc sphinx_html

sphinx_html:
	poetry run sphinx-build -b html docs/source/ docs/build/html -E -a

sphinx_apidoc:
	poetry run sphinx-apidoc -o ./docs/source/modules ./src/rule_engine
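A typical local workflow with these targets could look as follows; this is only a sketch, the target names come from the Makefile above, and `pika` is just a placeholder value for `pkg`:

```bash
make poetry             # in-project venv, select python3.8, install dev dependencies
make tests              # poetry run pytest ./tests
make install pkg=pika   # poetry add <package>
make docker             # remove old container/image, then rebuild and run the image
```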
README.md — 47 changed lines
@@ -54,53 +54,18 @@ Optionally, the input message can contain a field with the key `"operations"`.
}
```

## Development

Either run `src/serve.py` or the built Docker image.

### Setup

Install module.
Install project dependencies

```bash
pip install -e .
pip install -r requirements.txt
make poetry
```

or build docker image.
You don't have to install it independently in the project repo, just `import pyinfra` in any `.py`-file

or install form another project

```bash
docker build -f Dockerfile -t pyinfra .
```

### Usage

**Shell 1:** Start a MinIO and a RabbitMQ docker container.

```bash
docker-compose up
```

**Shell 2:** Add files to the local minio storage.

```bash
python scripts/manage_minio.py add <MinIO target folder> -d path/to/a/folder/with/PDFs
```

**Shell 2:** Run pyinfra-server.

```bash
python src/serve.py
```
or as container:

```bash
docker run --net=host pyinfra
```

**Shell 3:** Run analysis-container.

**Shell 4:** Start a client that sends requests to process PDFs from the MinIO store and annotates these PDFs according to the service responses.
```bash
python scripts/mock_client.py
poetry add git+ssh://git@git.iqser.com:2222/rr/pyinfra.git#TAG-NUMBER
```
@@ -37,4 +37,4 @@

<!-- run 'mvn test' to perform offline validation of the plan -->
<!-- run 'mvn -Ppublish-specs' to upload the plan to your Bamboo server -->
</project>
</project>
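Per the comments above, the Bamboo specs are validated and published with Maven; a minimal sketch, assuming it is run from the bamboo-specs/ directory against a configured Bamboo server:

```bash
cd bamboo-specs
mvn test              # offline validation of the plan (runs PlanSpecTest)
mvn -Ppublish-specs   # upload the plan to the Bamboo server
```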
@@ -21,128 +21,143 @@ import com.atlassian.bamboo.specs.builders.task.CleanWorkingDirectoryTask;
import com.atlassian.bamboo.specs.builders.task.VcsTagTask;
import com.atlassian.bamboo.specs.builders.trigger.BitbucketServerTrigger;
import com.atlassian.bamboo.specs.model.task.InjectVariablesScope;
import com.atlassian.bamboo.specs.api.builders.Variable;
import com.atlassian.bamboo.specs.util.BambooServer;
import com.atlassian.bamboo.specs.builders.task.ScriptTask;
import com.atlassian.bamboo.specs.model.task.ScriptTaskProperties.Location;

/**
* Plan configuration for Bamboo.
* Learn more on: <a href="https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs">https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs</a>
* Learn more on: <a href=
* "https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs">https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs</a>
*/
@BambooSpec
public class PlanSpec {

private static final String SERVICE_NAME = "pyinfra";
private static final String SERVICE_NAME = "pyinfra";

private static final String SERVICE_KEY = SERVICE_NAME.toUpperCase().replaceAll("-","");
private static final String SERVICE_KEY = SERVICE_NAME.toUpperCase().replaceAll("-", "");

/**
* Run main to publish plan on Bamboo
*/
public static void main(final String[] args) throws Exception {
//By default credentials are read from the '.credentials' file.
BambooServer bambooServer = new BambooServer("http://localhost:8085");
/**
* Run main to publish plan on Bamboo
*/
public static void main(final String[] args) throws Exception {
// By default credentials are read from the '.credentials' file.
BambooServer bambooServer = new BambooServer("http://localhost:8085");

Plan plan = new PlanSpec().createBuildPlan();
bambooServer.publish(plan);
PlanPermissions planPermission = new PlanSpec().createPlanPermission(plan.getIdentifier());
bambooServer.publish(planPermission);
}
Plan plan = new PlanSpec().createBuildPlan();
bambooServer.publish(plan);
PlanPermissions planPermission = new PlanSpec().createPlanPermission(plan.getIdentifier());
bambooServer.publish(planPermission);
}

private PlanPermissions createPlanPermission(PlanIdentifier planIdentifier) {
Permissions permission = new Permissions()
.userPermissions("atlbamboo", PermissionType.EDIT, PermissionType.VIEW, PermissionType.ADMIN, PermissionType.CLONE, PermissionType.BUILD)
.groupPermissions("research", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
.groupPermissions("Development", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
.groupPermissions("QA", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
.loggedInUserPermissions(PermissionType.VIEW)
.anonymousUserPermissionView();
return new PlanPermissions(planIdentifier.getProjectKey(), planIdentifier.getPlanKey()).permissions(permission);
}
private PlanPermissions createPlanPermission(PlanIdentifier planIdentifier) {
Permissions permission = new Permissions()
.userPermissions("atlbamboo", PermissionType.EDIT, PermissionType.VIEW, PermissionType.ADMIN,
PermissionType.CLONE, PermissionType.BUILD)
.groupPermissions("research", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE,
PermissionType.BUILD)
.groupPermissions("Development", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE,
PermissionType.BUILD)
.groupPermissions("QA", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE,
PermissionType.BUILD)
.loggedInUserPermissions(PermissionType.VIEW)
.anonymousUserPermissionView();
return new PlanPermissions(planIdentifier.getProjectKey(), planIdentifier.getPlanKey()).permissions(permission);
}

private Project project() {
return new Project()
.name("RED")
.key(new BambooKey("RED"));
}
private Project project() {
return new Project()
.name("RED")
.key(new BambooKey("RED"));
}

public Plan createBuildPlan() {
return new Plan(
project(),
SERVICE_NAME, new BambooKey(SERVICE_KEY))
.description("Build for pyinfra")
.stages(
new Stage("Sonar Stage")
.jobs(
new Job("Sonar Job", new BambooKey("SONAR"))
.tasks(
new CleanWorkingDirectoryTask()
.description("Clean working directory.")
.enabled(true),
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Set config and keys.")
.inlineBody("mkdir -p ~/.ssh\n" +
"echo \"${bamboo.bamboo_agent_ssh}\" | base64 -d >> ~/.ssh/id_rsa\n" +
"echo \"host vector.iqser.com\" > ~/.ssh/config\n" +
"echo \" user bamboo-agent\" >> ~/.ssh/config\n" +
"chmod 600 ~/.ssh/config ~/.ssh/id_rsa"),
new ScriptTask()
.description("Run Sonarqube scan.")
.location(Location.FILE)
.fileFromPath("bamboo-specs/src/main/resources/scripts/sonar-scan.sh")
.argument(SERVICE_NAME))
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/release_build:4.2.0")
.volume("/var/run/docker.sock", "/var/run/docker.sock"))),
new Stage("Licence Stage")
.jobs(
new Job("Git Tag Job", new BambooKey("GITTAG"))
.tasks(
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Build git tag.")
.location(Location.FILE)
.fileFromPath("bamboo-specs/src/main/resources/scripts/git-tag.sh"),
new InjectVariablesTask()
.description("Inject git tag.")
.path("git.tag")
.namespace("g")
.scope(InjectVariablesScope.LOCAL),
new VcsTagTask()
.description("${bamboo.g.gitTag}")
.tagName("${bamboo.g.gitTag}")
.defaultRepository())
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/release_build:4.4.1")),
new Job("Licence Job", new BambooKey("LICENCE"))
.enabled(false)
.tasks(
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Build licence.")
.location(Location.FILE)
.fileFromPath("bamboo-specs/src/main/resources/scripts/create-licence.sh"))
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/maven:3.6.2-jdk-13-3.0.0")
.volume("/etc/maven/settings.xml", "/usr/share/maven/ref/settings.xml")
.volume("/var/run/docker.sock", "/var/run/docker.sock"))))
.linkedRepositories("RR / " + SERVICE_NAME)
.triggers(new BitbucketServerTrigger())
.planBranchManagement(new PlanBranchManagement()
.createForVcsBranch()
.delete(new BranchCleanup()
.whenInactiveInRepositoryAfterDays(14))
.notificationForCommitters());
}
public Plan createBuildPlan() {
return new Plan(
project(),
SERVICE_NAME, new BambooKey(SERVICE_KEY))
.description("Build for pyinfra")
.stages(
new Stage("Sonar Stage")
.jobs(
new Job("Sonar Job", new BambooKey("SONAR"))
.tasks(
new CleanWorkingDirectoryTask()
.description("Clean working directory.")
.enabled(true),
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Set config and keys.")
.inlineBody("mkdir -p ~/.ssh\n" +
"echo \"${bamboo.bamboo_agent_ssh}\" | base64 -d >> ~/.ssh/id_rsa\n"
+
"echo \"host vector.iqser.com\" > ~/.ssh/config\n"
+
"echo \" user bamboo-agent\" >> ~/.ssh/config\n"
+
"chmod 600 ~/.ssh/config ~/.ssh/id_rsa"),
new ScriptTask()
.description("Run Sonarqube scan.")
.location(Location.FILE)
.fileFromPath(
"bamboo-specs/src/main/resources/scripts/sonar-scan.sh")
.argument(SERVICE_NAME))
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/release_build:4.2.0")
.volume("/var/run/docker.sock",
"/var/run/docker.sock"))),
new Stage("Git Stage")
.jobs(
new Job("Git Tag Job", new BambooKey("GITTAG"))
.tasks(
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Build git tag.")
.location(Location.FILE)
.fileFromPath(
"bamboo-specs/src/main/resources/scripts/git-tag.sh"),
new InjectVariablesTask()
.description("Inject git tag.")
.path("git.tag")
.namespace("g")
.scope(InjectVariablesScope.LOCAL),
new VcsTagTask()
.description("${bamboo.g.gitTag}")
.tagName("${bamboo.g.gitTag}")
.defaultRepository())
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/release_build:4.4.1"))),
new Stage("License Stage")
.jobs(
new Job("License Job", new BambooKey("LICENSE"))
.enabled(true)
.tasks(
new VcsCheckoutTask()
.description("Checkout default repository.")
.checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask()
.description("Build licence.")
.location(Location.FILE)
.fileFromPath(
"bamboo-specs/src/main/resources/scripts/create-licence.sh"))
.dockerConfiguration(
new DockerConfiguration()
.image("nexus.iqser.com:5001/infra/maven:3.6.2-jdk-13-3.0.0")
.volume("/etc/maven/settings.xml",
"/usr/share/maven/ref/settings.xml")
.volume("/var/run/docker.sock",
"/var/run/docker.sock"))))
.linkedRepositories("RR / " + SERVICE_NAME)
.triggers(new BitbucketServerTrigger())
.planBranchManagement(
new PlanBranchManagement()
.createForVcsBranch()
.delete(new BranchCleanup()
.whenInactiveInRepositoryAfterDays(14))
.notificationForCommitters());
}
}
bamboo-specs/src/main/resources/scripts/config-keys.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/bash
set -e

mkdir -p ~/.ssh
echo "${bamboo.bamboo_agent_ssh}" | base64 -d >> ~/.ssh/id_rsa
echo "host vector.iqser.com" > ~/.ssh/config
echo " user bamboo-agent" >> ~/.ssh/config
chmod 600 ~/.ssh/config ~/.ssh/id_rsa
@@ -16,4 +16,4 @@ then
-Dmaven.wagon.http.ssl.allowall=true \
-Dmaven.wagon.http.ssl.ignore.validity.dates=true \
-DaltDeploymentRepository=iqser_release::default::https://nexus.iqser.com/repository/gin4-platform-releases
fi
fi
@@ -1,9 +1,85 @@
#!/bin/bash
set -e

if [[ "${bamboo_version_tag}" = "dev" ]]
python3 -m venv build_venv
source build_venv/bin/activate
python3 -m pip install --upgrade pip
pip install poetry

echo "bamboo plan repo branch name: $bamboo_planRepository_branchName"
echo "bamboo version tag: $bamboo_version_tag"
echo "bamboo plan repo 1 branch: $bamboo_planRepository_1_branch"
echo "bamboo build number: $bamboo_buildNumber"

gitVersion=$(git rev-list --tags --max-count=1 | git describe --tags --abbrev=0)
echo "Latest GIT VERSION TAG: $gitVersion"


# update version in poetry to latest version in git if it is lower
# semver regex pattern: (\d+)\.(\d+)\.(\d+)([a-zA-Z\d]*)?-?(dev\d*|post\d*)?
check_poetry_version () {
projectVersion=$(poetry version -s)
tagCount=$(git rev-list --tags --max-count=1 | wc -l)

if [[ $tagCount -gt 0 ]]
then
echo "latest version tag in git: ${gitVersion}"
echo "current version in project: ${projectVersion}"

if [[ $projectVersion < $gitVersion ]]
then
echo "project version is behind"
echo "setting latest git tag as current version"
poetry version "$gitVersion"
updateVersion=0 # 0 means all is good means true, yes it's weird
elif [[ $projectVersion == $gitVersion ]]
then
echo "project version matches"
echo "keeping the project version"
updateVersion=0
else
echo "project version is higher (aka. has been manually set in pyproject.toml)"
echo "keeping the project version"
updateVersion=1
fi
else
updateVersion=1
fi

return $updateVersion
}


if [[ $bamboo_planRepository_branchName == "master" ]]
then
echo "gitTag=${bamboo_planRepository_1_branch}_${bamboo_buildNumber}" > git.tag
if check_poetry_version
then
echo "updating version number by one minor increment"
poetry version minor
fi
elif [[ $bamboo_planRepository_branchName =~ ^(release/) ]]
then
if check_poetry_version
then
echo "updating version number by one patch increment"
poetry version patch
fi
else
echo "gitTag=${bamboo_version_tag}" > git.tag
fi
if check_poetry_version
then
echo "updating version number by one prerelease increment"
poetry version prerelease
fi
fi


if [[ $bamboo_planRepository_branchName =~ ^(hotfix/|bugfix/|feature/) ]]
then
newVersion=$(poetry version -s)
else
newVersion="$(poetry version -s)-dev"
fi


echo "new build on $bamboo_planRepository_branchName with version: $newVersion"
echo "gitTag=$newVersion" > git.tag
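The comparison this script performs can be reproduced locally; an illustrative sketch, not part of the repository, and note that (like the script) it relies on bash's lexicographic `<` rather than true semver ordering:

```bash
# same two values the Bamboo script compares
gitVersion=$(git rev-list --tags --max-count=1 | git describe --tags --abbrev=0)
projectVersion=$(poetry version -s)
echo "latest git tag: $gitVersion, pyproject.toml version: $projectVersion"
if [[ $projectVersion < $gitVersion ]]; then
    echo "project version is behind the latest tag"
fi
```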
@@ -9,8 +9,8 @@ import org.junit.Test;
public class PlanSpecTest {
@Test
public void checkYourPlanOffline() throws PropertiesValidationException {
Plan plan = new PlanSpec().createDockerBuildPlan();
Plan plan = new PlanSpec().createBuildPlan();

EntityPropertiesBuilders.build(plan);
}
}
}
poetry.lock (generated, new file, 1584 lines)
File diff suppressed because it is too large.
@@ -1,3 +1,3 @@
from pyinfra import k8s_probes, queue, storage, config
from pyinfra import config, k8s_probes, queue, storage

__all__ = ["k8s_probes", "queue", "storage", "config"]
__all__ = ["k8s_probes", "queue", "storage", "config"]
@@ -1,3 +1,3 @@
from pyinfra.k8s_probes import startup

__all__ = ["startup"]
__all__ = ["startup"]
@@ -1,6 +1,6 @@
import logging
from pathlib import Path
import sys
from pathlib import Path

from pyinfra.queue.queue_manager import token_file_name

@@ -29,7 +29,7 @@ def check_token_file():
        return False


if __name__ == '__main__':
if __name__ == "__main__":
    if check_token_file():
        sys.exit(0)
    else:
@@ -1,3 +1,3 @@
from pyinfra.queue import queue_manager

__all__ = ["queue_manager"]
__all__ = ["queue_manager"]
@@ -2,8 +2,8 @@ import atexit
import json
import logging
import signal
from typing import Callable
from pathlib import Path
from typing import Callable

import pika
import pika.exceptions
@@ -91,9 +91,7 @@ class QueueManager(object):
            self.logger.info(f"Registered with consumer-tag: {self._consumer_token}")
            self._channel.start_consuming()
        except Exception:
            self.logger.warning(
                "An unexpected exception occurred while consuming messages. Consuming will stop."
            )
            self.logger.warning("An unexpected exception occurred while consuming messages. Consuming will stop.")
            raise
        finally:
            self.stop_consuming()
@@ -117,8 +115,9 @@ class QueueManager(object):
        # Requeueing will be handled by the dead-letter-exchange.
        # This prevents endless retries on messages that are impossible to process.
        if frame.redelivered:
            self.logger.info(f"Aborting message processing for delivery_tag {frame.delivery_tag} "
                             f"due to it being redelivered")
            self.logger.info(
                f"Aborting message processing for delivery_tag {frame.delivery_tag} " f"due to it being redelivered"
            )
            self._channel.basic_nack(frame.delivery_tag, requeue=False)
            return

@@ -129,8 +128,10 @@ class QueueManager(object):
        should_publish_result, callback_result = process_message_callback(unpacked_message_body)

        if should_publish_result:
            self.logger.info(f"Processed message with delivery_tag {frame.delivery_tag}, "
                             f"publishing result to result-queue")
            self.logger.info(
                f"Processed message with delivery_tag {frame.delivery_tag}, "
                f"publishing result to result-queue"
            )
            self._channel.basic_publish("", self._output_queue, json.dumps(callback_result).encode())

        self.logger.info(
@@ -1,4 +1,4 @@
from pyinfra.storage import adapters, storage
from pyinfra.storage.storage import get_storage

__all__ = ["adapters", "storage"]
__all__ = ["adapters", "storage"]
@@ -2,7 +2,7 @@ import logging
from itertools import repeat
from operator import attrgetter

from azure.storage.blob import ContainerClient, BlobServiceClient
from azure.storage.blob import BlobServiceClient, ContainerClient
from retry import retry

from pyinfra.config import Config, get_config

@@ -10,7 +10,6 @@ from retry import retry

from pyinfra.config import Config, get_config


CONFIG = get_config()
logger = logging.getLogger(CONFIG.logging_level_root)

@@ -24,6 +23,15 @@ URL_VALIDATOR = re.compile(
    + r"((?:\/[\+~%\/\.\w\-_]*)?"
    + r"\??(?:[\-\+=&;%@\.\w_]*)#?(?:[\.\!\/\\\w]*))?)"
)
# URL_VALIDATOR = re.compile(
#     r"""^((([A-Za-z]{3,9}:(?:\/\/)?)
#     (?:[\-;:&=\+\$,\w]+@)?
#     [A-Za-z0-9\.\-]+|(?:www\.|[\-;:&=\+\$,\w]+@)
#     [A-Za-z0-9\.\-]+)
#     ((?:\/[\+~%\/\.\w\-_]*)?
#     \??(?:[\-\+=&;%@\.\w_]*)
#     \#?(?:[\.\!\/\\\w]*))?)"""
# )


class S3StorageAdapter(object):
@@ -1,6 +1,4 @@
import logging

from pyinfra.config import get_config, Config
from pyinfra.config import Config
from pyinfra.storage.adapters.azure import get_azure_storage
from pyinfra.storage.adapters.s3 import get_s3_storage
pyproject.toml (new file, 36 lines)
@@ -0,0 +1,36 @@
[tool.poetry]
name = "pyinfra"
version = "1.1.1a7"
description = ""
authors = ["Francisco Schulz <francisco.schulz@iqser.com>"]
license = "All rights reseverd"
readme = "README.md"

[tool.poetry.dependencies]
python = "~3.8"
pika = "1.2.0"
retry = "0.9.2"
minio = "7.1.3"
azure-core = "1.22.1"
azure-storage-blob = "12.9.0"
testcontainers = "3.4.2"
docker-compose = "1.29.2"
funcy = "1.17"

[tool.poetry.group.dev.dependencies]
pytest = "^7.1.3"
ipykernel = "^6.16.0"

[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra -q"
testpaths = [
    "tests",
    "integration",
]
log_cli = 1
log_cli_level = "DEBUG"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
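With the project now managed by Poetry and built by poetry-core, a consumer-side sketch (the `#TAG-NUMBER` placeholder follows the README convention above; the exact tag is project-specific):

```bash
# inside this repository: create the in-project environment and install dev dependencies
make poetry

# from another project: depend on a tagged release over SSH
poetry add git+ssh://git@git.iqser.com:2222/rr/pyinfra.git#TAG-NUMBER
```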
@@ -1,4 +0,0 @@
[pytest]
log_cli = 1
log_cli_level = DEBUG
@@ -1,9 +0,0 @@
pika==1.2.0
retry==0.9.2
minio==7.1.3
azure-core==1.22.1
azure-storage-blob==12.9.0
testcontainers==3.4.2
docker-compose==1.29.2
pytest~=7.0.1
funcy==1.17
@@ -1,9 +0,0 @@
echo "${bamboo_nexus_password}" | docker login --username "${bamboo_nexus_user}" --password-stdin nexus.iqser.com:5001
docker build -f Dockerfile_tests -t pyinfra-tests .

rnd=$(date +"%s")
name=pyinfra-tests-${rnd}

echo "running tests container"

docker run --rm --net=host --name $name -v $PWD:$PWD -w $PWD -v /var/run/docker.sock:/var/run/docker.sock pyinfra-tests
setup.py — deleted (13 lines)
@@ -1,13 +0,0 @@
#!/usr/bin/env python

from distutils.core import setup

setup(
    name="pyinfra",
    version="0.0.1",
    description="",
    author="",
    author_email="",
    url="",
    packages=["pyinfra"],
)
@@ -1,4 +1,4 @@
sonar.exclusions=bamboo-specs/**, build_venv/**
sonar.exclusions=bamboo-specs/**
sonar.c.file.suffixes=-
sonar.cpp.file.suffixes=-
sonar.objc.file.suffixes=-
test.ipynb (new file, 194 lines)
@@ -0,0 +1,194 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'pprint.pprint'; 'pprint' is not a package",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn [10], line 4\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01myaml\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01myaml\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mloader\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FullLoader\n\u001b[0;32m----> 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mpprint\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpprint\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mpp\u001b[39;00m\n",
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'pprint.pprint'; 'pprint' is not a package"
]
}
],
"source": [
"import pyinfra\n",
"import yaml\n",
"from yaml.loader import FullLoader\n",
"import pprint"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'logging': 0,\n",
" 'mock_analysis_endpoint': 'http://127.0.0.1:5000',\n",
" 'service': {'operations': {'classify': {'input': {'extension': 'cls_in.gz',\n",
" 'multi': True,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'cls_out.gz',\n",
" 'subdir': ''}},\n",
" 'default': {'input': {'extension': 'IN.gz',\n",
" 'multi': False,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'OUT.gz',\n",
" 'subdir': ''}},\n",
" 'extract': {'input': {'extension': 'extr_in.gz',\n",
" 'multi': False,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'gz',\n",
" 'subdir': 'extractions'}},\n",
" 'rotate': {'input': {'extension': 'rot_in.gz',\n",
" 'multi': False,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'rot_out.gz',\n",
" 'subdir': ''}},\n",
" 'stream_pages': {'input': {'extension': 'pgs_in.gz',\n",
" 'multi': False,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'pgs_out.gz',\n",
" 'subdir': 'pages'}},\n",
" 'upper': {'input': {'extension': 'up_in.gz',\n",
" 'multi': False,\n",
" 'subdir': ''},\n",
" 'output': {'extension': 'up_out.gz',\n",
" 'subdir': ''}}},\n",
" 'response_formatter': 'identity'},\n",
" 'storage': {'aws': {'access_key': 'AKIA4QVP6D4LCDAGYGN2',\n",
" 'endpoint': 'https://s3.amazonaws.com',\n",
" 'region': '$STORAGE_REGION|\"eu-west-1\"',\n",
" 'secret_key': '8N6H1TUHTsbvW2qMAm7zZlJ63hMqjcXAsdN7TYED'},\n",
" 'azure': {'connection_string': 'DefaultEndpointsProtocol=https;AccountName=iqserdevelopment;AccountKey=4imAbV9PYXaztSOMpIyAClg88bAZCXuXMGJG0GA1eIBpdh2PlnFGoRBnKqLy2YZUSTmZ3wJfC7tzfHtuC6FEhQ==;EndpointSuffix=core.windows.net'},\n",
" 'bucket': 'pyinfra-test-bucket',\n",
" 'minio': {'access_key': 'root',\n",
" 'endpoint': 'http://127.0.0.1:9000',\n",
" 'region': None,\n",
" 'secret_key': 'password'}},\n",
" 'use_docker_fixture': 1,\n",
" 'webserver': {'host': '$SERVER_HOST|\"127.0.0.1\"',\n",
" 'mode': '$SERVER_MODE|production',\n",
" 'port': '$SERVER_PORT|5000'}}\n"
]
}
],
"source": [
"\n",
"# Open the file and load the file\n",
"with open('./tests/config.yml') as f:\n",
" data = yaml.load(f, Loader=FullLoader)\n",
" pprint.pprint(data)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ[\"STORAGE_BACKEND\"] = \"azure\"\n",
"\n",
"# always the same\n",
"os.environ[\"STORAGE_BUCKET_NAME\"] = \"pyinfra-test-bucket\"\n",
"\n",
"# s3\n",
"os.environ[\"STORAGE_ENDPOINT\"] = \"https://s3.amazonaws.com\"\n",
"os.environ[\"STORAGE_KEY\"] = \"AKIA4QVP6D4LCDAGYGN2\"\n",
"os.environ[\"STORAGE_SECRET\"] = \"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\"\n",
"os.environ[\"STORAGE_REGION\"] = \"eu-west-1\"\n",
"\n",
"# aks\n",
"os.environ[\"STORAGE_AZURECONNECTIONSTRING\"] = \"DefaultEndpointsProtocol=https;AccountName=iqserdevelopment;AccountKey=4imAbV9PYXaztSOMpIyAClg88bAZCXuXMGJG0GA1eIBpdh2PlnFGoRBnKqLy2YZUSTmZ3wJfC7tzfHtuC6FEhQ==;EndpointSuffix=core.windows.net\""
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"ename": "Exception",
"evalue": "Unknown storage backend 'aks'.",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mException\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn [23], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m config \u001b[38;5;241m=\u001b[39m pyinfra\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39mget_config()\n\u001b[0;32m----> 2\u001b[0m storage \u001b[38;5;241m=\u001b[39m \u001b[43mpyinfra\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstorage\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_storage\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/dev/pyinfra/pyinfra/storage/storage.py:15\u001b[0m, in \u001b[0;36mget_storage\u001b[0;34m(config)\u001b[0m\n\u001b[1;32m 13\u001b[0m storage \u001b[39m=\u001b[39m get_azure_storage(config)\n\u001b[1;32m 14\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m---> 15\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mException\u001b[39;00m(\u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mUnknown storage backend \u001b[39m\u001b[39m'\u001b[39m\u001b[39m{\u001b[39;00mconfig\u001b[39m.\u001b[39mstorage_backend\u001b[39m}\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m.\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m 17\u001b[0m \u001b[39mreturn\u001b[39;00m storage\n",
"\u001b[0;31mException\u001b[0m: Unknown storage backend 'aks'."
]
}
],
"source": [
"config = pyinfra.config.get_config()\n",
"storage = pyinfra.storage.get_storage(config)"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"False"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"storage.has_bucket(config.storage_bucket)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.8.13 ('pyinfra-TboPpZ8z-py3.8')",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.13"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "10d7419af5ea6dfec0078ebc9d6fa1a9383fe9894853f90dc7d29a81b3de2c78"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}
tests/config.yml (new file, 79 lines)
@@ -0,0 +1,79 @@
service:
  response_formatter: identity
  operations:
    upper:
      input:
        subdir: ""
        extension: up_in.gz
        multi: False
      output:
        subdir: ""
        extension: up_out.gz
    extract:
      input:
        subdir: ""
        extension: extr_in.gz
        multi: False
      output:
        subdir: "extractions"
        extension: gz
    rotate:
      input:
        subdir: ""
        extension: rot_in.gz
        multi: False
      output:
        subdir: ""
        extension: rot_out.gz
    classify:
      input:
        subdir: ""
        extension: cls_in.gz
        multi: True
      output:
        subdir: ""
        extension: cls_out.gz
    stream_pages:
      input:
        subdir: ""
        extension: pgs_in.gz
        multi: False
      output:
        subdir: "pages"
        extension: pgs_out.gz
    default:
      input:
        subdir: ""
        extension: IN.gz
        multi: False
      output:
        subdir: ""
        extension: OUT.gz

storage:
  minio:
    endpoint: "http://127.0.0.1:9000"
    access_key: root
    secret_key: password
    region: null

  aws:
    endpoint: https://s3.amazonaws.com
    access_key: AKIA4QVP6D4LCDAGYGN2
    secret_key: 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
    region: $STORAGE_REGION|"eu-west-1"

  azure:
    connection_string: "DefaultEndpointsProtocol=https;AccountName=iqserdevelopment;AccountKey=4imAbV9PYXaztSOMpIyAClg88bAZCXuXMGJG0GA1eIBpdh2PlnFGoRBnKqLy2YZUSTmZ3wJfC7tzfHtuC6FEhQ==;EndpointSuffix=core.windows.net"

  bucket: "pyinfra-test-bucket"

webserver:
  host: $SERVER_HOST|"127.0.0.1" # webserver address
  port: $SERVER_PORT|5000 # webserver port
  mode: $SERVER_MODE|production # webserver mode: {development, production}

mock_analysis_endpoint: "http://127.0.0.1:5000"

use_docker_fixture: 1
logging: 0