From db3b23c475fef17d1a13016dd6b044dc83147946 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 27 Aug 2024 08:25:49 -0300 Subject: [PATCH] Use exasol-toolbox (#226) * Added exasol-toolbox --- .github/workflows/checks.yml | 35 + .gitignore | 5 +- .pre-commit-config.yaml | 36 + README.md | 25 + .../cli/commands/__init__.py | 8 +- .../cli/commands/build.py | 146 ++- .../cli/commands/build_test_container.py | 67 +- .../cli/commands/clean.py | 68 +- .../cli/commands/export.py | 139 +- .../commands/generate_language_activation.py | 28 +- .../cli/commands/install_starter_scripts.py | 29 +- .../cli/commands/push.py | 129 +- .../cli/commands/push_test_container.py | 80 +- .../cli/commands/run_db_tests.py | 417 +++--- .../cli/commands/save.py | 151 ++- .../cli/commands/security_scan.py | 115 +- .../cli/commands/upload.py | 178 +-- .../cli/options/flavor_options.py | 24 +- .../cli/options/goal_options.py | 22 +- .../cli/options/test_container_options.py | 9 +- .../lib/api/__init__.py | 9 +- .../lib/api/api_errors.py | 1 - .../lib/api/build.py | 132 +- .../lib/api/build_test_container.py | 58 +- .../lib/api/clean.py | 100 +- .../lib/api/export.py | 133 +- .../lib/api/generate_language_activation.py | 41 +- .../lib/api/install_starter_scripts.py | 15 +- .../lib/api/push.py | 136 +- .../lib/api/push_test_container.py | 63 +- .../lib/api/run_db_tests.py | 286 +++-- .../lib/api/save.py | 136 +- .../lib/api/security_scan.py | 124 +- .../lib/api/upload.py | 165 ++- .../lib/tasks/build/docker_build.py | 18 +- .../tasks/build/docker_flavor_build_base.py | 34 +- .../tasks/build/docker_flavor_image_task.py | 55 +- .../lib/tasks/clean/clean_images.py | 90 +- .../tasks/export/create_export_directory.py | 4 +- .../export/export_container_base_task.py | 210 ++- .../lib/tasks/export/export_container_task.py | 33 +- .../export/export_container_tasks_creator.py | 53 +- .../lib/tasks/export/export_containers.py | 57 +- 
.../lib/tasks/export/export_info.py | 21 +- .../run_starter_script_installation.py | 43 +- .../lib/tasks/push/docker_push.py | 23 +- .../lib/tasks/save/docker_save.py | 22 +- .../lib/tasks/security_scan/security_scan.py | 105 +- .../security_scan/security_scan_parameter.py | 4 +- .../lib/tasks/test/populate_test_engine.py | 9 +- .../test/run_db_generic_language_tests.py | 50 +- .../lib/tasks/test/run_db_test.py | 205 ++- .../lib/tasks/test/run_db_test_files.py | 60 +- .../lib/tasks/test/run_db_test_folder.py | 55 +- .../tasks/test/run_db_test_in_directory.py | 68 +- .../lib/tasks/test/run_db_test_result.py | 52 +- .../tasks/test/run_db_tests_in_test_config.py | 93 +- .../lib/tasks/test/run_db_tests_parameter.py | 18 +- .../lib/tasks/test/test_container.py | 270 ++-- .../lib/tasks/test/test_container_content.py | 34 +- .../tasks/test/test_runner_db_test_task.py | 209 +-- .../tasks/test/upload_exported_container.py | 20 +- .../tasks/test/upload_file_to_bucket_fs.py | 128 +- .../lib/tasks/upload/language_definition.py | 37 +- .../upload/upload_container_base_task.py | 73 +- .../upload/upload_container_parameter.py | 8 +- .../lib/tasks/upload/upload_container_task.py | 32 +- .../upload/upload_container_tasks_creator.py | 37 +- .../lib/tasks/upload/upload_containers.py | 49 +- .../upload/upload_containers_parameter.py | 4 +- .../lib/utils/docker_utils.py | 25 +- .../lib/utils/tar_safe_extract.py | 10 +- .../main.py | 7 +- .../py.typed | 0 .../version.py | 10 + noxconfig.py | 20 + noxfile.py | 7 + poetry.lock | 1129 ++++++++++++++++- pyproject.toml | 28 + scripts/build/check_release.py | 10 +- .../real_flavor_base/build_steps.py | 31 +- .../real_flavor_base/build_steps.py | 31 +- .../tests/test/docker_environment_test.py | 34 +- .../full/tests/test/empty_test.py | 11 +- .../full/tests/test/test_builtin_languages.py | 38 +- .../test/test_container_docker_credentials.py | 10 +- test/test_build_test_container.py | 37 +- test/test_click_api_consistency.py | 54 +- 
test/test_docker_api_build.py | 58 +- test/test_docker_api_export.py | 20 +- test/test_docker_api_push.py | 47 +- test/test_docker_build.py | 33 +- test/test_docker_clean.py | 8 +- test/test_docker_export.py | 31 +- test/test_docker_load.py | 8 +- test/test_docker_pull.py | 28 +- test/test_docker_push.py | 24 +- test/test_docker_save.py | 22 +- test/test_docker_upload.py | 124 +- test/test_generate_language_activation.py | 50 +- test/test_install_starter_scripts.py | 85 +- test/test_language_definition.py | 57 +- test/test_push_test_container.py | 27 +- test/test_run_db_test_builtin_languages.py | 26 +- test/test_run_db_test_docker_credentials.py | 36 +- test/test_run_db_test_docker_db.py | 30 +- ...t_run_db_test_docker_db_check_arguments.py | 92 +- test/test_run_db_test_docker_db_reuse.py | 38 +- test/test_run_db_test_docker_pass_through.py | 16 +- test/test_run_db_test_external_db.py | 48 +- test/test_security_scan.py | 23 +- test/utils.py | 127 +- 112 files changed, 5405 insertions(+), 2538 deletions(-) create mode 100644 .github/workflows/checks.yml create mode 100644 .pre-commit-config.yaml create mode 100644 exasol_script_languages_container_tool/py.typed create mode 100644 exasol_script_languages_container_tool/version.py create mode 100644 noxconfig.py create mode 100644 noxfile.py diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml new file mode 100644 index 00000000..44e05c8e --- /dev/null +++ b/.github/workflows/checks.yml @@ -0,0 +1,35 @@ +name: Checks + +on: + pull_request: + +jobs: + + lint-job: + name: Linting and Type checks (Python-${{ matrix.python-version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + + steps: + - name: SCM Checkout + uses: actions/checkout@v4 + + - name: Setup Python & Poetry Environment + uses: exasol/python-toolbox/.github/actions/python-environment@0.14.0 + with: + python-version: ${{ matrix.python-version }} + + - name: Run Tests + run: 
poetry run nox -s lint + + - name: Run type-check + run: poetry run nox -s type-check + + - name: Upload Artifacts + uses: actions/upload-artifact@v4 + with: + name: ".lint-python-${{ matrix.python-version }}.txt" + path: .lint.txt diff --git a/.gitignore b/.gitignore index ebc0f9f9..0255468e 100644 --- a/.gitignore +++ b/.gitignore @@ -141,4 +141,7 @@ dmypy.json .build_output/ # Emacs -TAGS \ No newline at end of file +TAGS + +.lint.json +.lint.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..017e6b82 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +default_stages: [ commit ] +repos: + + - repo: local + hooks: + - id: code-format + name: code-format + types: [ python ] + pass_filenames: false + language: system + entry: poetry run nox -s fix + + - repo: local + hooks: + - id: type-check + name: type-check + types: [ python ] + pass_filenames: false + language: system + entry: poetry run nox -s type-check + + - repo: local + hooks: + - id: lint + name: lint + types: [ python ] + pass_filenames: false + language: system + entry: poetry run nox -s lint + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace diff --git a/README.md b/README.md index bb26b0b2..94239555 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,31 @@ You can build, export and upload script-language container from so-called flavor which are description how to build the script language container. You can find pre-defined flavors in the [script-languages-release](https://github.com/exasol/script-languages-release) repository. There we also described how you could customize these flavors to your needs. +

Exasol Toolbox

+ +

+Building script-language container for extending Exasol UDFs. +

+ +

+ + + Checks Main + + + License + + + Downloads + + + Supported Python Versions + + + PyPi Package + +

+ ## In a Nutshell diff --git a/exasol_script_languages_container_tool/cli/commands/__init__.py b/exasol_script_languages_container_tool/cli/commands/__init__.py index 17018bc2..da578de9 100644 --- a/exasol_script_languages_container_tool/cli/commands/__init__.py +++ b/exasol_script_languages_container_tool/cli/commands/__init__.py @@ -1,12 +1,12 @@ from .build import build +from .build_test_container import build_test_container from .clean import clean_all_images, clean_flavor_images from .export import export from .generate_language_activation import generate_language_activation +from .install_starter_scripts import install_starter_scripts from .push import push +from .push_test_container import push_test_container from .run_db_tests import run_db_test from .save import save -from .upload import upload from .security_scan import security_scan -from .install_starter_scripts import install_starter_scripts -from .build_test_container import build_test_container -from .push_test_container import push_test_container +from .upload import upload diff --git a/exasol_script_languages_container_tool/cli/commands/build.py b/exasol_script_languages_container_tool/cli/commands/build.py index 45206626..6a76ed79 100644 --- a/exasol_script_languages_container_tool/cli/commands/build.py +++ b/exasol_script_languages_container_tool/cli/commands/build.py @@ -1,14 +1,25 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple import click from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from 
exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) from exasol_script_languages_container_tool.cli.options.goal_options import goal_options from exasol_script_languages_container_tool.lib import api @@ -17,71 +28,76 @@ @add_options(flavor_options) @add_options(goal_options) @add_options(build_options) -@click.option('--shortcut-build/--no-shortcut-build', default=True, - help="Forces the system to complete to build all all stages, " - "but not to rebuild them. If the target images are locally available " - "they will be used as is. If the source images locally available " - "they will be taged with target image name. " - "If the source images can be loaded from file or pulled from a docker registry " - "they will get loaded or pulled. The only case, in which them get builded is " - "when they are not otherwise available. " - "This includes the case where a higher stage which transitivily " - "depends on a images is somewhere available, " - "but the images as self is not available.") +@click.option( + "--shortcut-build/--no-shortcut-build", + default=True, + help="Forces the system to complete to build all all stages, " + "but not to rebuild them. If the target images are locally available " + "they will be used as is. 
If the source images locally available " + "they will be taged with target image name. " + "If the source images can be loaded from file or pulled from a docker registry " + "they will get loaded or pulled. The only case, in which them get builded is " + "when they are not otherwise available. " + "This includes the case where a higher stage which transitivily " + "depends on a images is somewhere available, " + "but the images as self is not available.", +) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def build(flavor_path: Tuple[str, ...], - goal: Tuple[str, ...], - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - shortcut_build: bool, - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def build( + flavor_path: Tuple[str, ...], + goal: Tuple[str, ...], + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + shortcut_build: bool, + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + 
task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command builds all stages of the script-language-container flavor. If stages are cached in a docker registry, this command is going to pull them, instead of building them. """ with TerminationHandler(): - api.build(flavor_path=flavor_path, - goal=goal, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - shortcut_build=shortcut_build, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + api.build( + flavor_path=flavor_path, + goal=goal, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + shortcut_build=shortcut_build, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + 
target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/cli/commands/build_test_container.py b/exasol_script_languages_container_tool/cli/commands/build_test_container.py index 627ef434..8ea38d80 100644 --- a/exasol_script_languages_container_tool/cli/commands/build_test_container.py +++ b/exasol_script_languages_container_tool/cli/commands/build_test_container.py @@ -1,13 +1,24 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.test_container_options import test_container_options +from 
exasol_script_languages_container_tool.cli.options.test_container_options import ( + test_container_options, +) from exasol_script_languages_container_tool.lib import api @@ -18,27 +29,27 @@ @add_options(system_options) @add_options(luigi_logging_options) def build_test_container( - test_container_folder: str, - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool + test_container_folder: str, + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, ): """ Builds the test container docker image. 
@@ -67,5 +78,5 @@ def build_test_container( workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) diff --git a/exasol_script_languages_container_tool/cli/commands/clean.py b/exasol_script_languages_container_tool/cli/commands/clean.py index 8295b947..37e8f9d5 100644 --- a/exasol_script_languages_container_tool/cli/commands/clean.py +++ b/exasol_script_languages_container_tool/cli/commands/clean.py @@ -1,32 +1,43 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import \ - simple_docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import output_directory_option, \ - system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + simple_docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + output_directory_option, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) from exasol_script_languages_container_tool.lib import api -@cli.command(short_help="Cleans script-languages-container docker images for the given flavor.") +@cli.command( + short_help="Cleans script-languages-container docker images 
for the given flavor." +) @add_options(flavor_options) @add_options([output_directory_option]) @add_options(simple_docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def clean_flavor_images(flavor_path: Tuple[str, ...], - output_directory: str, - docker_repository_name: str, - docker_tag_prefix: str, - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def clean_flavor_images( + flavor_path: Tuple[str, ...], + output_directory: str, + docker_repository_name: str, + docker_tag_prefix: str, + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command removes the docker images of all stages of the script languages container for the given flavor. """ @@ -39,23 +50,25 @@ def clean_flavor_images(flavor_path: Tuple[str, ...], workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) -@cli.command(short_help="Cleans all script-languages-container docker images for all flavors.") +@cli.command( + short_help="Cleans all script-languages-container docker images for all flavors." 
+) @add_options([output_directory_option]) @add_options(simple_docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) def clean_all_images( - output_directory: str, - docker_repository_name: str, - docker_tag_prefix: str, - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool + output_directory: str, + docker_repository_name: str, + docker_tag_prefix: str, + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, ): """ This command removes the docker images of all stages of the script languages container for all flavors. @@ -68,7 +81,8 @@ def clean_all_images( workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) + # TODO add commands clean containers, networks, all diff --git a/exasol_script_languages_container_tool/cli/commands/export.py b/exasol_script_languages_container_tool/cli/commands/export.py index 7ffc5373..75b4a622 100644 --- a/exasol_script_languages_container_tool/cli/commands/export.py +++ b/exasol_script_languages_container_tool/cli/commands/export.py @@ -1,82 +1,101 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple import click from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.options.build_options import ( + 
build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options -from exasol_script_languages_container_tool.cli.options.goal_options import release_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) +from exasol_script_languages_container_tool.cli.options.goal_options import ( + release_options, +) from exasol_script_languages_container_tool.lib import api @cli.command(short_help="Exports the script-language-container.") @add_options(flavor_options) @add_options(release_options) -@click.option('--export-path', type=click.Path(exists=False, file_okay=False, dir_okay=True), default=None) -@click.option('--release-name', type=str, default=None) +@click.option( + "--export-path", + type=click.Path(exists=False, file_okay=False, dir_okay=True), + default=None, +) +@click.option("--release-name", type=str, default=None) @add_options(build_options) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def export(flavor_path: Tuple[str, ...], - release_goal: Tuple[str, ...], - export_path: Optional[str], - release_name: Optional[str], - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: 
Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def export( + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...], + export_path: Optional[str], + release_name: Optional[str], + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command exports the whole script-language-container package of the flavor, ready for the upload into the bucketfs. If the stages do not exists locally, the system will build or pull them before the exporting the packaged container. 
""" with TerminationHandler(): - export_result = api.export(flavor_path=flavor_path, - release_goal=release_goal, - export_path=export_path, - release_name=release_name, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) - with open(export_result.command_line_output_path, "r") as f: + export_result = api.export( + flavor_path=flavor_path, + release_goal=release_goal, + export_path=export_path, + release_name=release_name, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + 
workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) + with open(export_result.command_line_output_path) as f: print(f.read()) diff --git a/exasol_script_languages_container_tool/cli/commands/generate_language_activation.py b/exasol_script_languages_container_tool/cli/commands/generate_language_activation.py index 0772b109..864617ff 100644 --- a/exasol_script_languages_container_tool/cli/commands/generate_language_activation.py +++ b/exasol_script_languages_container_tool/cli/commands/generate_language_activation.py @@ -2,25 +2,29 @@ from exasol_integration_test_docker_environment.cli.cli import cli from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import single_flavor_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + single_flavor_options, +) from exasol_script_languages_container_tool.lib import api @cli.command(short_help="Generate the language activation statement.") @add_options(single_flavor_options) -@click.option('--bucketfs-name', type=str, required=True) -@click.option('--bucket-name', type=str, required=True) -@click.option('--container-name', type=str, required=True) -@click.option('--path-in-bucket', type=str, required=False, default="") +@click.option("--bucketfs-name", type=str, required=True) +@click.option("--bucket-name", type=str, required=True) +@click.option("--container-name", type=str, required=True) +@click.option("--path-in-bucket", type=str, required=False, default="") def generate_language_activation( - flavor_path: str, - bucketfs_name: str, - bucket_name: str, - container_name: str, - path_in_bucket: str): + flavor_path: str, + bucketfs_name: str, + bucket_name: str, + container_name: str, + path_in_bucket: str, +): """ Generate the language activation statement. 
""" - _, _, result = \ - api.generate_language_activation(flavor_path, bucketfs_name, bucket_name, container_name, path_in_bucket) + _, _, result = api.generate_language_activation( + flavor_path, bucketfs_name, bucket_name, container_name, path_in_bucket + ) print(result) diff --git a/exasol_script_languages_container_tool/cli/commands/install_starter_scripts.py b/exasol_script_languages_container_tool/cli/commands/install_starter_scripts.py index ba0ce4be..21e78168 100644 --- a/exasol_script_languages_container_tool/cli/commands/install_starter_scripts.py +++ b/exasol_script_languages_container_tool/cli/commands/install_starter_scripts.py @@ -1,21 +1,30 @@ import click - from exasol_integration_test_docker_environment.cli.cli import cli from exasol_script_languages_container_tool.lib import api @cli.command(short_help="Install starter scripts.") -@click.option("--install-path", default=".", - type=click.Path(file_okay=False, dir_okay=True), - help="Target path where starter scripts will be deployed.") -@click.option("--script-dir", default="exaslct_scripts", type=str, - help="Subdirectory in install path where starter scripts will be deployed.") -@click.option('--force-install/--no-force-install', default=False, - help="Forces installation. No prompts will be shown if files/directories already exists. " - "They will be silently overwritten.") +@click.option( + "--install-path", + default=".", + type=click.Path(file_okay=False, dir_okay=True), + help="Target path where starter scripts will be deployed.", +) +@click.option( + "--script-dir", + default="exaslct_scripts", + type=str, + help="Subdirectory in install path where starter scripts will be deployed.", +) +@click.option( + "--force-install/--no-force-install", + default=False, + help="Forces installation. No prompts will be shown if files/directories already exists. 
" + "They will be silently overwritten.", +) def install_starter_scripts(install_path: str, script_dir: str, force_install: bool): - """" + """ " This command installs the starter scripts which can be used to run this project automatically in an isolated environment. """ diff --git a/exasol_script_languages_container_tool/cli/commands/push.py b/exasol_script_languages_container_tool/cli/commands/push.py index 00cd4f1f..8ec81817 100644 --- a/exasol_script_languages_container_tool/cli/commands/push.py +++ b/exasol_script_languages_container_tool/cli/commands/push.py @@ -1,14 +1,27 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.push_options import push_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.push_options import ( + push_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import 
flavor_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) from exasol_script_languages_container_tool.cli.options.goal_options import goal_options from exasol_script_languages_container_tool.lib import api @@ -21,58 +34,60 @@ @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def push(flavor_path: Tuple[str, ...], - goal: Tuple[str, ...], - force_push: bool, - push_all: bool, - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def push( + flavor_path: Tuple[str, ...], + goal: Tuple[str, ...], + force_push: bool, + push_all: bool, + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command pushes all stages of the 
script-language-container flavor. If the stages do not exists locally, the system will build or pull them before the push. """ with TerminationHandler(): - api.push(flavor_path=flavor_path, - goal=goal, - force_push=force_push, - push_all=push_all, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + api.push( + flavor_path=flavor_path, + goal=goal, + force_push=force_push, + push_all=push_all, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + 
task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/cli/commands/push_test_container.py b/exasol_script_languages_container_tool/cli/commands/push_test_container.py index 9100fda1..c0096e5d 100644 --- a/exasol_script_languages_container_tool/cli/commands/push_test_container.py +++ b/exasol_script_languages_container_tool/cli/commands/push_test_container.py @@ -1,49 +1,65 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple import click -from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.cli import cli # type: ignore +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.test_container_options import test_container_options +from exasol_script_languages_container_tool.cli.options.test_container_options import ( + test_container_options, +) from 
exasol_script_languages_container_tool.lib import api @cli.command(short_help="Pushes the test container docker image to the registry.") @add_options(test_container_options) -@click.option("--force-push", type=bool, default=False, help="Push images also if they already exist.") +@click.option( + "--force-push", + type=bool, + default=False, + help="Push images also if they already exist.", +) @click.option("--push-all", type=bool, default=False, help="Push all images.") @add_options(build_options) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) def push_test_container( - test_container_folder: str, - force_push: bool, - push_all: bool, - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool + test_container_folder: str, + force_push: bool, + push_all: bool, + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + 
task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, ): """ Push the test container docker image to the registry. @@ -74,5 +90,5 @@ def push_test_container( workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) diff --git a/exasol_script_languages_container_tool/cli/commands/run_db_tests.py b/exasol_script_languages_container_tool/cli/commands/run_db_tests.py index 47a5c659..53a0b6ef 100644 --- a/exasol_script_languages_container_tool/cli/commands/run_db_tests.py +++ b/exasol_script_languages_container_tool/cli/commands/run_db_tests.py @@ -1,19 +1,36 @@ -from typing import Tuple, Optional, Any +from typing import Any, Optional, Tuple import click -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler -from exasol_integration_test_docker_environment.lib.api.common import add_options - -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options -from exasol_script_languages_container_tool.cli.options.goal_options import release_options from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.options.test_environment_options import test_environment_options, \ - docker_db_options, external_db_options +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + 
docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.options.test_environment_options import ( + docker_db_options, + external_db_options, + test_environment_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) +from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.test_container_options import test_container_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) +from exasol_script_languages_container_tool.cli.options.goal_options import ( + release_options, +) +from exasol_script_languages_container_tool.cli.options.test_container_options import ( + test_container_options, +) from exasol_script_languages_container_tool.lib import api from exasol_script_languages_container_tool.lib.api import api_errors @@ -21,119 +38,171 @@ @cli.command(short_help="Runs integration tests.") @add_options(flavor_options) @add_options(release_options) -@click.option('--generic-language-test', multiple=True, type=str, - help="Specifies for which languages the test runner executes generic language tests." - "The option can be repeated with different languages. " - "The test runner will run the generic language test for each language." - ) -@click.option('--test-folder', multiple=True, type=click.Path(), - help="Specifies in which directories the test runners looks for test files to execute." - "The option can be repeated with different directories. " - "The test runner will run the test files in each of these directories." - ) -@click.option('--test-file', multiple=True, type=click.Path(), - help="Specifies in which test-files the test runners should execute." - "The option can be repeated with different test files. 
" - "The test runner will run all specified test files." - ) -@click.option('--test-language', multiple=True, type=str, default=[None], - help="Specifies with which language the test files get executed." - "The option can be repeated with different languages. " - "The test runner will run the test files with all specified languages." - ) -@click.option('--test', multiple=True, type=str, - help="Define restriction which tests in the test files should be executed." - "The option can be repeated with different restrictions. " - "The test runner will run the test files with all specified restrictions." - ) +@click.option( + "--generic-language-test", + multiple=True, + type=str, + help="Specifies for which languages the test runner executes generic language tests." + "The option can be repeated with different languages. " + "The test runner will run the generic language test for each language.", +) +@click.option( + "--test-folder", + multiple=True, + type=click.Path(), + help="Specifies in which directories the test runners looks for test files to execute." + "The option can be repeated with different directories. " + "The test runner will run the test files in each of these directories.", +) +@click.option( + "--test-file", + multiple=True, + type=click.Path(), + help="Specifies in which test-files the test runners should execute." + "The option can be repeated with different test files. " + "The test runner will run all specified test files.", +) +@click.option( + "--test-language", + multiple=True, + type=str, + default=[None], + help="Specifies with which language the test files get executed." + "The option can be repeated with different languages. " + "The test runner will run the test files with all specified languages.", +) +@click.option( + "--test", + multiple=True, + type=str, + help="Define restriction which tests in the test files should be executed." + "The option can be repeated with different restrictions. 
" + "The test runner will run the test files with all specified restrictions.", +) @add_options(test_environment_options) @add_options(docker_db_options) @add_options(external_db_options) -@click.option('--db-mem-size', type=str, default="2 GiB", show_default=True, - help="The main memory used by the database. Format , e.g. 1 GiB. The minimum size is 1 GB, below that the database will not start.") -@click.option('--db-disk-size', type=str, default="2 GiB", show_default=True, - help="The disk size available for the database. Format , e.g. 1 GiB. The minimum size is 100 MiB. However, the setup creates volume files with at least 2 GB larger size, because the database needs at least so much more disk.") -@click.option('--test-environment-vars', type=str, default="""{}""", - show_default=True, - help="""Specifies the environment variables for the test runner as a json - in the form of {"":}.""") -@click.option('--test-log-level', default="critical", - type=click.Choice(['critical', 'error', "warning", "info", "debug"]), - show_default=True) -@click.option('--reuse-database/--no-reuse-database', default=False, - help="Reuse a previous create test-database and " - "disables the clean up of the test-database to allow reuse later.") -@click.option('--reuse-database-setup/--no-reuse-database-setup', default=False, - help="Reuse a previous executed database setup in a reused database") -@click.option('--reuse-uploaded-container/--no-reuse-uploaded-container', default=False, - help="Reuse the uploaded script-langauge-container in a reused database.") -@click.option('--reuse-test-container/--no-reuse-test-container', default=False, - help="Reuse the test container which is used for test execution.") -@click.option('--reuse-test-environment/--no-reuse-test-environment', default=False, - help="Reuse the whole test environment with docker network, test container, " - "database, database setup and uploaded container") +@click.option( + "--db-mem-size", + type=str, + default="2 
GiB", + show_default=True, + help="The main memory used by the database. Format , e.g. 1 GiB." + " The minimum size is 1 GB, below that the database will not start.", +) +@click.option( + "--db-disk-size", + type=str, + default="2 GiB", + show_default=True, + help="The disk size available for the database. Format , e.g. 1 GiB. The minimum size is 100 MiB." + "However, the setup creates volume files with at least 2 GB larger size," + " because the database needs at least so much more disk.", +) +@click.option( + "--test-environment-vars", + type=str, + default="""{}""", + show_default=True, + help="""Specifies the environment variables for the test runner as a json + in the form of {"":}.""", +) +@click.option( + "--test-log-level", + default="critical", + type=click.Choice(["critical", "error", "warning", "info", "debug"]), + show_default=True, +) +@click.option( + "--reuse-database/--no-reuse-database", + default=False, + help="Reuse a previous create test-database and " + "disables the clean up of the test-database to allow reuse later.", +) +@click.option( + "--reuse-database-setup/--no-reuse-database-setup", + default=False, + help="Reuse a previous executed database setup in a reused database", +) +@click.option( + "--reuse-uploaded-container/--no-reuse-uploaded-container", + default=False, + help="Reuse the uploaded script-langauge-container in a reused database.", +) +@click.option( + "--reuse-test-container/--no-reuse-test-container", + default=False, + help="Reuse the test container which is used for test execution.", +) +@click.option( + "--reuse-test-environment/--no-reuse-test-environment", + default=False, + help="Reuse the whole test environment with docker network, test container, " + "database, database setup and uploaded container", +) @add_options(test_container_options) @add_options(build_options) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def run_db_test(flavor_path: Tuple[str, ...], 
- release_goal: Tuple[str, ...], - generic_language_test: Tuple[str, ...], - test_folder: Tuple[str, ...], - test_file: Tuple[str, ...], - test_language: Tuple[str, ...], - test: Tuple[str, ...], - environment_type: str, - max_start_attempts: int, - docker_db_image_version: str, - docker_db_image_name: str, - db_os_access: str, - create_certificates: bool, - additional_db_parameter: Tuple[str, ...], - external_exasol_db_host: Optional[str], - external_exasol_db_port: int, - external_exasol_bucketfs_port: int, - external_exasol_ssh_port: Optional[int], - external_exasol_db_user: Optional[str], - external_exasol_db_password: Optional[str], - external_exasol_bucketfs_write_password: Optional[str], - external_exasol_xmlrpc_host: Optional[str], - external_exasol_xmlrpc_port: int, - external_exasol_xmlrpc_user: str, - external_exasol_xmlrpc_password: Optional[str], - external_exasol_xmlrpc_cluster_name: str, - db_mem_size: str, - db_disk_size: str, - test_environment_vars: str, - test_log_level: str, - reuse_database: bool, - reuse_database_setup: bool, - reuse_uploaded_container: bool, - reuse_test_container: bool, - reuse_test_environment: bool, - test_container_folder: str, - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def run_db_test( + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...], + generic_language_test: Tuple[str, ...], + 
test_folder: Tuple[str, ...], + test_file: Tuple[str, ...], + test_language: Tuple[str, ...], + test: Tuple[str, ...], + environment_type: str, + max_start_attempts: int, + docker_db_image_version: str, + docker_db_image_name: str, + db_os_access: str, + create_certificates: bool, + additional_db_parameter: Tuple[str, ...], + external_exasol_db_host: Optional[str], + external_exasol_db_port: int, + external_exasol_bucketfs_port: int, + external_exasol_ssh_port: Optional[int], + external_exasol_db_user: Optional[str], + external_exasol_db_password: Optional[str], + external_exasol_bucketfs_write_password: Optional[str], + external_exasol_xmlrpc_host: Optional[str], + external_exasol_xmlrpc_port: int, + external_exasol_xmlrpc_user: str, + external_exasol_xmlrpc_password: Optional[str], + external_exasol_xmlrpc_cluster_name: str, + db_mem_size: str, + db_disk_size: str, + test_environment_vars: str, + test_log_level: str, + reuse_database: bool, + reuse_database_setup: bool, + reuse_uploaded_container: bool, + reuse_test_container: bool, + reuse_test_environment: bool, + test_container_folder: str, + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command runs the integration tests in local docker-db. The system spawns a test environment in which the test are executed. 
@@ -143,62 +212,64 @@ def run_db_test(flavor_path: Tuple[str, ...], """ with TerminationHandler(): try: - result = api.run_db_test(flavor_path=flavor_path, - release_goal=release_goal, - generic_language_test=generic_language_test, - test_folder=test_folder, - test_file=test_file, - test_language=test_language, - test=test, - environment_type=environment_type, - max_start_attempts=max_start_attempts, - docker_db_image_version=docker_db_image_version, - docker_db_image_name=docker_db_image_name, - db_os_access=db_os_access, - create_certificates=create_certificates, - additional_db_parameter=additional_db_parameter, - external_exasol_db_host=external_exasol_db_host, - external_exasol_db_port=external_exasol_db_port, - external_exasol_bucketfs_port=external_exasol_bucketfs_port, - external_exasol_db_user=external_exasol_db_user, - external_exasol_db_password=external_exasol_db_password, - external_exasol_ssh_port=external_exasol_ssh_port, - external_exasol_bucketfs_write_password=external_exasol_bucketfs_write_password, - external_exasol_xmlrpc_host=external_exasol_xmlrpc_host, - external_exasol_xmlrpc_port=external_exasol_xmlrpc_port, - external_exasol_xmlrpc_user=external_exasol_xmlrpc_user, - external_exasol_xmlrpc_password=external_exasol_xmlrpc_password, - external_exasol_xmlrpc_cluster_name=external_exasol_xmlrpc_cluster_name, - db_mem_size=db_mem_size, - db_disk_size=db_disk_size, - test_environment_vars=test_environment_vars, - test_log_level=test_log_level, - reuse_database=reuse_database, - reuse_database_setup=reuse_database_setup, - reuse_uploaded_container=reuse_uploaded_container, - reuse_test_container=reuse_test_container, - reuse_test_environment=reuse_test_environment, - test_container_folder=test_container_folder, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - 
log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file) + result = api.run_db_test( + flavor_path=flavor_path, + release_goal=release_goal, + generic_language_test=generic_language_test, + test_folder=test_folder, + test_file=test_file, + test_language=test_language, + test=test, + environment_type=environment_type, + max_start_attempts=max_start_attempts, + docker_db_image_version=docker_db_image_version, + docker_db_image_name=docker_db_image_name, + db_os_access=db_os_access, + create_certificates=create_certificates, + additional_db_parameter=additional_db_parameter, + external_exasol_db_host=external_exasol_db_host, + external_exasol_db_port=external_exasol_db_port, + external_exasol_bucketfs_port=external_exasol_bucketfs_port, + external_exasol_db_user=external_exasol_db_user, + external_exasol_db_password=external_exasol_db_password, + external_exasol_ssh_port=external_exasol_ssh_port, + external_exasol_bucketfs_write_password=external_exasol_bucketfs_write_password, + external_exasol_xmlrpc_host=external_exasol_xmlrpc_host, + external_exasol_xmlrpc_port=external_exasol_xmlrpc_port, + external_exasol_xmlrpc_user=external_exasol_xmlrpc_user, + external_exasol_xmlrpc_password=external_exasol_xmlrpc_password, + external_exasol_xmlrpc_cluster_name=external_exasol_xmlrpc_cluster_name, + db_mem_size=db_mem_size, + 
db_disk_size=db_disk_size, + test_environment_vars=test_environment_vars, + test_log_level=test_log_level, + reuse_database=reuse_database, + reuse_database_setup=reuse_database_setup, + reuse_uploaded_container=reuse_uploaded_container, + reuse_test_container=reuse_test_container, + reuse_test_environment=reuse_test_environment, + test_container_folder=test_container_folder, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) if result.command_line_output_path.exists(): with result.command_line_output_path.open("r") as f: print(f.read()) diff --git a/exasol_script_languages_container_tool/cli/commands/save.py b/exasol_script_languages_container_tool/cli/commands/save.py index a973be6a..a02a3f5c 100644 --- a/exasol_script_languages_container_tool/cli/commands/save.py +++ b/exasol_script_languages_container_tool/cli/commands/save.py @@ -1,86 +1,107 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple import click from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from 
exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) from exasol_script_languages_container_tool.cli.options.goal_options import goal_options from exasol_script_languages_container_tool.lib import api @cli.command(short_help="Saves all stages of a script-language-container flavor.") @add_options(flavor_options) -@click.option('--save-directory', - type=click.Path(file_okay=False, dir_okay=True), - help="Directory where to save the image tarballs") -@click.option('--force-save/--no-force-save', default=False, - help="Forces the system to overwrite existing save for build steps that run") -@click.option('--save-all/--no-save-all', default=False, - help="Forces the system to save all images of build-steps that are specified by the goals") +@click.option( + "--save-directory", + type=click.Path(file_okay=False, dir_okay=True), + help="Directory where to save the image tarballs", +) +@click.option( + "--force-save/--no-force-save", + default=False, + 
help="Forces the system to overwrite existing save for build steps that run", +) +@click.option( + "--save-all/--no-save-all", + default=False, + help="Forces the system to save all images of build-steps that are specified by the goals", +) @add_options(goal_options) @add_options(build_options) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def save(flavor_path: Tuple[str, ...], - save_directory: Optional[str], - force_save: bool, - save_all: bool, - goal: Tuple[str, ...], - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def save( + flavor_path: Tuple[str, ...], + save_directory: Optional[str], + force_save: bool, + save_all: bool, + goal: Tuple[str, ...], + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + 
use_job_specific_log_file: bool, +): """ This command saves all stages of the script-language-container flavor to a local directory. If the stages do not exists locally, the system will build or pull them before the execution of save. """ with TerminationHandler(): - api.save(flavor_path=flavor_path, - save_directory=save_directory, - force_save=force_save, - save_all=save_all, - goal=goal, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + api.save( + flavor_path=flavor_path, + save_directory=save_directory, + force_save=force_save, + save_all=save_all, + goal=goal, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + 
target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/cli/commands/security_scan.py b/exasol_script_languages_container_tool/cli/commands/security_scan.py index f4f7db4b..b0835cd7 100644 --- a/exasol_script_languages_container_tool/cli/commands/security_scan.py +++ b/exasol_script_languages_container_tool/cli/commands/security_scan.py @@ -1,13 +1,24 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple -from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.cli import cli # type: ignore +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options +from 
exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) from exasol_script_languages_container_tool.lib import api @@ -17,56 +28,58 @@ @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def security_scan(flavor_path: Tuple[str, ...], - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def security_scan( + flavor_path: Tuple[str, ...], + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command executes the security scan, which must be defined as separate step in the build steps declaration. The scan runs the docker container of the respective step, passing a folder of the output-dir as argument. 
If the stages do not exists locally, the system will build or pull them before running the scan. """ with TerminationHandler(): - scan_result = api.security_scan(flavor_path=flavor_path, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + scan_result = api.security_scan( + flavor_path=flavor_path, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + 
use_job_specific_log_file=use_job_specific_log_file, + ) if scan_result.report_path.exists(): with scan_result.report_path.open("r") as f: print(f.read()) diff --git a/exasol_script_languages_container_tool/cli/commands/upload.py b/exasol_script_languages_container_tool/cli/commands/upload.py index b1cd5ebe..192f1f46 100644 --- a/exasol_script_languages_container_tool/cli/commands/upload.py +++ b/exasol_script_languages_container_tool/cli/commands/upload.py @@ -1,104 +1,118 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple import click -from exasol_integration_test_docker_environment.cli.termination_handler import TerminationHandler +from exasol_integration_test_docker_environment.cli.cli import cli # type: ignore +from exasol_integration_test_docker_environment.cli.options.build_options import ( + build_options, +) +from exasol_integration_test_docker_environment.cli.options.docker_repository_options import ( + docker_repository_options, +) +from exasol_integration_test_docker_environment.cli.options.system_options import ( + luigi_logging_options, + system_options, +) +from exasol_integration_test_docker_environment.cli.termination_handler import ( + TerminationHandler, +) from exasol_integration_test_docker_environment.lib.api.common import add_options -from exasol_script_languages_container_tool.cli.options.flavor_options import flavor_options -from exasol_script_languages_container_tool.cli.options.goal_options import release_options -from exasol_integration_test_docker_environment.cli.cli import cli -from exasol_integration_test_docker_environment.cli.options.build_options import build_options -from exasol_integration_test_docker_environment.cli.options.docker_repository_options import docker_repository_options -from exasol_integration_test_docker_environment.cli.options.system_options import system_options, luigi_logging_options +from exasol_script_languages_container_tool.cli.options.flavor_options import ( + flavor_options, +) 
+from exasol_script_languages_container_tool.cli.options.goal_options import ( + release_options, +) from exasol_script_languages_container_tool.lib import api @cli.command(short_help="Uploads the script-language-container to the database.") @add_options(flavor_options) -@click.option('--database-host', type=str, - required=True) -@click.option('--bucketfs-port', type=int, required=True) -@click.option('--bucketfs-username', type=str, required=True) -@click.option('--bucketfs-name', type=str, required=True) -@click.option('--bucket-name', type=str, required=True) -@click.option('--bucketfs-password', type=str) -@click.option('--bucketfs-https/--no-bucketfs-https', default=False) -@click.option('--path-in-bucket', type=str, required=False, default='') +@click.option("--database-host", type=str, required=True) +@click.option("--bucketfs-port", type=int, required=True) +@click.option("--bucketfs-username", type=str, required=True) +@click.option("--bucketfs-name", type=str, required=True) +@click.option("--bucket-name", type=str, required=True) +@click.option("--bucketfs-password", type=str) +@click.option("--bucketfs-https/--no-bucketfs-https", default=False) +@click.option("--path-in-bucket", type=str, required=False, default="") @add_options(release_options) -@click.option('--release-name', type=str, default=None) +@click.option("--release-name", type=str, default=None) @add_options(build_options) @add_options(docker_repository_options) @add_options(system_options) @add_options(luigi_logging_options) -def upload(flavor_path: Tuple[str, ...], - database_host: str, - bucketfs_port: int, - bucketfs_username: str, - bucketfs_name: str, - bucket_name: str, - bucketfs_password: Optional[str], - bucketfs_https: bool, - path_in_bucket: str, - release_goal: Tuple[str, ...], - release_name: Optional[str], - force_rebuild: bool, - force_rebuild_from: Tuple[str, ...], - force_pull: bool, - output_directory: str, - temporary_base_directory: str, - log_build_context_content: 
bool, - cache_directory: Optional[str], - build_name: Optional[str], - source_docker_repository_name: str, - source_docker_tag_prefix: str, - source_docker_username: Optional[str], - source_docker_password: Optional[str], - target_docker_repository_name: str, - target_docker_tag_prefix: str, - target_docker_username: Optional[str], - target_docker_password: Optional[str], - workers: int, - task_dependencies_dot_file: Optional[str], - log_level: Optional[str], - use_job_specific_log_file: bool - ): +def upload( + flavor_path: Tuple[str, ...], + database_host: str, + bucketfs_port: int, + bucketfs_username: str, + bucketfs_name: str, + bucket_name: str, + bucketfs_password: Optional[str], + bucketfs_https: bool, + path_in_bucket: str, + release_goal: Tuple[str, ...], + release_name: Optional[str], + force_rebuild: bool, + force_rebuild_from: Tuple[str, ...], + force_pull: bool, + output_directory: str, + temporary_base_directory: str, + log_build_context_content: bool, + cache_directory: Optional[str], + build_name: Optional[str], + source_docker_repository_name: str, + source_docker_tag_prefix: str, + source_docker_username: Optional[str], + source_docker_password: Optional[str], + target_docker_repository_name: str, + target_docker_tag_prefix: str, + target_docker_username: Optional[str], + target_docker_password: Optional[str], + workers: int, + task_dependencies_dot_file: Optional[str], + log_level: Optional[str], + use_job_specific_log_file: bool, +): """ This command uploads the whole script-language-container package of the flavor to the database. If the stages or the packaged container do not exists locally, the system will build, pull or export them before the upload. 
""" with TerminationHandler(): - result = api.upload(flavor_path=flavor_path, - database_host=database_host, - bucketfs_port=bucketfs_port, - bucketfs_username=bucketfs_username, - bucketfs_name=bucketfs_name, - bucket_name=bucket_name, - bucketfs_password=bucketfs_password, - bucketfs_https=bucketfs_https, - path_in_bucket=path_in_bucket, - release_goal=release_goal, - release_name=release_name, - force_rebuild=force_rebuild, - force_rebuild_from=force_rebuild_from, - force_pull=force_pull, - output_directory=output_directory, - temporary_base_directory=temporary_base_directory, - log_build_context_content=log_build_context_content, - cache_directory=cache_directory, - build_name=build_name, - source_docker_repository_name=source_docker_repository_name, - source_docker_tag_prefix=source_docker_tag_prefix, - source_docker_username=source_docker_username, - source_docker_password=source_docker_password, - target_docker_repository_name=target_docker_repository_name, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=target_docker_username, - target_docker_password=target_docker_password, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + result = api.upload( + flavor_path=flavor_path, + database_host=database_host, + bucketfs_port=bucketfs_port, + bucketfs_username=bucketfs_username, + bucketfs_name=bucketfs_name, + bucket_name=bucket_name, + bucketfs_password=bucketfs_password, + bucketfs_https=bucketfs_https, + path_in_bucket=path_in_bucket, + release_goal=release_goal, + release_name=release_name, + force_rebuild=force_rebuild, + force_rebuild_from=force_rebuild_from, + force_pull=force_pull, + output_directory=output_directory, + temporary_base_directory=temporary_base_directory, + log_build_context_content=log_build_context_content, + cache_directory=cache_directory, + build_name=build_name, + 
source_docker_repository_name=source_docker_repository_name, + source_docker_tag_prefix=source_docker_tag_prefix, + source_docker_username=source_docker_username, + source_docker_password=source_docker_password, + target_docker_repository_name=target_docker_repository_name, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=target_docker_username, + target_docker_password=target_docker_password, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) with result.open("r") as f: print(f.read()) diff --git a/exasol_script_languages_container_tool/cli/options/flavor_options.py b/exasol_script_languages_container_tool/cli/options/flavor_options.py index f2932e72..42c69d77 100644 --- a/exasol_script_languages_container_tool/cli/options/flavor_options.py +++ b/exasol_script_languages_container_tool/cli/options/flavor_options.py @@ -2,18 +2,24 @@ def create_flavor_option(multiple): - help_message = "Path to the directory with the flavor definition.\n" + \ - "The last segment of the path is used as the name of the flavor." + help_message = ( + "Path to the directory with the flavor definition.\n" + + "The last segment of the path is used as the name of the flavor." + ) if multiple: - help_addition = "The option can be repeated with different flavors.\n" + \ - "The system will run the command for each flavor." + help_addition = ( + "The option can be repeated with different flavors.\n" + + "The system will run the command for each flavor." 
+ ) help_message = help_message + "\n" + help_addition - return click.option('--flavor-path', - required=True, - multiple=multiple, - type=click.Path(exists=True, file_okay=False, dir_okay=True), - help=help_message) + return click.option( + "--flavor-path", + required=True, + multiple=multiple, + type=click.Path(exists=True, file_okay=False, dir_okay=True), + help=help_message, + ) flavor_options = [create_flavor_option(multiple=True)] diff --git a/exasol_script_languages_container_tool/cli/options/goal_options.py b/exasol_script_languages_container_tool/cli/options/goal_options.py index 5206a474..119d5a87 100644 --- a/exasol_script_languages_container_tool/cli/options/goal_options.py +++ b/exasol_script_languages_container_tool/cli/options/goal_options.py @@ -1,17 +1,17 @@ import click goal_options = [ - click.option('--goal', multiple=True, type=str, - help="Selects which build stage will be build or pushed. " - "The system will build also all dependencies of the selected build stage. " - "The option can be repeated with different stages. " - "The system will than build all these stages and their dependencies." - )] + click.option( + "--goal", + multiple=True, + type=str, + help="Selects which build stage will be build or pushed. " + "The system will build also all dependencies of the selected build stage. " + "The option can be repeated with different stages. 
" + "The system will than build all these stages and their dependencies.", + ) +] release_options = [ - click.option('--release-goal', - type=str, - default=["release"], - multiple=True - ) + click.option("--release-goal", type=str, default=["release"], multiple=True) ] diff --git a/exasol_script_languages_container_tool/cli/options/test_container_options.py b/exasol_script_languages_container_tool/cli/options/test_container_options.py index d248f528..977090fa 100644 --- a/exasol_script_languages_container_tool/cli/options/test_container_options.py +++ b/exasol_script_languages_container_tool/cli/options/test_container_options.py @@ -3,7 +3,10 @@ TEST_CONTAINER_DEFAULT_DIRECTORY = "./test_container" test_container_options = [ - click.option('--test-container-folder', type=click.Path(exists=True, file_okay=False, dir_okay=True), - default=TEST_CONTAINER_DEFAULT_DIRECTORY, - help="Test folder containing 'Dockerfile', tests and test-data.") + click.option( + "--test-container-folder", + type=click.Path(exists=True, file_okay=False, dir_okay=True), + default=TEST_CONTAINER_DEFAULT_DIRECTORY, + help="Test folder containing 'Dockerfile', tests and test-data.", + ) ] diff --git a/exasol_script_languages_container_tool/lib/api/__init__.py b/exasol_script_languages_container_tool/lib/api/__init__.py index 0f3b90b4..da578de9 100644 --- a/exasol_script_languages_container_tool/lib/api/__init__.py +++ b/exasol_script_languages_container_tool/lib/api/__init__.py @@ -1,13 +1,12 @@ - from .build import build +from .build_test_container import build_test_container from .clean import clean_all_images, clean_flavor_images from .export import export from .generate_language_activation import generate_language_activation +from .install_starter_scripts import install_starter_scripts from .push import push +from .push_test_container import push_test_container from .run_db_tests import run_db_test from .save import save -from .upload import upload from .security_scan import security_scan 
-from .install_starter_scripts import install_starter_scripts -from .build_test_container import build_test_container -from .push_test_container import push_test_container \ No newline at end of file +from .upload import upload diff --git a/exasol_script_languages_container_tool/lib/api/api_errors.py b/exasol_script_languages_container_tool/lib/api/api_errors.py index 87e4f22d..3f9162e2 100644 --- a/exasol_script_languages_container_tool/lib/api/api_errors.py +++ b/exasol_script_languages_container_tool/lib/api/api_errors.py @@ -1,3 +1,2 @@ - class MissingArgumentError(ValueError): pass diff --git a/exasol_script_languages_container_tool/lib/api/build.py b/exasol_script_languages_container_tool/lib/api/build.py index e3cdb87b..67262244 100644 --- a/exasol_script_languages_container_tool/lib/api/build.py +++ b/exasol_script_languages_container_tool/lib/api/build.py @@ -1,38 +1,51 @@ -from typing import Tuple, Optional, Dict +from typing import Dict, Optional, Tuple -from exasol_integration_test_docker_environment.lib.api.common import import_build_steps, set_build_config, \ - set_docker_repository_config, generate_root_task, run_task, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) -from exasol_script_languages_container_tool.lib.tasks.build.docker_build import DockerBuild +from exasol_script_languages_container_tool.lib.tasks.build.docker_build import ( + DockerBuild, 
+) @cli_function -def build(flavor_path: Tuple[str, ...], - goal: Tuple[str, ...] = tuple(), - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] = tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - shortcut_build: bool = True, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> Dict[str, ImageInfo]: +def build( + flavor_path: Tuple[str, ...], + goal: Tuple[str, ...] = tuple(), + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + shortcut_build: bool = True, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> Dict[str, ImageInfo]: """ This command builds all stages of the script-language-container flavor. If stages are cached in a docker registry, they command is going to pull them, @@ -42,28 +55,43 @@ def build(flavor_path: Tuple[str, ...], """ import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + 
target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=DockerBuild, - flavor_paths=list(flavor_path), - goals=list(goal), - shortcut_build=shortcut_build) + return generate_root_task( + task_class=DockerBuild, + flavor_paths=list(flavor_path), + goals=list(goal), + shortcut_build=shortcut_build, + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/api/build_test_container.py b/exasol_script_languages_container_tool/lib/api/build_test_container.py index 36a1ec5f..70d3f34b 100644 --- a/exasol_script_languages_container_tool/lib/api/build_test_container.py +++ b/exasol_script_languages_container_tool/lib/api/build_test_container.py @@ -1,36 +1,42 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple from exasol_integration_test_docker_environment.lib import api from exasol_integration_test_docker_environment.lib.api.common import cli_function -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) -from exasol_script_languages_container_tool.cli.options.test_container_options import TEST_CONTAINER_DEFAULT_DIRECTORY -from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import build_test_container_content +from exasol_script_languages_container_tool.cli.options.test_container_options import ( + 
TEST_CONTAINER_DEFAULT_DIRECTORY, +) +from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import ( + build_test_container_content, +) @cli_function def build_test_container( - test_container_folder: str = TEST_CONTAINER_DEFAULT_DIRECTORY, - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] = tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True + test_container_folder: str = TEST_CONTAINER_DEFAULT_DIRECTORY, + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, ) -> ImageInfo: """ Build the test container docker image. @@ -58,5 +64,5 @@ def build_test_container( workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) diff --git a/exasol_script_languages_container_tool/lib/api/clean.py b/exasol_script_languages_container_tool/lib/api/clean.py index e8f1495b..42b7fc9d 100644 --- a/exasol_script_languages_container_tool/lib/api/clean.py +++ b/exasol_script_languages_container_tool/lib/api/clean.py @@ -1,22 +1,34 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple -from exasol_integration_test_docker_environment.lib.api.common import run_task, generate_root_task, \ - set_output_directory, set_docker_repository_config, import_build_steps, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_docker_repository_config, + set_output_directory, 
+) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) -from exasol_script_languages_container_tool.lib.tasks.clean.clean_images import CleanExaslcAllImages, \ - CleanExaslcFlavorsImages +from exasol_script_languages_container_tool.lib.tasks.clean.clean_images import ( + CleanExaslcAllImages, + CleanExaslcFlavorsImages, +) @cli_function -def clean_flavor_images(flavor_path: Tuple[str, ...], - output_directory: str = ".build_output", - docker_repository_name: str = 'exasol/script-language-container', - docker_tag_prefix: str = '', - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True) -> None: +def clean_flavor_images( + flavor_path: Tuple[str, ...], + output_directory: str = ".build_output", + docker_repository_name: str = "exasol/script-language-container", + docker_tag_prefix: str = "", + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> None: """ This command removes the docker images of all stages of the script languages container for the given flavor. 
raises: @@ -24,44 +36,60 @@ def clean_flavor_images(flavor_path: Tuple[str, ...], """ import_build_steps(flavor_path) set_output_directory(output_directory) - set_docker_repository_config(None, docker_repository_name, None, docker_tag_prefix, "source") - set_docker_repository_config(None, docker_repository_name, None, docker_tag_prefix, "target") + set_docker_repository_config( + None, docker_repository_name, None, docker_tag_prefix, "source" + ) + set_docker_repository_config( + None, docker_repository_name, None, docker_tag_prefix, "target" + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=CleanExaslcFlavorsImages, flavor_paths=list(flavor_path)) + return generate_root_task( + task_class=CleanExaslcFlavorsImages, flavor_paths=list(flavor_path) + ) - run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file) + run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) @cli_function def clean_all_images( - output_directory: str = '.build_output', - docker_repository_name: str = 'exasol/script-language-container', - docker_tag_prefix: str = '', - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True) -> None: + output_directory: str = ".build_output", + docker_repository_name: str = "exasol/script-language-container", + docker_tag_prefix: str = "", + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> None: """ This command removes the docker images of all stages of the script languages container for all flavors. 
raises: api_errors.TaskFailureError: if operation is not successful. """ set_output_directory(output_directory) - set_docker_repository_config(None, docker_repository_name, None, docker_tag_prefix, "source") - set_docker_repository_config(None, docker_repository_name, None, docker_tag_prefix, "target") + set_docker_repository_config( + None, docker_repository_name, None, docker_tag_prefix, "source" + ) + set_docker_repository_config( + None, docker_repository_name, None, docker_tag_prefix, "target" + ) def root_task_generator() -> DependencyLoggerBaseTask: return generate_root_task(task_class=CleanExaslcAllImages) - run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file) + run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) + # TODO add commands clean containers, networks, all diff --git a/exasol_script_languages_container_tool/lib/api/export.py b/exasol_script_languages_container_tool/lib/api/export.py index 8ff3d9fd..959e0ad1 100644 --- a/exasol_script_languages_container_tool/lib/api/export.py +++ b/exasol_script_languages_container_tool/lib/api/export.py @@ -1,38 +1,50 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple -from exasol_integration_test_docker_environment.lib.api.common import import_build_steps, set_build_config, \ - set_docker_repository_config, generate_root_task, run_task, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from 
exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) -from exasol_script_languages_container_tool.lib.tasks.export.export_containers import ExportContainers, \ - ExportContainerResult +from exasol_script_languages_container_tool.lib.tasks.export.export_containers import ( + ExportContainerResult, + ExportContainers, +) @cli_function -def export(flavor_path: Tuple[str, ...], - release_goal: Tuple[str, ...] = ('release',), - export_path: Optional[str] = None, - release_name: Optional[str] = None, - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] = tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True) -> ExportContainerResult: +def export( + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...] = ("release",), + export_path: Optional[str] = None, + release_name: Optional[str] = None, + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> ExportContainerResult: """ This command exports the whole script-language-container package of the flavor, ready for the upload into the bucketfs. If the stages do not exists locally, @@ -41,29 +53,44 @@ def export(flavor_path: Tuple[str, ...], :returns: ExportContainerResult """ import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + 
target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=ExportContainers, - flavor_paths=list(flavor_path), - release_goals=list(release_goal), - export_path=export_path, - release_name=release_name) + return generate_root_task( + task_class=ExportContainers, + flavor_paths=list(flavor_path), + release_goals=list(release_goal), + export_path=export_path, + release_name=release_name, + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/api/generate_language_activation.py b/exasol_script_languages_container_tool/lib/api/generate_language_activation.py index 4a402cd6..21f03b31 100644 --- a/exasol_script_languages_container_tool/lib/api/generate_language_activation.py +++ b/exasol_script_languages_container_tool/lib/api/generate_language_activation.py @@ -4,16 +4,19 @@ from exasol_integration_test_docker_environment.lib.api.common import cli_function -from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import LanguageDefinition +from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import ( + LanguageDefinition, +) @cli_function def generate_language_activation( - flavor_path: str, - bucketfs_name: str, - bucket_name: str, - container_name: str, - path_in_bucket: str = '') -> Tuple[str, str, str]: + flavor_path: str, + bucketfs_name: str, + bucket_name: str, + container_name: str, + path_in_bucket: str = "", +) -> Tuple[str, str, str]: """ Generate the 
language activation statement. :return: A tuple of language definition statements: The first one is the alter session statement, @@ -21,14 +24,16 @@ def generate_language_activation( the user. """ - language_definition = \ - LanguageDefinition(release_name=container_name, - flavor_path=flavor_path, - bucketfs_name=bucketfs_name, - bucket_name=bucket_name, - path_in_bucket=path_in_bucket) + language_definition = LanguageDefinition( + release_name=container_name, + flavor_path=flavor_path, + bucketfs_name=bucketfs_name, + bucket_name=bucket_name, + path_in_bucket=path_in_bucket, + ) - command_line_output_str = textwrap.dedent(f""" + command_line_output_str = textwrap.dedent( + f""" In SQL, you can activate the languages supported by the {Path(flavor_path).name} flavor by using the following statements: @@ -42,6 +47,10 @@ def generate_language_activation( To activate the flavor on the system: {language_definition.generate_alter_system()} - """) - return language_definition.generate_alter_session(), language_definition.generate_alter_system(), \ - command_line_output_str + """ + ) + return ( + language_definition.generate_alter_session(), + language_definition.generate_alter_system(), + command_line_output_str, + ) diff --git a/exasol_script_languages_container_tool/lib/api/install_starter_scripts.py b/exasol_script_languages_container_tool/lib/api/install_starter_scripts.py index 4e9d7e4f..5501e470 100644 --- a/exasol_script_languages_container_tool/lib/api/install_starter_scripts.py +++ b/exasol_script_languages_container_tool/lib/api/install_starter_scripts.py @@ -2,15 +2,18 @@ from exasol_integration_test_docker_environment.lib.api.common import cli_function -from exasol_script_languages_container_tool.lib.tasks.install_starter_scripts.run_starter_script_installation import \ - run_starter_script_installation +from exasol_script_languages_container_tool.lib.tasks.install_starter_scripts.run_starter_script_installation import ( + run_starter_script_installation, +) 
@cli_function -def install_starter_scripts(install_path: str = '.', - script_dir: str = 'exaslct_scripts', - force_install: bool = False) -> None: - """" +def install_starter_scripts( + install_path: str = ".", + script_dir: str = "exaslct_scripts", + force_install: bool = False, +) -> None: + """ " This command installs the starter scripts which can be used to run this project automatically in an isolated environment. """ diff --git a/exasol_script_languages_container_tool/lib/api/push.py b/exasol_script_languages_container_tool/lib/api/push.py index c4f2a8a0..0038366b 100644 --- a/exasol_script_languages_container_tool/lib/api/push.py +++ b/exasol_script_languages_container_tool/lib/api/push.py @@ -1,40 +1,53 @@ -from typing import Tuple, Optional, Dict, List +from typing import Dict, List, Optional, Tuple -from exasol_integration_test_docker_environment.lib.api.common import set_docker_repository_config, generate_root_task, \ - run_task, import_build_steps, set_build_config, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) from exasol_script_languages_container_tool.lib.api import api_errors -from exasol_script_languages_container_tool.lib.tasks.push.docker_push import DockerFlavorsPush +from exasol_script_languages_container_tool.lib.tasks.push.docker_push import ( + DockerFlavorsPush, +) @cli_function -def push(flavor_path: Tuple[str, ...], - goal: Tuple[str, ...] 
= tuple(), - force_push: bool = False, - push_all: bool = False, - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] = tuple(), - force_pull: bool = False, - output_directory: str = '.build_output', - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> Dict[str, List[ImageInfo]]: +def push( + flavor_path: Tuple[str, ...], + goal: Tuple[str, ...] = tuple(), + force_push: bool = False, + push_all: bool = False, + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> Dict[str, List[ImageInfo]]: """ This command pushes all stages of the script-language-container flavor. If the stages do not exists locally, the system will build or pull them before the push. 
@@ -43,29 +56,44 @@ def push(flavor_path: Tuple[str, ...], """ import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=DockerFlavorsPush, - force_push=force_push, - push_all=push_all, - goals=list(goal), - flavor_paths=list(flavor_path)) + return generate_root_task( + task_class=DockerFlavorsPush, + force_push=force_push, + push_all=push_all, + goals=list(goal), + flavor_paths=list(flavor_path), + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/api/push_test_container.py 
b/exasol_script_languages_container_tool/lib/api/push_test_container.py index 2c01b4d6..b0c96497 100644 --- a/exasol_script_languages_container_tool/lib/api/push_test_container.py +++ b/exasol_script_languages_container_tool/lib/api/push_test_container.py @@ -1,38 +1,45 @@ -from typing import Tuple, Optional +from typing import Optional, Tuple from exasol_integration_test_docker_environment.lib import api from exasol_integration_test_docker_environment.lib.api.common import cli_function -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) -from exasol_script_languages_container_tool.cli.options.test_container_options import TEST_CONTAINER_DEFAULT_DIRECTORY -from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import build_test_container_content +from exasol_script_languages_container_tool.cli.options.test_container_options import ( + TEST_CONTAINER_DEFAULT_DIRECTORY, +) +from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import ( + build_test_container_content, +) @cli_function def push_test_container( - test_container_folder: str = TEST_CONTAINER_DEFAULT_DIRECTORY, - force_push: bool = False, - push_all: bool = False, - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] 
= tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True) -> ImageInfo: + test_container_folder: str = TEST_CONTAINER_DEFAULT_DIRECTORY, + force_push: bool = False, + push_all: bool = False, + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] = tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> ImageInfo: """ Push the test container docker image to the registry. 
@@ -61,5 +68,5 @@ def push_test_container( workers=workers, task_dependencies_dot_file=task_dependencies_dot_file, log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file + use_job_specific_log_file=use_job_specific_log_file, ) diff --git a/exasol_script_languages_container_tool/lib/api/run_db_tests.py b/exasol_script_languages_container_tool/lib/api/run_db_tests.py index 145d61e5..ccbe9a87 100644 --- a/exasol_script_languages_container_tool/lib/api/run_db_tests.py +++ b/exasol_script_languages_container_tool/lib/api/run_db_tests.py @@ -1,78 +1,96 @@ import json -from typing import Tuple, Optional +from typing import Optional, Tuple -from exasol_integration_test_docker_environment.cli.options.test_environment_options import LATEST_DB_VERSION -from exasol_integration_test_docker_environment.lib.api.common import run_task, generate_root_task, \ - set_docker_repository_config, set_build_config, import_build_steps, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask -from exasol_integration_test_docker_environment.lib.test_environment.parameter.docker_db_test_environment_parameter import \ - DbOsAccess +from exasol_integration_test_docker_environment.cli.options.test_environment_options import ( + LATEST_DB_VERSION, +) +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) +from exasol_integration_test_docker_environment.lib.data.environment_type import ( + EnvironmentType, +) +from exasol_integration_test_docker_environment.lib.test_environment.parameter.docker_db_test_environment_parameter import ( + DbOsAccess, +) # pylint: disable=line-too-long from exasol_script_languages_container_tool.lib.api import 
api_errors -from exasol_script_languages_container_tool.lib.tasks.test.test_container import TestContainer, AllTestsResult -from exasol_integration_test_docker_environment.lib.data.environment_type import EnvironmentType - -from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import build_test_container_content +from exasol_script_languages_container_tool.lib.tasks.test.test_container import ( + AllTestsResult, + TestContainer, +) +from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import ( + build_test_container_content, +) @cli_function -def run_db_test(flavor_path: Tuple[str, ...], - release_goal: Tuple[str, ...] = ('release',), - generic_language_test: Tuple[str, ...] = tuple(), - test_folder: Tuple[str, ...] = tuple(), - test_file: Tuple[str, ...] = tuple(), - test_language: Tuple[str, ...] = (None,), - test: Tuple[str, ...] = tuple(), - environment_type: str = 'docker_db', - max_start_attempts: int = 2, - docker_db_image_version: str = LATEST_DB_VERSION, - docker_db_image_name: str = "exasol/docker-db", - db_os_access: str = "DOCKER_EXEC", - create_certificates: bool = False, - additional_db_parameter: Tuple[str, ...] 
= tuple(), - external_exasol_db_host: Optional[str] = None, - external_exasol_db_port: int = 8563, - external_exasol_bucketfs_port: int = 2580, - external_exasol_ssh_port: Optional[int] = None, - external_exasol_db_user: Optional[str] = None, - external_exasol_db_password: Optional[str] = None, - external_exasol_bucketfs_write_password: Optional[str] = None, - external_exasol_xmlrpc_host: Optional[str] = None, - external_exasol_xmlrpc_port: int = 443, - external_exasol_xmlrpc_user: str = "admin", - external_exasol_xmlrpc_password: Optional[str] = None, - external_exasol_xmlrpc_cluster_name: str = "cluster1", - db_mem_size: str = '2 GiB', - db_disk_size: str = '2 GiB', - test_environment_vars: str = "{}", - test_log_level: str = 'critical', - reuse_database: bool = False, - reuse_database_setup: bool = False, - reuse_uploaded_container: bool = False, - reuse_test_container: bool = False, - reuse_test_environment: bool = False, - test_container_folder: str = "./test_container", - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] = tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> AllTestsResult: +def run_db_test( + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...] 
= ("release",), + generic_language_test: Tuple[str, ...] = tuple(), + test_folder: Tuple[str, ...] = tuple(), + test_file: Tuple[str, ...] = tuple(), + test_language: Tuple[str, ...] = (None,), # type: ignore + test: Tuple[str, ...] = tuple(), + environment_type: str = "docker_db", + max_start_attempts: int = 2, + docker_db_image_version: str = LATEST_DB_VERSION, + docker_db_image_name: str = "exasol/docker-db", + db_os_access: str = "DOCKER_EXEC", + create_certificates: bool = False, + additional_db_parameter: Tuple[str, ...] = tuple(), + external_exasol_db_host: Optional[str] = None, + external_exasol_db_port: int = 8563, + external_exasol_bucketfs_port: int = 2580, + external_exasol_ssh_port: Optional[int] = None, + external_exasol_db_user: Optional[str] = None, + external_exasol_db_password: Optional[str] = None, + external_exasol_bucketfs_write_password: Optional[str] = None, + external_exasol_xmlrpc_host: Optional[str] = None, + external_exasol_xmlrpc_port: int = 443, + external_exasol_xmlrpc_user: str = "admin", + external_exasol_xmlrpc_password: Optional[str] = None, + external_exasol_xmlrpc_cluster_name: str = "cluster1", + db_mem_size: str = "2 GiB", + db_disk_size: str = "2 GiB", + test_environment_vars: str = "{}", + test_log_level: str = "critical", + reuse_database: bool = False, + reuse_database_setup: bool = False, + reuse_uploaded_container: bool = False, + reuse_test_container: bool = False, + reuse_test_environment: bool = False, + test_container_folder: str = "./test_container", + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> AllTestsResult: """ This command runs the integration tests in local docker-db. The system spawns a test environment in which the test are executed. @@ -84,18 +102,30 @@ def run_db_test(flavor_path: Tuple[str, ...], :return: result of all test as AllTestsResult object. 
""" import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) if reuse_test_environment: reuse_database = True @@ -111,51 +141,53 @@ def run_db_test(flavor_path: Tuple[str, ...], raise api_errors.MissingArgumentError("external_exasol_bucketfs_port") def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=TestContainer, - flavor_paths=list(flavor_path), - release_goals=list(release_goal), - generic_language_tests=list(generic_language_test), - test_folders=list(test_folder), - test_files=list(test_file), - test_restrictions=list(test), - languages=list(test_language), - mem_size=db_mem_size, - disk_size=db_disk_size, - test_environment_vars=json.loads(test_environment_vars), - test_log_level=test_log_level, - reuse_uploaded_container=reuse_uploaded_container, - environment_type=EnvironmentType[environment_type], - reuse_database_setup=reuse_database_setup, - reuse_test_container=reuse_test_container, - reuse_database=reuse_database, - db_os_access=DbOsAccess[db_os_access], - 
no_test_container_cleanup_after_success=reuse_test_container, - no_test_container_cleanup_after_failure=reuse_test_container, - no_database_cleanup_after_success=reuse_database, - no_database_cleanup_after_failure=reuse_database, - docker_db_image_name=docker_db_image_name, - docker_db_image_version=docker_db_image_version, - max_start_attempts=max_start_attempts, - external_exasol_db_host=external_exasol_db_host, - external_exasol_db_port=external_exasol_db_port, - external_exasol_bucketfs_port=external_exasol_bucketfs_port, - external_exasol_db_user=external_exasol_db_user, - external_exasol_db_password=external_exasol_db_password, - external_exasol_ssh_port=external_exasol_ssh_port, - external_exasol_bucketfs_write_password=external_exasol_bucketfs_write_password, - external_exasol_xmlrpc_host=external_exasol_xmlrpc_host, - external_exasol_xmlrpc_port=external_exasol_xmlrpc_port, - external_exasol_xmlrpc_user=external_exasol_xmlrpc_user, - external_exasol_xmlrpc_password=external_exasol_xmlrpc_password, - external_exasol_xmlrpc_cluster_name=external_exasol_xmlrpc_cluster_name, - create_certificates=create_certificates, - additional_db_parameter=additional_db_parameter, - test_container_content=build_test_container_content(test_container_folder) - ) + return generate_root_task( + task_class=TestContainer, + flavor_paths=list(flavor_path), + release_goals=list(release_goal), + generic_language_tests=list(generic_language_test), + test_folders=list(test_folder), + test_files=list(test_file), + test_restrictions=list(test), + languages=list(test_language), + mem_size=db_mem_size, + disk_size=db_disk_size, + test_environment_vars=json.loads(test_environment_vars), + test_log_level=test_log_level, + reuse_uploaded_container=reuse_uploaded_container, + environment_type=EnvironmentType[environment_type], + reuse_database_setup=reuse_database_setup, + reuse_test_container=reuse_test_container, + reuse_database=reuse_database, + db_os_access=DbOsAccess[db_os_access], + 
no_test_container_cleanup_after_success=reuse_test_container, + no_test_container_cleanup_after_failure=reuse_test_container, + no_database_cleanup_after_success=reuse_database, + no_database_cleanup_after_failure=reuse_database, + docker_db_image_name=docker_db_image_name, + docker_db_image_version=docker_db_image_version, + max_start_attempts=max_start_attempts, + external_exasol_db_host=external_exasol_db_host, + external_exasol_db_port=external_exasol_db_port, + external_exasol_bucketfs_port=external_exasol_bucketfs_port, + external_exasol_db_user=external_exasol_db_user, + external_exasol_db_password=external_exasol_db_password, + external_exasol_ssh_port=external_exasol_ssh_port, + external_exasol_bucketfs_write_password=external_exasol_bucketfs_write_password, + external_exasol_xmlrpc_host=external_exasol_xmlrpc_host, + external_exasol_xmlrpc_port=external_exasol_xmlrpc_port, + external_exasol_xmlrpc_user=external_exasol_xmlrpc_user, + external_exasol_xmlrpc_password=external_exasol_xmlrpc_password, + external_exasol_xmlrpc_cluster_name=external_exasol_xmlrpc_cluster_name, + create_certificates=create_certificates, + additional_db_parameter=additional_db_parameter, + test_container_content=build_test_container_content(test_container_folder), + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/api/save.py b/exasol_script_languages_container_tool/lib/api/save.py index b977ce2e..5e45c1f5 100644 --- a/exasol_script_languages_container_tool/lib/api/save.py +++ b/exasol_script_languages_container_tool/lib/api/save.py @@ -1,40 +1,51 @@ -from typing import Tuple, 
Optional, List, Dict +from typing import Dict, List, Optional, Tuple -from exasol_integration_test_docker_environment.lib.api.common import set_docker_repository_config, generate_root_task, \ - run_task, import_build_steps, set_build_config, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) from exasol_script_languages_container_tool.lib.tasks.save.docker_save import DockerSave @cli_function -def save(flavor_path: Tuple[str, ...], - save_directory: Optional[str] = None, - force_save: bool = False, - save_all: bool = False, - goal: Tuple[str, ...] = tuple(), - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] 
= tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> Dict[str, List[ImageInfo]]: +def save( + flavor_path: Tuple[str, ...], + save_directory: Optional[str] = None, + force_save: bool = False, + save_all: bool = False, + goal: Tuple[str, ...] = tuple(), + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> Dict[str, List[ImageInfo]]: """ This command saves all stages of the script-language-container flavor to a local directory. If the stages do not exists locally, the system will build or pull them before the execution of save. @@ -42,30 +53,45 @@ def save(flavor_path: Tuple[str, ...], :return: List of image infos per flavor. 
""" import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=DockerSave, - save_path=save_directory, - force_save=force_save, - save_all=save_all, - flavor_paths=list(flavor_path), - goals=list(goal)) + return generate_root_task( + task_class=DockerSave, + save_path=save_directory, + force_save=force_save, + save_all=save_all, + flavor_paths=list(flavor_path), + goals=list(goal), + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/api/security_scan.py b/exasol_script_languages_container_tool/lib/api/security_scan.py index 
279af70c..5c1fc2ea 100644 --- a/exasol_script_languages_container_tool/lib/api/security_scan.py +++ b/exasol_script_languages_container_tool/lib/api/security_scan.py @@ -1,37 +1,49 @@ from pathlib import Path -from typing import Tuple, Optional +from typing import Optional, Tuple import luigi -from exasol_integration_test_docker_environment.lib.api.common import import_build_steps, set_build_config, \ - set_docker_repository_config, generate_root_task, run_task, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) -from exasol_script_languages_container_tool.lib.tasks.security_scan.security_scan import SecurityScan, AllScanResult +from exasol_script_languages_container_tool.lib.tasks.security_scan.security_scan import ( + AllScanResult, + SecurityScan, +) @cli_function -def security_scan(flavor_path: Tuple[str, ...], - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] 
= tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> AllScanResult: +def security_scan( + flavor_path: Tuple[str, ...], + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] = tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> AllScanResult: """ This command executes the security scan, which must be defined as separate step in the build steps declaration. 
The scan runs the docker container of the respective step, passing a folder of the output-dir as argument. @@ -40,30 +52,44 @@ def security_scan(flavor_path: Tuple[str, ...], :return: Results of all scans. """ import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) report_path = Path(output_directory).joinpath("security_scan") def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=SecurityScan, - flavor_paths=list(flavor_path), - report_path=str(report_path) - ) + return generate_root_task( + task_class=SecurityScan, + flavor_paths=list(flavor_path), + report_path=str(report_path), + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git 
a/exasol_script_languages_container_tool/lib/api/upload.py b/exasol_script_languages_container_tool/lib/api/upload.py index 5bb4a1e1..022bbe05 100644 --- a/exasol_script_languages_container_tool/lib/api/upload.py +++ b/exasol_script_languages_container_tool/lib/api/upload.py @@ -1,47 +1,58 @@ import getpass -from typing import Tuple, Optional +from typing import Optional, Tuple import luigi -from exasol_integration_test_docker_environment.lib.api.common import import_build_steps, set_build_config, \ - set_docker_repository_config, generate_root_task, run_task, cli_function -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.api.common import ( + cli_function, + generate_root_task, + import_build_steps, + run_task, + set_build_config, + set_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) -from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers import UploadContainers +from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers import ( + UploadContainers, +) @cli_function -def upload(flavor_path: Tuple[str, ...], - database_host: str, - bucketfs_port: int, - bucketfs_username: str, - bucketfs_name: str, - bucket_name: str, - bucketfs_password: Optional[str] = None, - bucketfs_https: bool = False, - path_in_bucket: str = '', - release_goal: Tuple[str, ...] = ('release',), - release_name: Optional[str] = None, - force_rebuild: bool = False, - force_rebuild_from: Tuple[str, ...] 
= tuple(), - force_pull: bool = False, - output_directory: str = ".build_output", - temporary_base_directory: str = "/tmp", - log_build_context_content: bool = False, - cache_directory: Optional[str] = None, - build_name: Optional[str] = None, - source_docker_repository_name: str = 'exasol/script-language-container', - source_docker_tag_prefix: str = '', - source_docker_username: Optional[str] = None, - source_docker_password: Optional[str] = None, - target_docker_repository_name: str = 'exasol/script-language-container', - target_docker_tag_prefix: str = '', - target_docker_username: Optional[str] = None, - target_docker_password: Optional[str] = None, - workers: int = 5, - task_dependencies_dot_file: Optional[str] = None, - log_level: Optional[str] = None, - use_job_specific_log_file: bool = True - ) -> luigi.LocalTarget: +def upload( + flavor_path: Tuple[str, ...], + database_host: str, + bucketfs_port: int, + bucketfs_username: str, + bucketfs_name: str, + bucket_name: str, + bucketfs_password: Optional[str] = None, + bucketfs_https: bool = False, + path_in_bucket: str = "", + release_goal: Tuple[str, ...] = ("release",), + release_name: Optional[str] = None, + force_rebuild: bool = False, + force_rebuild_from: Tuple[str, ...] 
= tuple(), + force_pull: bool = False, + output_directory: str = ".build_output", + temporary_base_directory: str = "/tmp", + log_build_context_content: bool = False, + cache_directory: Optional[str] = None, + build_name: Optional[str] = None, + source_docker_repository_name: str = "exasol/script-language-container", + source_docker_tag_prefix: str = "", + source_docker_username: Optional[str] = None, + source_docker_password: Optional[str] = None, + target_docker_repository_name: str = "exasol/script-language-container", + target_docker_tag_prefix: str = "", + target_docker_username: Optional[str] = None, + target_docker_password: Optional[str] = None, + workers: int = 5, + task_dependencies_dot_file: Optional[str] = None, + log_level: Optional[str] = None, + use_job_specific_log_file: bool = True, +) -> luigi.LocalTarget: """ This command uploads the whole script-language-container package of the flavor to the database. If the stages or the packaged container do not exists locally, the system will build, pull or @@ -50,39 +61,57 @@ def upload(flavor_path: Tuple[str, ...], :return: Path to resulting report file. 
""" import_build_steps(flavor_path) - set_build_config(force_rebuild, - force_rebuild_from, - force_pull, - log_build_context_content, - output_directory, - temporary_base_directory, - cache_directory, - build_name) - set_docker_repository_config(source_docker_password, source_docker_repository_name, source_docker_username, - source_docker_tag_prefix, "source") - set_docker_repository_config(target_docker_password, target_docker_repository_name, target_docker_username, - target_docker_tag_prefix, "target") + set_build_config( + force_rebuild, + force_rebuild_from, + force_pull, + log_build_context_content, + output_directory, + temporary_base_directory, + cache_directory, + build_name, + ) + set_docker_repository_config( + source_docker_password, + source_docker_repository_name, + source_docker_username, + source_docker_tag_prefix, + "source", + ) + set_docker_repository_config( + target_docker_password, + target_docker_repository_name, + target_docker_username, + target_docker_tag_prefix, + "target", + ) if bucketfs_password is None: bucketfs_password = getpass.getpass( - "BucketFS Password for BucketFS %s and User %s:" % (bucketfs_name, bucketfs_username)) + "BucketFS Password for BucketFS {} and User {}:".format( + bucketfs_name, bucketfs_username + ) + ) def root_task_generator() -> DependencyLoggerBaseTask: - return generate_root_task(task_class=UploadContainers, - flavor_paths=list(flavor_path), - release_goals=list(release_goal), - database_host=database_host, - bucketfs_port=bucketfs_port, - bucketfs_username=bucketfs_username, - bucketfs_password=bucketfs_password, - bucket_name=bucket_name, - path_in_bucket=path_in_bucket, - bucketfs_https=bucketfs_https, - release_name=release_name, - bucketfs_name=bucketfs_name) + return generate_root_task( + task_class=UploadContainers, + flavor_paths=list(flavor_path), + release_goals=list(release_goal), + database_host=database_host, + bucketfs_port=bucketfs_port, + bucketfs_username=bucketfs_username, + 
bucketfs_password=bucketfs_password, + bucket_name=bucket_name, + path_in_bucket=path_in_bucket, + bucketfs_https=bucketfs_https, + release_name=release_name, + bucketfs_name=bucketfs_name, + ) - return run_task(root_task_generator, - workers=workers, - task_dependencies_dot_file=task_dependencies_dot_file, - log_level=log_level, - use_job_specific_log_file=use_job_specific_log_file - ) + return run_task( + root_task_generator, + workers=workers, + task_dependencies_dot_file=task_dependencies_dot_file, + log_level=log_level, + use_job_specific_log_file=use_job_specific_log_file, + ) diff --git a/exasol_script_languages_container_tool/lib/tasks/build/docker_build.py b/exasol_script_languages_container_tool/lib/tasks/build/docker_build.py index 2e1ea558..4c1e16f0 100644 --- a/exasol_script_languages_container_tool/lib/tasks/build/docker_build.py +++ b/exasol_script_languages_container_tool/lib/tasks/build/docker_build.py @@ -1,11 +1,19 @@ import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.base.pickle_target import PickleTarget +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.base.pickle_target import ( + PickleTarget, +) from luigi import Config from luigi.format import Nop -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) class DockerBuildParameter(Config): @@ -24,7 +32,7 @@ def register_required(self): self._images_futures = self.register_dependencies(tasks) def 
run_task(self): - image_info = (self.get_values_from_futures(self._images_futures)) + image_info = self.get_values_from_futures(self._images_futures) self.return_object(image_info) diff --git a/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_build_base.py b/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_build_base.py index d331a1a2..798fa225 100644 --- a/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_build_base.py +++ b/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_build_base.py @@ -1,11 +1,18 @@ from typing import Dict, Set -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.create.docker_build_base import DockerBuildBase -from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_analyze_task import \ - DockerAnalyzeImageTask +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.create.docker_build_base import ( + DockerBuildBase, +) +from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_analyze_task import ( + DockerAnalyzeImageTask, +) -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import DockerFlavorAnalyzeImageTask +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import ( + DockerFlavorAnalyzeImageTask, +) class DockerFlavorBuildBase(FlavorBaseTask, DockerBuildBase): @@ -13,11 +20,18 @@ class DockerFlavorBuildBase(FlavorBaseTask, DockerBuildBase): # TODO order pull for images which share dependencies def get_goal_class_map(self) -> Dict[str, DockerAnalyzeImageTask]: - module_name_for_build_steps = self.flavor_path.replace("/", "_").replace(".", "_") - available_tasks = [self.create_child_task_with_common_params(subclass) - 
for subclass - in DockerFlavorAnalyzeImageTask.__subclasses__() - if subclass.__module__ == module_name_for_build_steps] + module_name_for_build_steps = ( + self.flavor_path.replace( # pylint: disable=no-member + "/", "_" + ).replace( # pylint: disable=no-member + ".", "_" + ) + ) + available_tasks = [ + self.create_child_task_with_common_params(subclass) + for subclass in DockerFlavorAnalyzeImageTask.__subclasses__() + if subclass.__module__ == module_name_for_build_steps + ] goal_class_map = {task.get_build_step(): task for task in available_tasks} return goal_class_map diff --git a/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_image_task.py b/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_image_task.py index b1fbf36b..bfc2ed6b 100644 --- a/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_image_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/build/docker_flavor_image_task.py @@ -1,12 +1,19 @@ from pathlib import Path from typing import Dict -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.config.build_config import build_config -from exasol_integration_test_docker_environment.lib.config.docker_config import source_docker_repository_config, \ - target_docker_repository_config -from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_analyze_task import \ - DockerAnalyzeImageTask +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.config.build_config import ( + build_config, +) +from exasol_integration_test_docker_environment.lib.config.docker_config import ( + source_docker_repository_config, + target_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_analyze_task import ( + DockerAnalyzeImageTask, 
+) class DockerFlavorAnalyzeImageTask(DockerAnalyzeImageTask, FlavorBaseTask): @@ -15,20 +22,22 @@ class DockerFlavorAnalyzeImageTask(DockerAnalyzeImageTask, FlavorBaseTask): # if this would have parameters instead of abstract methods def __init__(self, *args, **kwargs): - self.build_step = self.get_build_step() - self.additional_build_directories_mapping = self.get_additional_build_directories_mapping() + self.build_step = ( # pylint: disable=assignment-from-no-return + self.get_build_step() + ) + self.additional_build_directories_mapping = ( + self.get_additional_build_directories_mapping() + ) super().__init__(*args, **kwargs) def is_rebuild_requested(self) -> bool: config = build_config() - return ( - config.force_rebuild and - ( - self.get_build_step() in config.force_rebuild_from or - len(config.force_rebuild_from) == 0 - )) - - def get_build_step(self) -> str: + return config.force_rebuild and ( + self.get_build_step() in config.force_rebuild_from + or len(config.force_rebuild_from) == 0 + ) + + def get_build_step(self) -> str: # type: ignore """ Called by the constructor to get the name of build step. Sub classes need to implement this method. 
@@ -63,19 +72,23 @@ def get_target_repository_name(self) -> str: def get_source_image_tag(self): if source_docker_repository_config().tag_prefix != "": - return f"{source_docker_repository_config().tag_prefix}_{self.get_image_tag()}" + return ( + f"{source_docker_repository_config().tag_prefix}_{self.get_image_tag()}" + ) else: return f"{self.get_image_tag()}" def get_target_image_tag(self): if target_docker_repository_config().tag_prefix != "": - return f"{target_docker_repository_config().tag_prefix}_{self.get_image_tag()}" + return ( + f"{target_docker_repository_config().tag_prefix}_{self.get_image_tag()}" + ) else: return f"{self.get_image_tag()}" def get_image_tag(self) -> str: flavor_name = self.get_flavor_name() - return "%s-%s" % (flavor_name, self.build_step) + return f"{flavor_name}-{self.build_step}" def get_mapping_of_build_files_and_directories(self) -> Dict[str, str]: build_step_path = self.get_build_step_path() @@ -84,7 +97,9 @@ def get_mapping_of_build_files_and_directories(self) -> Dict[str, str]: return result def get_build_step_path(self): - path_in_flavor = self.get_path_in_flavor() + path_in_flavor = ( # pylint: disable=assignment-from-none + self.get_path_in_flavor() + ) if path_in_flavor is None: build_step_path_in_flavor = Path(self.build_step) else: diff --git a/exasol_script_languages_container_tool/lib/tasks/clean/clean_images.py b/exasol_script_languages_container_tool/lib/tasks/clean/clean_images.py index 7021ba1f..8b05a1e2 100644 --- a/exasol_script_languages_container_tool/lib/tasks/clean/clean_images.py +++ b/exasol_script_languages_container_tool/lib/tasks/clean/clean_images.py @@ -1,9 +1,18 @@ import luigi -from exasol_integration_test_docker_environment.lib.base.docker_base_task import DockerBaseTask -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask, FlavorsBaseTask -from exasol_integration_test_docker_environment.lib.config.docker_config import target_docker_repository_config - -from 
exasol_script_languages_container_tool.lib.utils.docker_utils import find_images_by_tag +from exasol_integration_test_docker_environment.lib.base.docker_base_task import ( + DockerBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, + FlavorsBaseTask, +) +from exasol_integration_test_docker_environment.lib.config.docker_config import ( + target_docker_repository_config, +) + +from exasol_script_languages_container_tool.lib.utils.docker_utils import ( + find_images_by_tag, +) class CleanImageTask(DockerBaseTask): @@ -14,8 +23,10 @@ def __init__(self, *args, **kwargs): def run_task(self): self.logger.info("Try to remove dependent images of %s" % self.image_id) - yield from self.run_dependencies(self.get_clean_image_tasks_for_dependent_images()) - for i in range(3): + yield from self.run_dependencies( + self.get_clean_image_tasks_for_dependent_images() + ) + for _ in range(3): try: with self._get_docker_client() as docker_client: self.logger.info("Try to remove image %s" % self.image_id) @@ -23,19 +34,29 @@ def run_task(self): self.logger.info("Removed image %s" % self.image_id) break except Exception as e: - self.logger.info("Could not removed image %s got exception %s" % (self.image_id, e)) + self.logger.info( + "Could not removed image {} got exception {}".format( + self.image_id, e + ) + ) def get_clean_image_tasks_for_dependent_images(self): with self._get_docker_client() as docker_client: - image_ids = [str(possible_child).replace("sha256:", "") for possible_child - in docker_client.api.images(all=True, quiet=True) - if self.is_child_image(possible_child, docker_client)] - return [self.create_child_task(CleanImageTask, image_id=image_id) - for image_id in image_ids] + image_ids = [ + str(possible_child).replace("sha256:", "") + for possible_child in docker_client.api.images(all=True, quiet=True) + if self.is_child_image(possible_child, docker_client) + ] + return [ + self.create_child_task(CleanImageTask, 
image_id=image_id) + for image_id in image_ids + ] def is_child_image(self, possible_child, docker_client): try: - inspect = docker_client.api.inspect_image(image=str(possible_child).replace("sha256:", "")) + inspect = docker_client.api.inspect_image( + image=str(possible_child).replace("sha256:", "") + ) return str(inspect["Parent"]).replace("sha256:", "") == self.image_id except Exception: return False @@ -46,14 +67,24 @@ class CleanImagesStartingWith(DockerBaseTask): def register_required(self): with self._get_docker_client() as docker_client: - image_ids = [str(image.id).replace("sha256:", "") - for image in self.find_images_to_clean(docker_client)] - self.register_dependencies([self.create_child_task(CleanImageTask, image_id=image_id) - for image_id in image_ids]) + image_ids = [ + str(image.id).replace("sha256:", "") + for image in self.find_images_to_clean(docker_client) + ] + self.register_dependencies( + [ + self.create_child_task(CleanImageTask, image_id=image_id) + for image_id in image_ids + ] + ) def find_images_to_clean(self, docker_client): - self.logger.info("Going to remove all images starting with %s" % self.starts_with_pattern) - filter_images = find_images_by_tag(docker_client, lambda tag: tag.startswith(self.starts_with_pattern)) + self.logger.info( + "Going to remove all images starting with %s" % self.starts_with_pattern + ) + filter_images = find_images_by_tag( + docker_client, lambda tag: tag.startswith(self.starts_with_pattern) + ) for i in filter_images: self.logger.info("Going to remove following image: %s" % i.tags) return filter_images @@ -73,9 +104,12 @@ def register_required(self): raise Exception("docker repository name must not be an empty string") flavor_name_extension = ":%s" % flavor_name - starts_with_pattern = target_docker_repository_config().repository_name + \ - flavor_name_extension - task = self.create_child_task(CleanImagesStartingWith, starts_with_pattern=starts_with_pattern) + starts_with_pattern = ( + 
target_docker_repository_config().repository_name + flavor_name_extension + ) + task = self.create_child_task( + CleanImagesStartingWith, starts_with_pattern=starts_with_pattern + ) self.register_dependency(task) def run_task(self): @@ -85,8 +119,10 @@ def run_task(self): class CleanExaslcFlavorsImages(FlavorsBaseTask): def register_required(self): - for flavor_path in self.flavor_paths: - task = self.create_child_task(CleanExaslcFlavorImages, flavor_path=flavor_path) + for flavor_path in self.flavor_paths: # pylint: disable=not-an-iterable + task = self.create_child_task( + CleanExaslcFlavorImages, flavor_path=flavor_path + ) self.register_dependency(task) def run_task(self): @@ -97,7 +133,9 @@ class CleanExaslcAllImages(DockerBaseTask): def register_required(self): starts_with_pattern = target_docker_repository_config().repository_name - task = self.create_child_task(CleanImagesStartingWith, starts_with_pattern=starts_with_pattern) + task = self.create_child_task( + CleanImagesStartingWith, starts_with_pattern=starts_with_pattern + ) self.register_dependency(task) def run_task(self): diff --git a/exasol_script_languages_container_tool/lib/tasks/export/create_export_directory.py b/exasol_script_languages_container_tool/lib/tasks/export/create_export_directory.py index 9c8ad959..56f08d89 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/create_export_directory.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/create_export_directory.py @@ -1,6 +1,8 @@ from pathlib import Path -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) class CreateExportDirectory(DependencyLoggerBaseTask): diff --git a/exasol_script_languages_container_tool/lib/tasks/export/export_container_base_task.py 
b/exasol_script_languages_container_tool/lib/tasks/export/export_container_base_task.py index 27ec8db9..1d7a2456 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/export_container_base_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/export_container_base_task.py @@ -8,21 +8,35 @@ import humanfriendly import luigi - -from exasol_script_languages_container_tool.lib.tasks.export.create_export_directory import CreateExportDirectory -from exasol_script_languages_container_tool.lib.tasks.export.export_info import ExportInfo from exasol_integration_test_docker_environment.lib.base.base_task import BaseTask -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.still_running_logger import StillRunningLogger -from exasol_integration_test_docker_environment.lib.config.build_config import build_config -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo -from exasol_integration_test_docker_environment.lib.logging.command_log_handler import CommandLogHandler +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.still_running_logger import ( + StillRunningLogger, +) +from exasol_integration_test_docker_environment.lib.config.build_config import ( + build_config, +) +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) +from exasol_integration_test_docker_environment.lib.logging.command_log_handler import ( + CommandLogHandler, +) + +from exasol_script_languages_container_tool.lib.tasks.export.create_export_directory import ( + CreateExportDirectory, +) +from exasol_script_languages_container_tool.lib.tasks.export.export_info import ( + ExportInfo, +) CHECKSUM_ALGORITHM = "sha512sum" class ExportContainerBaseTask(FlavorBaseTask): - logger = 
logging.getLogger('luigi-interface') + logger = logging.getLogger("luigi-interface") export_path = luigi.OptionalParameter(None) release_name = luigi.OptionalParameter(None) release_goal = luigi.Parameter(None) @@ -33,17 +47,21 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def register_required(self): - self._export_directory_future = \ - self.register_dependency(self.create_child_task(task_class=CreateExportDirectory)) + self._export_directory_future = self.register_dependency( + self.create_child_task(task_class=CreateExportDirectory) + ) self._release_task_future = self.register_dependency(self.get_release_task()) def get_release_task(self) -> BaseTask: pass def run_task(self): - image_info_of_release_image = self._release_task_future.get_output() # type: ImageInfo - cache_file, release_complete_name, release_image_name = \ + image_info_of_release_image = ( # type: ignore + self._release_task_future.get_output() + ) # type: ImageInfo + cache_file, release_complete_name, release_image_name = ( self._get_cache_file_path(image_info_of_release_image) + ) checksum_file = Path(str(cache_file) + "." 
+ CHECKSUM_ALGORITHM) self._remove_cached_exported_file_if_requested(cache_file, checksum_file) @@ -51,15 +69,26 @@ def run_task(self): if not cache_file.exists(): self._export_release(release_image_name, cache_file, checksum_file) is_new = True - output_file = self._copy_cache_file_to_output_path(cache_file, checksum_file, is_new) - export_info = self._create_export_info(image_info_of_release_image, - release_complete_name, - cache_file, is_new, output_file) + output_file = self._copy_cache_file_to_output_path( + cache_file, checksum_file, is_new + ) + export_info = self._create_export_info( + image_info_of_release_image, + release_complete_name, + cache_file, + is_new, + output_file, + ) self.return_object(export_info) - def _create_export_info(self, image_info_of_release_image: ImageInfo, - release_complete_name: str, - cache_file: Path, is_new: bool, output_file: Path): + def _create_export_info( + self, + image_info_of_release_image: ImageInfo, + release_complete_name: str, + cache_file: Path, + is_new: bool, + output_file: Path, + ): export_info = ExportInfo( cache_file=str(cache_file), complete_name=release_complete_name, @@ -69,7 +98,7 @@ def _create_export_info(self, image_info_of_release_image: ImageInfo, depends_on_image=image_info_of_release_image, release_goal=str(self.release_goal), release_name=str(self.release_name), - output_file=str(output_file) + output_file=str(output_file), ) return export_info @@ -80,40 +109,56 @@ def _get_cache_file_path(self, image_info_of_release_image): cache_file = Path(export_path, release_complete_name + ".tar.gz").absolute() return cache_file, release_complete_name, release_image_name - def _copy_cache_file_to_output_path(self, cache_file: Path, checksum_file: Path, is_new: bool): + def _copy_cache_file_to_output_path( + self, cache_file: Path, checksum_file: Path, is_new: bool + ): output_file = None if self.export_path is not None: if self.release_name is not None: suffix = f"""_{self.release_name}""" else: suffix = 
"" - file_name = f"""{self.get_flavor_name()}_{self.release_goal}{suffix}.tar.gz""" + file_name = ( + f"""{self.get_flavor_name()}_{self.release_goal}{suffix}.tar.gz""" + ) output_file = Path(self.export_path, file_name) - output_checksum_file = Path(self.export_path, file_name + "." + CHECKSUM_ALGORITHM) + output_checksum_file = Path( + self.export_path, file_name + "." + CHECKSUM_ALGORITHM + ) if not output_file.exists() or not output_checksum_file.exists() or is_new: output_file.parent.mkdir(exist_ok=True, parents=True) shutil.copy2(checksum_file, output_checksum_file) shutil.copy2(cache_file, output_file) return output_file - def _remove_cached_exported_file_if_requested(self, release_file: Path, checksum_file: Path): - if release_file.exists() and \ - (build_config().force_rebuild or - build_config().force_pull or - not checksum_file.exists()): + def _remove_cached_exported_file_if_requested( + self, release_file: Path, checksum_file: Path + ): + if release_file.exists() and ( + build_config().force_rebuild + or build_config().force_pull + or not checksum_file.exists() + ): self.logger.info("Removed container file %s", release_file) os.remove(release_file) if checksum_file.exists(): os.remove(checksum_file) - def _export_release(self, release_image_name: str, release_file: Path, checksum_file: Path): + def _export_release( + self, release_image_name: str, release_file: Path, checksum_file: Path + ): self.logger.info("Create container file %s", release_file) - temp_directory = tempfile.mkdtemp(prefix="release_archive_", - dir=build_config().temporary_base_directory) + temp_directory = tempfile.mkdtemp( + prefix="release_archive_", dir=build_config().temporary_base_directory + ) try: log_path = self.get_log_path() - export_file = self._create_and_export_container(release_image_name, temp_directory) - extract_dir = self._extract_exported_container(log_path, export_file, temp_directory) + export_file = self._create_and_export_container( + release_image_name, 
temp_directory + ) + extract_dir = self._extract_exported_container( + log_path, export_file, temp_directory + ) self._modify_extracted_container(extract_dir) self._pack_release_file(log_path, extract_dir, release_file) self._compute_checksum(release_file, checksum_file) @@ -123,28 +168,35 @@ def _export_release(self, release_image_name: str, release_file: Path, checksum_ def _compute_checksum(self, release_file: Path, checksum_file: Path): self.logger.info("Compute checksum for container file %s", release_file) command = f"""{CHECKSUM_ALGORITHM} '{release_file}'""" - completed_process = subprocess.run(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE) + completed_process = subprocess.run(shlex.split(command), capture_output=True) completed_process.check_returncode() stdout = completed_process.stdout.decode("utf-8") stdout = stdout.replace(str(release_file), release_file.name) with open(checksum_file, "w") as f: f.write(stdout) - def _create_and_export_container(self, release_image_name: str, temp_directory: str): + def _create_and_export_container( + self, release_image_name: str, temp_directory: str + ): self.logger.info("Export container %s", release_image_name) with self._get_docker_client() as docker_client: container = docker_client.containers.create(image=release_image_name) try: - return self._export_container(container, release_image_name, temp_directory) + return self._export_container( + container, release_image_name, temp_directory + ) finally: container.remove(force=True) - def _export_container(self, container, release_image_name: str, temp_directory: str): + def _export_container( + self, container, release_image_name: str, temp_directory: str + ): generator = container.export(chunk_size=humanfriendly.parse_size("10mb")) export_file = temp_directory + "/export.tar" with open(export_file, "wb") as file: still_running_logger = StillRunningLogger( - self.logger, "Export image %s" % release_image_name) + self.logger, "Export image %s" 
% release_image_name + ) for chunk in generator: still_running_logger.log() file.write(chunk) @@ -154,48 +206,82 @@ def _pack_release_file(self, log_path: Path, extract_dir: str, release_file: Pat self.logger.info("Pack container file %s", release_file) extract_content = " ".join(f"'{file}'" for file in os.listdir(extract_dir)) if not str(release_file).endswith("tar.gz"): - raise ValueError(f"Unexpected release file: '{release_file}'. Expected suffix 'tar.gz'.") - tmp_release_file = release_file.with_suffix("") #cut off ".gz" from ".tar.gz" - command = f"""tar -C '{extract_dir}' -vcf '{tmp_release_file}' {extract_content}""" - self.run_command(command, f"packing container file {tmp_release_file}", - log_path.joinpath("pack_release_file.log")) + raise ValueError( + f"Unexpected release file: '{release_file}'. Expected suffix 'tar.gz'." + ) + tmp_release_file = release_file.with_suffix("") # cut off ".gz" from ".tar.gz" + command = ( + f"""tar -C '{extract_dir}' -vcf '{tmp_release_file}' {extract_content}""" + ) + self.run_command( + command, + f"packing container file {tmp_release_file}", + log_path.joinpath("pack_release_file.log"), + ) manifest_file = os.path.join(extract_dir, "exasol-manifest.json") with open(manifest_file, "w") as f: print("{}", file=f) - command = f"""tar -C '{extract_dir}' -rvf '{tmp_release_file}' exasol-manifest.json""" - self.run_command(command, f"adding manifest to '{tmp_release_file}'", - log_path.joinpath("pack_release_file.log")) + command = ( + f"""tar -C '{extract_dir}' -rvf '{tmp_release_file}' exasol-manifest.json""" + ) + self.run_command( + command, + f"adding manifest to '{tmp_release_file}'", + log_path.joinpath("pack_release_file.log"), + ) command = f"""gzip {tmp_release_file}""" - self.run_command(command, f"Creating '{release_file}'", - log_path.joinpath("pack_release_file.log")) + self.run_command( + command, + f"Creating '{release_file}'", + log_path.joinpath("pack_release_file.log"), + ) @staticmethod def 
_modify_extracted_container(extract_dir: str): os.symlink("/conf/resolv.conf", f"""{extract_dir}/etc/resolv.conf""") os.symlink("/conf/hosts", f"""{extract_dir}/etc/hosts""") - def _extract_exported_container(self, log_path: Path, export_file: str, temp_directory: str): + def _extract_exported_container( + self, log_path: Path, export_file: str, temp_directory: str + ): self.logger.info("Extract exported file %s", export_file) extract_dir = temp_directory + "/extract" os.makedirs(extract_dir) excludes = " ".join( - ["--exclude='%s'" % directory for directory in - ["dev/*", "proc/*", "etc/resolv.conf", "etc/hosts", "var/cache/apt", "var/lib/apt", "var/lib/dpkg"]]) - self.run_command(f"""tar {excludes} -xvf '{export_file}' -C '{extract_dir}'""", - "extracting exported container %s" % export_file, - log_path.joinpath("extract_release_file.log")) + [ + "--exclude='%s'" % directory + for directory in [ + "dev/*", + "proc/*", + "etc/resolv.conf", + "etc/hosts", + "var/cache/apt", + "var/lib/apt", + "var/lib/dpkg", + ] + ] + ) + self.run_command( + f"""tar {excludes} -xvf '{export_file}' -C '{extract_dir}'""", + "extracting exported container %s" % export_file, + log_path.joinpath("extract_release_file.log"), + ) return extract_dir def run_command(self, command: str, description: str, log_file_path: Path): - with subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) as process: - with CommandLogHandler(log_file_path, self.logger, description) as log_handler: - still_running_logger = StillRunningLogger( - self.logger, description) + with subprocess.Popen( + shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) as process: + with CommandLogHandler( + log_file_path, self.logger, description + ) as log_handler: + still_running_logger = StillRunningLogger(self.logger, description) log_handler.handle_log_lines((command + "\n").encode("utf-8")) - for line in iter(process.stdout.readline, b''): + for line in 
iter(process.stdout.readline, b""): # type: ignore still_running_logger.log() log_handler.handle_log_lines(line) process.wait(timeout=60 * 2) return_code_log_line = "return code %s" % process.returncode - log_handler.handle_log_lines(return_code_log_line.encode("utf-8"), process.returncode != 0) + log_handler.handle_log_lines( + return_code_log_line.encode("utf-8"), process.returncode != 0 + ) diff --git a/exasol_script_languages_container_tool/lib/tasks/export/export_container_task.py b/exasol_script_languages_container_tool/lib/tasks/export/export_container_task.py index 6366dadd..7458558e 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/export_container_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/export_container_task.py @@ -1,10 +1,16 @@ import importlib import luigi -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import RequiredTaskInfo +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) +from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import ( + RequiredTaskInfo, +) -from exasol_script_languages_container_tool.lib.tasks.export.export_container_base_task import ExportContainerBaseTask +from exasol_script_languages_container_tool.lib.tasks.export.export_container_base_task import ( + ExportContainerBaseTask, +) class ExportContainerTask(ExportContainerBaseTask): @@ -13,14 +19,23 @@ class ExportContainerTask(ExportContainerBaseTask): # don't want to wait for the push finishing before starting the build of depended images, # but we also need to create a ExportContainerTask for each DockerCreateImageTask of a goal - required_task_info = JsonPickleParameter(RequiredTaskInfo, - visibility=luigi.parameter.ParameterVisibility.HIDDEN, - significant=True) # type: RequiredTaskInfo + 
required_task_info = JsonPickleParameter( + RequiredTaskInfo, + visibility=luigi.parameter.ParameterVisibility.HIDDEN, + significant=True, + ) # type: RequiredTaskInfo def get_release_task(self): - module = importlib.import_module(self.required_task_info.module_name) - class_ = getattr(module, self.required_task_info.class_name) - return self.create_child_task(task_class=class_, **self.required_task_info.params) + module = importlib.import_module( + self.required_task_info.module_name # pylint: disable=no-member + ) # pylint: disable=no-member + class_ = getattr( + module, self.required_task_info.class_name # pylint: disable=no-member + ) # pylint: disable=no-member + return self.create_child_task( + task_class=class_, + **self.required_task_info.params # pylint: disable=no-member + ) def get_release_goal(self): return self.release_goal diff --git a/exasol_script_languages_container_tool/lib/tasks/export/export_container_tasks_creator.py b/exasol_script_languages_container_tool/lib/tasks/export/export_container_tasks_creator.py index 11e2e0d5..367276df 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/export_container_tasks_creator.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/export_container_tasks_creator.py @@ -1,11 +1,16 @@ from typing import Dict from exasol_integration_test_docker_environment.lib.base.base_task import BaseTask -from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_create_task import \ - DockerCreateImageTask -from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import RequiredTaskInfo +from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_create_task import ( + DockerCreateImageTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import ( + RequiredTaskInfo, +) -from exasol_script_languages_container_tool.lib.tasks.export.export_container_task import ExportContainerTask 
+from exasol_script_languages_container_tool.lib.tasks.export.export_container_task import ( + ExportContainerTask, +) class ExportContainerTasksCreator: @@ -14,26 +19,30 @@ def __init__(self, task: BaseTask, export_path: str): self.export_path = export_path self.task = task - def create_export_tasks(self, build_tasks: Dict[str, DockerCreateImageTask]) \ - -> Dict[str, ExportContainerTask]: - return {release_goal: self._create_export_task(release_goal, build_task) - for release_goal, build_task in build_tasks.items()} - - def _create_export_task(self, release_goal: str, - build_task: DockerCreateImageTask) -> ExportContainerTask: + def create_export_tasks( + self, build_tasks: Dict[str, DockerCreateImageTask] + ) -> Dict[str, ExportContainerTask]: + return { + release_goal: self._create_export_task(release_goal, build_task) + for release_goal, build_task in build_tasks.items() + } + + def _create_export_task( + self, release_goal: str, build_task: DockerCreateImageTask + ) -> ExportContainerTask: required_task_info = self._create_required_task_info(build_task) - return \ - self.task.create_child_task_with_common_params( - ExportContainerTask, - export_path=self.export_path, - required_task_info=required_task_info, - release_goal=release_goal, - ) + return self.task.create_child_task_with_common_params( + ExportContainerTask, + export_path=self.export_path, + required_task_info=required_task_info, + release_goal=release_goal, + ) @staticmethod def _create_required_task_info(build_task) -> RequiredTaskInfo: - required_task_info = \ - RequiredTaskInfo(module_name=build_task.__module__, - class_name=build_task.__class__.__name__, - params=build_task.param_kwargs) + required_task_info = RequiredTaskInfo( + module_name=build_task.__module__, + class_name=build_task.__class__.__name__, + params=build_task.param_kwargs, + ) return required_task_info diff --git a/exasol_script_languages_container_tool/lib/tasks/export/export_containers.py 
b/exasol_script_languages_container_tool/lib/tasks/export/export_containers.py index 23f8eeb1..5e6ae0f0 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/export_containers.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/export_containers.py @@ -2,14 +2,24 @@ from typing import Dict import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) from exasol_integration_test_docker_environment.lib.base.info import Info -from exasol_integration_test_docker_environment.lib.config.build_config import build_config +from exasol_integration_test_docker_environment.lib.config.build_config import ( + build_config, +) from luigi import Config -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase -from exasol_script_languages_container_tool.lib.tasks.export.export_container_tasks_creator import ExportContainerTasksCreator -from exasol_script_languages_container_tool.lib.tasks.export.export_info import ExportInfo +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) +from exasol_script_languages_container_tool.lib.tasks.export.export_container_tasks_creator import ( + ExportContainerTasksCreator, +) +from exasol_script_languages_container_tool.lib.tasks.export.export_info import ( + ExportInfo, +) class ExportContainerParameter(Config): @@ -20,7 +30,11 @@ class ExportContainerParameter(Config): class ExportContainerResult(Info): - def __init__(self, export_infos: Dict[str,Dict[str,ExportInfo]], command_line_output_path: Path): + def __init__( + self, + export_infos: Dict[str, Dict[str, ExportInfo]], + command_line_output_path: Path, + ): self.export_infos = export_infos self.command_line_output_path = command_line_output_path @@ -30,19 +44,27 @@ class 
ExportContainers(FlavorsBaseTask, ExportContainerParameter): def __init__(self, *args, **kwargs): self.export_info_futures = None super().__init__(*args, **kwargs) - command_line_output_path = self.get_output_path().joinpath("command_line_output") - self.command_line_output_target = luigi.LocalTarget(str(command_line_output_path)) + command_line_output_path = self.get_output_path().joinpath( + "command_line_output" + ) + self.command_line_output_target = luigi.LocalTarget( + str(command_line_output_path) + ) def register_required(self): - tasks = self.create_tasks_for_flavors_with_common_params( - ExportFlavorContainer) # type: Dict[str,ExportFlavorContainer] + tasks = self.create_tasks_for_flavors_with_common_params( # type: ignore + ExportFlavorContainer + ) # type: Dict[str,ExportFlavorContainer] self.export_info_futures = self.register_dependencies(tasks) def run_task(self): - export_infos = self.get_values_from_futures( - self.export_info_futures) # type: Dict[str,Dict[str,ExportInfo]] + export_infos = self.get_values_from_futures( # type: ignore + self.export_info_futures + ) # type: Dict[str,Dict[str,ExportInfo]] self.write_command_line_output(export_infos) - result = ExportContainerResult(export_infos, Path(self.command_line_output_target.path)) + result = ExportContainerResult( + export_infos, Path(self.command_line_output_target.path) + ) self.return_object(result) def write_command_line_output(self, export_infos: Dict[str, Dict[str, ExportInfo]]): @@ -55,8 +77,13 @@ def write_command_line_output(self, export_infos: Dict[str, Dict[str, ExportInfo out_file.write("Cached container under %s" % export_info.cache_file) out_file.write("\n") out_file.write("\n") - if export_info.output_file is not None and export_info.output_file != "None": - out_file.write("Copied container to %s" % export_info.output_file) + if ( + export_info.output_file is not None + and export_info.output_file != "None" + ): + out_file.write( + "Copied container to %s" % 
export_info.output_file + ) out_file.write("\n") out_file.write("\n") out_file.write("=================================================") diff --git a/exasol_script_languages_container_tool/lib/tasks/export/export_info.py b/exasol_script_languages_container_tool/lib/tasks/export/export_info.py index 68352c6e..457d314a 100644 --- a/exasol_script_languages_container_tool/lib/tasks/export/export_info.py +++ b/exasol_script_languages_container_tool/lib/tasks/export/export_info.py @@ -1,12 +1,25 @@ +from typing import Optional + from exasol_integration_test_docker_environment.lib.base.info import Info -from exasol_integration_test_docker_environment.lib.docker.images.image_info import ImageInfo +from exasol_integration_test_docker_environment.lib.docker.images.image_info import ( + ImageInfo, +) class ExportInfo(Info): - def __init__(self, cache_file: str, complete_name: str, name: str, _hash: str, is_new: bool, - release_goal: str, depends_on_image: ImageInfo, - output_file: str = None, release_name: str = None): + def __init__( + self, + cache_file: str, + complete_name: str, + name: str, + _hash: str, + is_new: bool, + release_goal: str, + depends_on_image: ImageInfo, + output_file: Optional[str] = None, + release_name: Optional[str] = None, + ): self.release_name = release_name self.output_file = output_file self.release_goal = release_goal diff --git a/exasol_script_languages_container_tool/lib/tasks/install_starter_scripts/run_starter_script_installation.py b/exasol_script_languages_container_tool/lib/tasks/install_starter_scripts/run_starter_script_installation.py index fda19948..7535bcb8 100644 --- a/exasol_script_languages_container_tool/lib/tasks/install_starter_scripts/run_starter_script_installation.py +++ b/exasol_script_languages_container_tool/lib/tasks/install_starter_scripts/run_starter_script_installation.py @@ -2,6 +2,7 @@ import shutil import sys from pathlib import Path + import importlib_metadata import importlib_resources @@ -10,17 +11,23 @@ 
MODULE_IDENTITY = PACKAGE_IDENTITY.replace("-", "_") -def run_starter_script_installation(install_path: Path, target_script_path: Path, force_install: bool): +def run_starter_script_installation( + install_path: Path, target_script_path: Path, force_install: bool +): print(f"Installing to {install_path}") if target_script_path.exists() and not force_install: - print(f"The installation directory for exaslct at {install_path} already exists.") + print( + f"The installation directory for exaslct at {install_path} already exists." + ) print("Do you want to remove it and continue with installation?") answer = input("yes/no: ") if answer == "yes": shutil.rmtree(target_script_path) else: - print("Can't continue with the installation, because the installation directory already exists.") + print( + "Can't continue with the installation, because the installation directory already exists." + ) sys.exit(1) elif force_install: shutil.rmtree(target_script_path) @@ -34,34 +41,48 @@ def run_starter_script_installation(install_path: Path, target_script_path: Path for script in starter_script_dir.iterdir(): with importlib_resources.as_file(script) as script_file: print(f"Copying {script_file} to {target_script_path / script_file.name}") - shutil.copyfile(script_file, target_script_path / script_file.name) + shutil.copyfile(script_file, target_script_path / script_file.name) - exaslct_script_path = importlib_resources.files(MODULE_IDENTITY) / "starter_scripts/exaslct_install_template.sh" + exaslct_script_path = ( + importlib_resources.files(MODULE_IDENTITY) + / "starter_scripts/exaslct_install_template.sh" + ) exaslct_script = exaslct_script_path.read_text() exaslct_script = exaslct_script.replace("<<<>>>", version) with open(target_script_path / "exaslct.sh", "w") as exaslct_file: exaslct_file.write(exaslct_script) - os.chmod(target_script_path / "exaslct.sh", 0o0764) #Full access (rwx) for owner, for group only r/w access, readonly fo rall others + os.chmod( + target_script_path / 
"exaslct.sh", 0o0764 + ) # Full access (rwx) for owner, for group only r/w access, readonly for all others exaslct_symlink_path = install_path / "exaslct" - exaslct_symlin_exists = exaslct_symlink_path.is_symlink() or exaslct_symlink_path.exists() or exaslct_symlink_path.is_file() + exaslct_symlin_exists = ( + exaslct_symlink_path.is_symlink() + or exaslct_symlink_path.exists() + or exaslct_symlink_path.is_file() + ) if exaslct_symlin_exists and not force_install: - print(f"The path for the symlink to exaslct at {exaslct_symlink_path} already exists.") + print( + f"The path for the symlink to exaslct at {exaslct_symlink_path} already exists." + ) print("Do you want to remove it and continue with installation?") answer = input("yes/no:") if answer == "yes": exaslct_symlink_path.unlink() else: - print("Can't continue with the installation, because the path to exaslct symlink already exists.") - print("You can change the path to exaslct symlink by setting the environment variable EXASLCT_SYM_LINK_PATH") + print( + "Can't continue with the installation, because the path to exaslct symlink already exists."
+ ) + print( + "You can change the path to exaslct symlink by setting the environment variable EXASLCT_SYM_LINK_PATH" + ) sys.exit(1) elif force_install: exaslct_symlink_path.unlink() exaslct_symlink_path.symlink_to(target_script_path / "exaslct.sh") - diff --git a/exasol_script_languages_container_tool/lib/tasks/push/docker_push.py b/exasol_script_languages_container_tool/lib/tasks/push/docker_push.py index b8d0cef8..35099098 100644 --- a/exasol_script_languages_container_tool/lib/tasks/push/docker_push.py +++ b/exasol_script_languages_container_tool/lib/tasks/push/docker_push.py @@ -1,12 +1,19 @@ from typing import Dict import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.push.docker_push_parameter import DockerPushParameter -from exasol_integration_test_docker_environment.lib.docker.images.push.push_task_creator_for_build_tasks import \ - PushTaskCreatorFromBuildTasks +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.push.docker_push_parameter import ( + DockerPushParameter, +) +from exasol_integration_test_docker_environment.lib.docker.images.push.push_task_creator_for_build_tasks import ( + PushTaskCreatorFromBuildTasks, +) -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) class DockerFlavorsPush(FlavorsBaseTask, DockerPushParameter): @@ -16,10 +23,10 @@ def __init__(self, *args, **kwargs): self.image_info_futures = None super().__init__(*args, **kwargs) - def register_required(self): - tasks = self.create_tasks_for_flavors_with_common_params( - DockerFlavorPush) # type: Dict[str,DockerFlavorPush] + tasks = 
self.create_tasks_for_flavors_with_common_params( # type: ignore + DockerFlavorPush + ) # type: Dict[str,DockerFlavorPush] self.image_info_futures = self.register_dependencies(tasks) def run_task(self): diff --git a/exasol_script_languages_container_tool/lib/tasks/save/docker_save.py b/exasol_script_languages_container_tool/lib/tasks/save/docker_save.py index a1b434dd..5aef198c 100644 --- a/exasol_script_languages_container_tool/lib/tasks/save/docker_save.py +++ b/exasol_script_languages_container_tool/lib/tasks/save/docker_save.py @@ -1,11 +1,18 @@ from typing import Dict -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask -from exasol_integration_test_docker_environment.lib.docker.images.save.save_task_creator_for_build_tasks import \ - SaveTaskCreatorFromBuildTasks +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.save.save_task_creator_for_build_tasks import ( + SaveTaskCreatorFromBuildTasks, +) -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase -from exasol_script_languages_container_tool.lib.tasks.save.docker_save_parameter import DockerSaveParameter +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) +from exasol_script_languages_container_tool.lib.tasks.save.docker_save_parameter import ( + DockerSaveParameter, +) class DockerSave(FlavorsBaseTask, DockerSaveParameter): @@ -14,8 +21,9 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def register_required(self): - tasks = self.create_tasks_for_flavors_with_common_params( - DockerFlavorSave) # type: Dict[str,DockerFlavorSave] + tasks = self.create_tasks_for_flavors_with_common_params( # type: ignore + DockerFlavorSave + ) # type: Dict[str,DockerFlavorSave] self.image_info_futures = 
self.register_dependencies(tasks) def run_task(self): diff --git a/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan.py b/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan.py index 82d4d294..e7e2f530 100644 --- a/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan.py +++ b/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan.py @@ -1,21 +1,26 @@ +import tarfile from pathlib import Path from typing import Dict import luigi - -import tarfile - -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask -from exasol_integration_test_docker_environment.lib.base.info import Info -from exasol_integration_test_docker_environment.lib.config.build_config import build_config - -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase - -from exasol_script_languages_container_tool.lib.tasks.security_scan.security_scan_parameter import SecurityScanParameter - from docker.models.containers import Container +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.info import Info +from exasol_integration_test_docker_environment.lib.config.build_config import ( + build_config, +) -from exasol_script_languages_container_tool.lib.utils.tar_safe_extract import safe_extract +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) +from exasol_script_languages_container_tool.lib.tasks.security_scan.security_scan_parameter import ( + SecurityScanParameter, +) +from exasol_script_languages_container_tool.lib.utils.tar_safe_extract import ( + safe_extract, +) class ScanResult(Info): @@ -34,16 +39,20 @@ def __init__(self, *args, **kwargs): self.security_report_target = luigi.LocalTarget(str(report_path)) def register_required(self): - 
tasks = self.create_tasks_for_flavors_with_common_params( - SecurityScanner, report_path=self.report_path) # type: Dict[str,SecurityScanner] + tasks = self.create_tasks_for_flavors_with_common_params( # type: ignore + SecurityScanner, report_path=self.report_path + ) # type: Dict[str,SecurityScanner] self.security_scanner_futures = self.register_dependencies(tasks) def run_task(self): security_scanner_results = self.get_values_from_futures( - self.security_scanner_futures) + self.security_scanner_futures + ) self.write_report(security_scanner_results) - all_result = AllScanResult(security_scanner_results, Path(self.security_report_target.path)) + all_result = AllScanResult( + security_scanner_results, Path(self.security_report_target.path) + ) self.return_object(all_result) def write_report(self, security_scanner: Dict[str, ScanResult]): @@ -51,14 +60,18 @@ def write_report(self, security_scanner: Dict[str, ScanResult]): for key, value in security_scanner.items(): out_file.write("\n") - out_file.write(f"============ START SECURITY SCAN REPORT - <{key}> ====================") + out_file.write( + f"============ START SECURITY SCAN REPORT - <{key}> ====================" + ) out_file.write("\n") out_file.write(f"Successful:{value.is_ok}\n") out_file.write(f"Full report:{value.report_dir}\n") out_file.write(f"Summary:\n") out_file.write(value.summary) out_file.write("\n") - out_file.write(f"============ END SECURITY SCAN REPORT - <{key}> ====================") + out_file.write( + f"============ END SECURITY SCAN REPORT - <{key}> ====================" + ) out_file.write("\n") @@ -82,44 +95,62 @@ def run_task(self): result = ScanResult(is_ok=False, summary="", report_dir=report_path_abs) assert len(task_results.values()) == 1 for task_result in task_results.values(): - self.logger.info(f"Running security run on image: {task_result.get_target_complete_name()}, report path: " - f"{report_path_abs}") + self.logger.info( + f"Running security run on image: 
{task_result.get_target_complete_name()}, report path: " + f"{report_path_abs}" + ) report_local_path = "/report" with self._get_docker_client() as docker_client: - result_container = docker_client.containers.run(task_result.get_target_complete_name(), - command=report_local_path, - detach=True, stderr=True) + result_container = docker_client.containers.run( + task_result.get_target_complete_name(), + command=report_local_path, + detach=True, + stderr=True, + ) try: logs = result_container.logs(follow=True).decode("UTF-8") result_container_result = result_container.wait() - #We don't use mount binding here to exchange the report files, but download them from the container - #Thus we avoid that the files are created by root - self._write_report(result_container, report_path_abs, report_local_path) - result = ScanResult(is_ok=(result_container_result["StatusCode"] == 0), - summary=logs, report_dir=report_path_abs) + # We don't use mount binding here to exchange the report files, but download them from the container + # Thus we avoid that the files are created by root + self._write_report( + result_container, report_path_abs, report_local_path + ) + result = ScanResult( + is_ok=(result_container_result["StatusCode"] == 0), + summary=logs, + report_dir=report_path_abs, + ) finally: result_container.remove() self.return_object(result) - def _write_report(self, container: Container, report_path_abs: Path, report_local_path: str): - tar_file_path = report_path_abs / 'report.tar' - with open(tar_file_path, 'wb') as tar_file: + def _write_report( + self, container: Container, report_path_abs: Path, report_local_path: str + ): + tar_file_path = report_path_abs / "report.tar" + with open(tar_file_path, "wb") as tar_file: bits, stat = container.get_archive(report_local_path) for chunk in bits: tar_file.write(chunk) with tarfile.open(tar_file_path) as tar_file: - safe_extract(tar_file, path=report_path_abs) + safe_extract(tar_file, path=report_path_abs) # type: ignore class 
AllScanResult(Info): - def __init__(self, scan_results_per_flavor: Dict[str, ScanResult], report_path: Path): + def __init__( + self, scan_results_per_flavor: Dict[str, ScanResult], report_path: Path + ): self.scan_results_per_flavor = scan_results_per_flavor - self.scans_are_ok = all(scan_result.is_ok - for scan_result - in scan_results_per_flavor.values()) + self.scans_are_ok = all( + scan_result.is_ok for scan_result in scan_results_per_flavor.values() + ) self.report_path = report_path def get_error_scans_msg(self): - return [f"{key}: '{value.summary}'" for key, value in self.scan_results_per_flavor.items() if not value.is_ok] + return [ + f"{key}: '{value.summary}'" + for key, value in self.scan_results_per_flavor.items() + if not value.is_ok + ] diff --git a/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan_parameter.py b/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan_parameter.py index 826ce84b..e0fd73ed 100644 --- a/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan_parameter.py +++ b/exasol_script_languages_container_tool/lib/tasks/security_scan/security_scan_parameter.py @@ -1,5 +1,7 @@ import luigi -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) class SecurityScanParameter(DependencyLoggerBaseTask): diff --git a/exasol_script_languages_container_tool/lib/tasks/test/populate_test_engine.py b/exasol_script_languages_container_tool/lib/tasks/test/populate_test_engine.py index 8d05d6a4..2cd1adbc 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/populate_test_engine.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/populate_test_engine.py @@ -1,9 +1,12 @@ from pathlib import PurePath -from 
exasol_integration_test_docker_environment.lib.test_environment.database_setup.populate_data import \ - PopulateTestDataToDatabase +from exasol_integration_test_docker_environment.lib.test_environment.database_setup.populate_data import ( + PopulateTestDataToDatabase, +) -from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import TEST_DATA_TARGET +from exasol_script_languages_container_tool.lib.tasks.test.test_container_content import ( + TEST_DATA_TARGET, +) class PopulateTestEngine(PopulateTestDataToDatabase): diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_generic_language_tests.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_generic_language_tests.py index 04a9c5d0..db840d48 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_generic_language_tests.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_generic_language_tests.py @@ -1,36 +1,52 @@ from typing import Any, Generator -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentialsParameter, +) -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import RunDBTestsInDirectory -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestFoldersResult, \ - RunDBTestDirectoryResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import \ - 
RunDBGenericLanguageTestParameter, \ - ActualRunDBTestParameter +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import ( + RunDBTestsInDirectory, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestDirectoryResult, + RunDBTestFoldersResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + ActualRunDBTestParameter, + RunDBGenericLanguageTestParameter, +) -class RunDBGenericLanguageTest(FlavorBaseTask, - RunDBGenericLanguageTestParameter, - ActualRunDBTestParameter, - DatabaseCredentialsParameter): +class RunDBGenericLanguageTest( + FlavorBaseTask, + RunDBGenericLanguageTestParameter, + ActualRunDBTestParameter, + DatabaseCredentialsParameter, +): def extend_output_path(self): return self.caller_output_path + ("generic",) def run_task(self): results = [] - for language in self.generic_language_tests: + for language in self.generic_language_tests: # pylint: disable=not-an-iterable test_result = yield from self.run_test(language, "generic") results.append(test_result) test_results = RunDBTestFoldersResult(test_results=results) - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_results, 4) + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_results, 4 + ) self.return_object(test_results) - def run_test(self, language: str, test_folder: str) -> \ - Generator[RunDBTestsInDirectory, Any, RunDBTestDirectoryResult]: + def run_test( + self, language: str, test_folder: str + ) -> Generator[RunDBTestsInDirectory, Any, RunDBTestDirectoryResult]: task = self.create_child_task_with_common_params( RunDBTestsInDirectory, language=language, diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py index d1dab50a..b96cc7d2 100644 --- 
a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py @@ -1,22 +1,43 @@ from collections import namedtuple +from io import StringIO from pathlib import Path from typing import Optional -from io import StringIO import docker.models.containers import luigi -from exasol_integration_test_docker_environment.lib.config.docker_config import source_docker_repository_config, \ - target_docker_repository_config -from exasol_integration_test_docker_environment.lib.config.log_config import log_config, WriteLogFilesToConsole - -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import RunDBTestParameter -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.frozendict_to_dict import FrozenDictToDict -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.frozendict_to_dict import ( + FrozenDictToDict, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.config.docker_config import ( + source_docker_repository_config, + target_docker_repository_config, +) +from exasol_integration_test_docker_environment.lib.config.log_config import ( + WriteLogFilesToConsole, + log_config, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentialsParameter, +) + +from 
exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + RunDBTestParameter, +) +from exasol_script_languages_container_tool.lib.utils.docker_utils import ( + exec_run_and_write_to_stream, +) + +DockerCredentials = namedtuple("DockerCredentials", "username password") -from exasol_script_languages_container_tool.lib.utils.docker_utils import exec_run_and_write_to_stream class DockerCommandException(Exception): """ @@ -24,9 +45,7 @@ class DockerCommandException(Exception): """ -class RunDBTest(FlavorBaseTask, - RunDBTestParameter, - DatabaseCredentialsParameter): +class RunDBTest(FlavorBaseTask, RunDBTestParameter, DatabaseCredentialsParameter): test_file = luigi.Parameter() def extend_output_path(self): @@ -39,17 +58,23 @@ def extend_output_path(self): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self._test_container_info = self.test_environment_info.test_container_info - self._database_info = self.test_environment_info.database_info + self._test_container_info = ( + self.test_environment_info.test_container_info # pylint: disable=no-member + ) # pylint: disable=no-member + self._database_info = ( + self.test_environment_info.database_info # pylint: disable=no-member + ) # pylint: disable=no-member def _run_command( - self, - docker_client: docker.client, - container: docker.models.containers.Container, - command: str, + self, + docker_client: docker.client, + container: docker.models.containers.Container, + command: str, ) -> str: file = StringIO() - exit_code = exec_run_and_write_to_stream(docker_client, container, command, file, {}) + exit_code = exec_run_and_write_to_stream( + docker_client, container, command, file, {} + ) if exit_code != 0: raise DockerCommandException(f"Command returned {exit_code}: {command}") return file.getvalue().strip() @@ -57,72 +82,112 @@ def _run_command( def 
run_task(self): self.logger.info("Running db tests") with self._get_docker_client() as docker_client: - test_container = docker_client.containers.get(self._test_container_info.container_name) + test_container = docker_client.containers.get( + self._test_container_info.container_name + ) odbc_driver = self._run_command( - docker_client, test_container, - "find /downloads/ODBC -name libexaodbc\*.so", + docker_client, + test_container, + r"find /downloads/ODBC -name libexaodbc\*.so", ) bash_cmd = self.generate_test_command(odbc_driver) test_output_file = self.get_log_path().joinpath("test_output") - exit_code = self.run_test_command(docker_client, bash_cmd, test_container, test_output_file) + exit_code = self.run_test_command( + docker_client, bash_cmd, test_container, test_output_file + ) self.handle_test_result(exit_code, test_output_file) @staticmethod def read_test_output_file(test_output_file: Path) -> str: - with open(test_output_file, "r") as f: + with open(test_output_file) as f: return f.read() def handle_test_result(self, exit_code: int, test_output_file: Path) -> None: - is_test_ok = (exit_code == 0) - if log_config().write_log_files_to_console == WriteLogFilesToConsole.all : - self.logger.info("Test results for db tests\n%s" - % self.read_test_output_file(test_output_file)) - if log_config().write_log_files_to_console == WriteLogFilesToConsole.only_error and not is_test_ok: - self.logger.error("Test results for db tests\n%s" - % self.read_test_output_file(test_output_file)) + is_test_ok = exit_code == 0 + if log_config().write_log_files_to_console == WriteLogFilesToConsole.all: + self.logger.info( + "Test results for db tests\n%s" + % self.read_test_output_file(test_output_file) + ) + if ( + log_config().write_log_files_to_console == WriteLogFilesToConsole.only_error + and not is_test_ok + ): + self.logger.error( + "Test results for db tests\n%s" + % self.read_test_output_file(test_output_file) + ) result = RunDBTestResult( test_file=self.test_file, 
language=self.language, is_test_ok=is_test_ok, - test_output_file=test_output_file) - JsonPickleTarget(self.get_output_path().joinpath("test_result.json")).write(result, 4) + test_output_file=test_output_file, + ) + JsonPickleTarget(self.get_output_path().joinpath("test_result.json")).write( + result, 4 + ) self.return_object(result) @staticmethod - def _get_docker_credentials() -> Optional[namedtuple]: - docker_credentials = namedtuple("DockerCredentials", "username password") - if source_docker_repository_config().username is not None and \ - source_docker_repository_config().password is not None: - return docker_credentials(source_docker_repository_config().username, - source_docker_repository_config().password) - if target_docker_repository_config().username is not None and \ - target_docker_repository_config().password is not None: - return docker_credentials(target_docker_repository_config().username, - target_docker_repository_config().password) + def _get_docker_credentials() -> Optional[DockerCredentials]: + + if ( + source_docker_repository_config().username is not None + and source_docker_repository_config().password is not None + ): + return DockerCredentials( + source_docker_repository_config().username, + source_docker_repository_config().password, + ) + if ( + target_docker_repository_config().username is not None + and target_docker_repository_config().password is not None + ): + return DockerCredentials( + target_docker_repository_config().username, + target_docker_repository_config().password, + ) return None - def run_test_command(self, docker_client: docker.client, bash_cmd: str, - test_container: docker.models.containers.Container, - test_output_file: Path) -> int: + def run_test_command( + self, + docker_client: docker.client, + bash_cmd: str, + test_container: docker.models.containers.Container, + test_output_file: Path, + ) -> int: environment = FrozenDictToDict().convert(self.test_environment_vars) docker_credentials = 
self.__class__._get_docker_credentials() if docker_credentials is not None: environment["DOCKER_USERNAME"] = docker_credentials.username environment["DOCKER_PASSWORD"] = docker_credentials.password - environment["TEST_ENVIRONMENT_TYPE"] = self.test_environment_info.type.name - environment["TEST_ENVIRONMENT_NAME"] = self.test_environment_info.name - environment["TEST_DOCKER_NETWORK_NAME"] = self.test_environment_info.network_info.network_name - if self.test_environment_info.database_info.container_info is not None: - environment["TEST_DOCKER_DB_CONTAINER_NAME"] = \ - self.test_environment_info.database_info.container_info.container_name + environment["TEST_ENVIRONMENT_TYPE"] = ( + self.test_environment_info.type.name # pylint: disable=no-member + ) # pylint: disable=no-member + environment["TEST_ENVIRONMENT_NAME"] = ( + self.test_environment_info.name # pylint: disable=no-member + ) # pylint: disable=no-member + environment["TEST_DOCKER_NETWORK_NAME"] = ( + self.test_environment_info.network_info.network_name # pylint: disable=no-member + ) + if ( + self.test_environment_info.database_info.container_info # pylint: disable=no-member + is not None + ): + environment["TEST_DOCKER_DB_CONTAINER_NAME"] = ( + self.test_environment_info.database_info.container_info.container_name # pylint: disable=no-member + ) self.logger.info(f"Writing test-log to {test_output_file}") - test_output = "command: " + bash_cmd + "\n" + \ - "environment: " + str(environment) + "\n" + test_output = ( + "command: " + bash_cmd + "\n" + "environment: " + str(environment) + "\n" + ) with test_output_file.open("w") as file: file.write(test_output) - exit_code = exec_run_and_write_to_stream(docker_client, test_container, bash_cmd, file, environment) + exit_code = exec_run_and_write_to_stream( + docker_client, test_container, bash_cmd, file, environment + ) return exit_code def generate_test_command(self, odbc_driver: str) -> str: @@ -136,18 +201,24 @@ def command_line(): "cd /tests/test/;", 
"python3", quote(self.test_file), - "--server", quote(f"{host}:{port}"), - "--user", quote(self.db_user), - "--password", quote(self.db_password), - "--script-languages", quote(self.language_definition), - "--lang-path", "/tests/lang", + "--server", + quote(f"{host}:{port}"), + "--user", + quote(self.db_user), + "--password", + quote(self.db_password), + "--script-languages", + quote(self.language_definition), + "--lang-path", + "/tests/lang", f"--loglevel={self.test_log_level}", f"--driver={odbc_driver}", - "--jdbc-path", "/downloads/JDBC/exajdbc.jar", + "--jdbc-path", + "/downloads/JDBC/exajdbc.jar", ] if self.language is not None: - yield from [ "--lang", self.language ] - yield from self.test_restrictions + yield from ["--lang", self.language] + yield from self.test_restrictions # pylint: disable=not-an-iterable - command = " ".join([ e for e in command_line() ]) + command = " ".join([e for e in command_line()]) return f'bash -c "{command}"' diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_files.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_files.py index 971a2fdc..668a04cd 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_files.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_files.py @@ -1,41 +1,61 @@ from typing import Any, Generator -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + 
DatabaseCredentialsParameter, +) from exasol_script_languages_container_tool.lib.tasks.test.run_db_test import RunDBTest -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import RunDBTestsInDirectory -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestCollectionResult, \ - RunDBTestFilesResult, \ - RunDBTestResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import RunDBTestFilesParameter, \ - ActualRunDBTestParameter +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import ( + RunDBTestsInDirectory, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestCollectionResult, + RunDBTestFilesResult, + RunDBTestResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + ActualRunDBTestParameter, + RunDBTestFilesParameter, +) -class RunDBTestFiles(FlavorBaseTask, - RunDBTestFilesParameter, - ActualRunDBTestParameter, - DatabaseCredentialsParameter): +class RunDBTestFiles( + FlavorBaseTask, + RunDBTestFilesParameter, + ActualRunDBTestParameter, + DatabaseCredentialsParameter, +): def extend_output_path(self): return self.caller_output_path + ("test_files",) def run_task(self): results = [] - for language in self.languages: + for language in self.languages: # pylint: disable=not-an-iterable results_for_language = [] - for test_file in self.test_files: + for test_file in self.test_files: # pylint: disable=not-an-iterable test_result = yield from self.run_test(language, test_file) results_for_language.append(test_result) - results.append(RunDBTestCollectionResult(language=language, - test_results=results_for_language)) + results.append( + RunDBTestCollectionResult( + language=language, test_results=results_for_language + ) + ) test_results = RunDBTestFilesResult(test_results=results) - 
JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_results, 4) + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_results, 4 + ) self.return_object(test_results) - def run_test(self, language: str, test_file: str) -> \ - Generator[RunDBTestsInDirectory, Any, RunDBTestResult]: + def run_test( + self, language: str, test_file: str + ) -> Generator[RunDBTestsInDirectory, Any, RunDBTestResult]: task = self.create_child_task_with_common_params( RunDBTest, test_file=test_file, diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_folder.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_folder.py index c8795f94..8f961db4 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_folder.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_folder.py @@ -1,40 +1,59 @@ from typing import Any, Generator -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentialsParameter, +) -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import RunDBTestsInDirectory -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestFoldersResult, \ - RunDBTestDirectoryResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import RunDBTestFolderParameter, \ - ActualRunDBTestParameter 
+from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_in_directory import ( + RunDBTestsInDirectory, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestDirectoryResult, + RunDBTestFoldersResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + ActualRunDBTestParameter, + RunDBTestFolderParameter, +) -class RunDBTestFolder(FlavorBaseTask, - RunDBTestFolderParameter, - ActualRunDBTestParameter, - DatabaseCredentialsParameter): +class RunDBTestFolder( + FlavorBaseTask, + RunDBTestFolderParameter, + ActualRunDBTestParameter, + DatabaseCredentialsParameter, +): def extend_output_path(self): return self.caller_output_path + ("test_folder",) def run_task(self): results = [] - for language in self.languages: - for test_folder in self.test_folders: + for language in self.languages: # pylint: disable=not-an-iterable + for test_folder in self.test_folders: # pylint: disable=not-an-iterable test_result = yield from self.run_test(language, test_folder) results.append(test_result) self.return_object(RunDBTestFoldersResult(test_results=results)) - def run_test(self, language: str, test_folder: str) -> \ - Generator[RunDBTestsInDirectory, Any, RunDBTestDirectoryResult]: + def run_test( + self, language: str, test_folder: str + ) -> Generator[RunDBTestsInDirectory, Any, RunDBTestDirectoryResult]: task = self.create_child_task_with_common_params( RunDBTestsInDirectory, language=language, directory=test_folder, ) test_result_future = yield from self.run_dependencies(task) - test_result = self.get_values_from_future(test_result_future) # type: RunDBTestDirectoryResult - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_result, 4) + test_result = self.get_values_from_future( + test_result_future + ) # type: RunDBTestDirectoryResult + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_result, 4 + ) return 
test_result diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_in_directory.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_in_directory.py index 72be413b..29453acb 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_in_directory.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_in_directory.py @@ -1,19 +1,29 @@ -from typing import List, Generator, Any +from typing import Any, Generator, List import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentialsParameter, +) from exasol_script_languages_container_tool.lib.tasks.test.run_db_test import RunDBTest -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestDirectoryResult, \ - RunDBTestResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import RunDBTestParameter +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestDirectoryResult, + RunDBTestResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + RunDBTestParameter, +) -class RunDBTestsInDirectory(FlavorBaseTask, - RunDBTestParameter, - DatabaseCredentialsParameter): +class RunDBTestsInDirectory( + FlavorBaseTask, RunDBTestParameter, DatabaseCredentialsParameter +): directory = luigi.Parameter() def extend_output_path(self): @@ 
-21,15 +31,21 @@ def extend_output_path(self): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self._test_container_info = self.test_environment_info.test_container_info + self._test_container_info = ( + self.test_environment_info.test_container_info # pylint: disable=no-member + ) # pylint: disable=no-member self.tasks = self.create_test_tasks_from_directory(self.directory) def run_task(self): test_results = yield from self.run_tests() - result = RunDBTestDirectoryResult(test_results=test_results, - language=self.language, - test_folder=self.directory) - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_results, 4) + result = RunDBTestDirectoryResult( + test_results=test_results, + language=self.language, + test_folder=self.directory, + ) + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_results, 4 + ) self.return_object(result) def run_tests(self) -> Generator[RunDBTest, Any, List[RunDBTestResult]]: @@ -40,20 +56,24 @@ def run_tests(self) -> Generator[RunDBTest, Any, List[RunDBTestResult]]: test_results.append(test_result) return test_results - def create_test_tasks_from_directory( - self, directory: str): + def create_test_tasks_from_directory(self, directory: str): with self._get_docker_client() as docker_client: - test_container = docker_client.containers.get(self._test_container_info.container_name) - exit_code, ls_output = test_container.exec_run(cmd="ls /tests/test/%s/" % directory) + test_container = docker_client.containers.get( + self._test_container_info.container_name + ) + exit_code, ls_output = test_container.exec_run( + cmd="ls /tests/test/%s/" % directory + ) test_files = ls_output.decode("utf-8").split("\n") - tasks = [self.create_test_task(directory, test_file) - for test_file in test_files - if test_file != "" and test_file.endswith(".py")] + tasks = [ + self.create_test_task(directory, test_file) + for test_file in test_files + if test_file != "" and 
test_file.endswith(".py") + ] return tasks def create_test_task(self, directory: str, test_file: str): task = self.create_child_task_with_common_params( - RunDBTest, - test_file=directory + "/" + test_file + RunDBTest, test_file=directory + "/" + test_file ) return task diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_result.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_result.py index fbc14fe4..66bf0b79 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_result.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test_result.py @@ -3,11 +3,9 @@ class RunDBTestResult: - def __init__(self, - test_file: str, - language: str, - is_test_ok: bool, - test_output_file: Path): + def __init__( + self, test_file: str, language: str, is_test_ok: bool, test_output_file: Path + ): self.test_output_file = str(test_output_file) self.test_file = str(test_file) self.is_ok = is_test_ok @@ -22,41 +20,45 @@ def __init__(self, language: str, test_results: List[RunDBTestResult]): class RunDBTestDirectoryResult(RunDBTestCollectionResult): - def __init__(self, - test_folder: str, - language: str, - test_results: List[RunDBTestResult]): + def __init__( + self, test_folder: str, language: str, test_results: List[RunDBTestResult] + ): super().__init__(language, test_results) self.test_folder = test_folder class RunDBTestFilesResult: - def __init__(self, - test_results: List[RunDBTestCollectionResult]): + def __init__(self, test_results: List[RunDBTestCollectionResult]): self.test_results = test_results - self.tests_are_ok = all(test_result.tests_are_ok for test_result in test_results) + self.tests_are_ok = all( + test_result.tests_are_ok for test_result in test_results + ) class RunDBTestFoldersResult: - def __init__(self, - test_results: List[RunDBTestDirectoryResult]): + def __init__(self, test_results: List[RunDBTestDirectoryResult]): self.test_results = test_results - self.tests_are_ok = 
all(test_result.tests_are_ok for test_result in test_results) + self.tests_are_ok = all( + test_result.tests_are_ok for test_result in test_results + ) class RunDBTestsInTestConfigResult: - def __init__(self, - flavor_path: str, - release_goal: str, - generic_language_tests_output: RunDBTestFoldersResult, - test_folders_output: RunDBTestFoldersResult, - test_files_output: RunDBTestFilesResult): + def __init__( + self, + flavor_path: str, + release_goal: str, + generic_language_tests_output: RunDBTestFoldersResult, + test_folders_output: RunDBTestFoldersResult, + test_files_output: RunDBTestFilesResult, + ): self.release_goal = release_goal self.flavor_path = str(flavor_path) self.test_files_output = test_files_output self.test_folders_output = test_folders_output self.generic_language_tests_output = generic_language_tests_output - self.tests_are_ok = \ - generic_language_tests_output.tests_are_ok and \ - test_folders_output.tests_are_ok and \ - test_files_output.tests_are_ok + self.tests_are_ok = ( + generic_language_tests_output.tests_are_ok + and test_folders_output.tests_are_ok + and test_files_output.tests_are_ok + ) diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_in_test_config.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_in_test_config.py index a52aa11f..f30486a3 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_in_test_config.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_in_test_config.py @@ -1,51 +1,80 @@ from typing import Any, Generator -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentialsParameter - -from exasol_script_languages_container_tool.lib.tasks.test.run_db_generic_language_tests import 
RunDBGenericLanguageTest -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_files import RunDBTestFiles -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_folder import RunDBTestFolder -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestsInTestConfigResult, \ - RunDBTestFoldersResult, RunDBTestFilesResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import \ - RunDBTestsInTestConfigParameter, \ - ActualRunDBTestParameter - - -class RunDBTestsInTestConfig(FlavorBaseTask, - RunDBTestsInTestConfigParameter, - ActualRunDBTestParameter, - DatabaseCredentialsParameter): +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentialsParameter, +) + +from exasol_script_languages_container_tool.lib.tasks.test.run_db_generic_language_tests import ( + RunDBGenericLanguageTest, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_files import ( + RunDBTestFiles, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_folder import ( + RunDBTestFolder, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestFilesResult, + RunDBTestFoldersResult, + RunDBTestsInTestConfigResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + ActualRunDBTestParameter, + RunDBTestsInTestConfigParameter, +) + + +class RunDBTestsInTestConfig( + FlavorBaseTask, + RunDBTestsInTestConfigParameter, + ActualRunDBTestParameter, + DatabaseCredentialsParameter, +): # TODO fetch database logs after test execution def run_task(self): test_folders_output = yield from self.run_test_folder() 
test_files_output = yield from self.run_test_files() generic_language_test_output = yield from self.run_generic_language_test() - result = RunDBTestsInTestConfigResult(flavor_path=self.flavor_path, - release_goal=self.release_goal, - generic_language_tests_output=generic_language_test_output, - test_folders_output=test_folders_output, - test_files_output=test_files_output) - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(result, 4) + result = RunDBTestsInTestConfigResult( + flavor_path=self.flavor_path, + release_goal=self.release_goal, + generic_language_tests_output=generic_language_test_output, + test_folders_output=test_folders_output, + test_files_output=test_files_output, + ) + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + result, 4 + ) self.return_object(result) - def run_generic_language_test(self) -> \ - Generator[RunDBGenericLanguageTest, Any, RunDBTestFoldersResult]: - generic_language_test_task = self.create_child_task_with_common_params(RunDBGenericLanguageTest) - generic_language_test_output_future = yield from self.run_dependencies(generic_language_test_task) + def run_generic_language_test( + self, + ) -> Generator[RunDBGenericLanguageTest, Any, RunDBTestFoldersResult]: + generic_language_test_task = self.create_child_task_with_common_params( + RunDBGenericLanguageTest + ) + generic_language_test_output_future = yield from self.run_dependencies( + generic_language_test_task + ) return self.get_values_from_future(generic_language_test_output_future) - def run_test_files(self) -> \ - Generator[RunDBGenericLanguageTest, Any, RunDBTestFilesResult]: + def run_test_files( + self, + ) -> Generator[RunDBGenericLanguageTest, Any, RunDBTestFilesResult]: test_files_task = self.create_child_task_with_common_params(RunDBTestFiles) test_files_output_future = yield from self.run_dependencies(test_files_task) return self.get_values_from_future(test_files_output_future) - def run_test_folder(self) -> \ 
- Generator[RunDBGenericLanguageTest, Any, RunDBTestFoldersResult]: + def run_test_folder( + self, + ) -> Generator[RunDBGenericLanguageTest, Any, RunDBTestFoldersResult]: test_folder_task = self.create_child_task_with_common_params(RunDBTestFolder) test_folder_output_future = yield from self.run_dependencies(test_folder_task) return self.get_values_from_future(test_folder_output_future) diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_parameter.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_parameter.py index 7566ff22..681398de 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_parameter.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_tests_parameter.py @@ -1,6 +1,10 @@ import luigi -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.data.environment_info import EnvironmentInfo +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) +from exasol_integration_test_docker_environment.lib.data.environment_info import ( + EnvironmentInfo, +) class GeneralRunDBTestParameter: @@ -12,7 +16,9 @@ class GeneralRunDBTestParameter: class ActualRunDBTestParameter(GeneralRunDBTestParameter): release_goal = luigi.Parameter() language_definition = luigi.Parameter(significant=False) - test_environment_info = JsonPickleParameter(EnvironmentInfo, significant=False) # type: EnvironmentInfo + test_environment_info = JsonPickleParameter( + EnvironmentInfo, significant=False + ) # type: EnvironmentInfo class RunDBTestParameter(ActualRunDBTestParameter): @@ -35,7 +41,7 @@ class RunDBTestFilesParameter(RunDBLanguageTestParameter): test_files = luigi.ListParameter([]) -class RunDBTestsInTestConfigParameter(RunDBGenericLanguageTestParameter, - RunDBTestFolderParameter, - RunDBTestFilesParameter): +class 
RunDBTestsInTestConfigParameter( + RunDBGenericLanguageTestParameter, RunDBTestFolderParameter, RunDBTestFilesParameter +): pass diff --git a/exasol_script_languages_container_tool/lib/tasks/test/test_container.py b/exasol_script_languages_container_tool/lib/tasks/test/test_container.py index 96d4d4df..ee51db56 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/test_container.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/test_container.py @@ -2,43 +2,64 @@ from typing import Dict import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask, FlavorBaseTask +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, + FlavorsBaseTask, +) from exasol_integration_test_docker_environment.lib.base.info import Info -from exasol_integration_test_docker_environment.lib.base.json_pickle_target import JsonPickleTarget -from exasol_integration_test_docker_environment.lib.test_environment.parameter.spawn_test_environment_parameter import \ - SpawnTestEnvironmentParameter - -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestsInTestConfigResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import \ - RunDBTestsInTestConfigParameter, \ - GeneralRunDBTestParameter -from exasol_script_languages_container_tool.lib.tasks.test.test_runner_db_test_task import TestRunnerDBTestTask +from exasol_integration_test_docker_environment.lib.base.json_pickle_target import ( + JsonPickleTarget, +) +from exasol_integration_test_docker_environment.lib.test_environment.parameter.spawn_test_environment_parameter import ( + SpawnTestEnvironmentParameter, +) + +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestsInTestConfigResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + GeneralRunDBTestParameter, + 
RunDBTestsInTestConfigParameter, +) +from exasol_script_languages_container_tool.lib.tasks.test.test_runner_db_test_task import ( + TestRunnerDBTestTask, +) STATUS_INDENT = 2 -class TestContainerParameter(RunDBTestsInTestConfigParameter, - GeneralRunDBTestParameter): +class TestContainerParameter( + RunDBTestsInTestConfigParameter, GeneralRunDBTestParameter +): release_goals = luigi.ListParameter(["release"]) languages = luigi.ListParameter([None]) reuse_uploaded_container = luigi.BoolParameter(False, significant=False) class FlavorTestResult: - def __init__(self, flavor_path: str, test_results_per_release_goal: Dict[str, RunDBTestsInTestConfigResult]): + def __init__( + self, + flavor_path: str, + test_results_per_release_goal: Dict[str, RunDBTestsInTestConfigResult], + ): self.flavor_path = str(flavor_path) self.test_results_per_release_goal = test_results_per_release_goal - self.tests_are_ok = all(test_result.tests_are_ok - for test_result - in test_results_per_release_goal.values()) + self.tests_are_ok = all( + test_result.tests_are_ok + for test_result in test_results_per_release_goal.values() + ) class AllTestsResult(Info): - def __init__(self, test_results_per_flavor: Dict[str, FlavorTestResult], command_line_output_path: Path): + def __init__( + self, + test_results_per_flavor: Dict[str, FlavorTestResult], + command_line_output_path: Path, + ): self.test_results_per_flavor = test_results_per_flavor - self.tests_are_ok = all(test_result.tests_are_ok - for test_result - in test_results_per_flavor.values()) + self.tests_are_ok = all( + test_result.tests_are_ok for test_result in test_results_per_flavor.values() + ) self.command_line_output_path = command_line_output_path @@ -46,65 +67,96 @@ class TestStatusPrinter: def __init__(self, file): self.file = file - def print_status_for_all_tests(self, test_results: Dict[str,FlavorTestResult]): + def print_status_for_all_tests(self, test_results: Dict[str, FlavorTestResult]): for flavor, test_result_of_flavor in 
test_results.items(): - print(f"- Tests: {self.get_status_string(test_result_of_flavor.tests_are_ok)}", - file=self.file) - self.print_status_for_flavor(flavor, test_result_of_flavor, indent=STATUS_INDENT) - - def print_status_for_flavor(self, - flavor_path: str, - test_result_of_flavor: FlavorTestResult, - indent: int): - print(self.get_indent_str(indent) + - f"- Tests for flavor {flavor_path}: {self.get_status_string(test_result_of_flavor.tests_are_ok)}", - file=self.file) - for release_goal, test_results_of_release_goal \ - in test_result_of_flavor.test_results_per_release_goal.items(): - self.print_status_for_release_goal(release_goal, test_results_of_release_goal, - indent=indent + STATUS_INDENT) - - def print_status_for_release_goal(self, - release_goal: str, - test_results_of_release_goal: RunDBTestsInTestConfigResult, - indent: int): - print(self.get_indent_str(indent) + - f"- Tests for release goal {release_goal}: " + - self.get_status_string(test_results_of_release_goal.tests_are_ok), - file=self.file) - self.print_status_for_generic_language_tests(test_results_of_release_goal, indent=indent + STATUS_INDENT) - self.print_status_for_test_folders(test_results_of_release_goal, indent=indent + STATUS_INDENT) - self.print_status_for_test_files(test_results_of_release_goal, indent=indent + STATUS_INDENT) - - def print_status_for_test_files(self, - test_result_of_flavor: RunDBTestsInTestConfigResult, - indent: int): - for test_results_for_test_files in test_result_of_flavor.test_files_output.test_results: - print(self.get_indent_str(indent) + - f"- Tests in test files " - f"with language {test_results_for_test_files.language}: " - f"{self.get_status_string(test_results_for_test_files.tests_are_ok)}", - file=self.file) - - def print_status_for_test_folders(self, - test_result_of_flavor: RunDBTestsInTestConfigResult, - indent: int): - for test_results_for_test_folder in test_result_of_flavor.test_folders_output.test_results: - print(self.get_indent_str(indent) + - 
f"- Tests in test folder {test_results_for_test_folder.test_folder} " - f"with language {test_results_for_test_folder.test_folder}: " - f"{self.get_status_string(test_results_for_test_folder.tests_are_ok)}", - file=self.file) - - def print_status_for_generic_language_tests(self, - test_result_of_flavor: RunDBTestsInTestConfigResult, - indent: int): - for test_results_for_test_folder in test_result_of_flavor.generic_language_tests_output.test_results: - print(self.get_indent_str(indent) + - f"- Tests in test folder {test_results_for_test_folder.test_folder}" - f"with language {test_results_for_test_folder.language}: " - f"{self.get_status_string(test_results_for_test_folder.tests_are_ok)}", - file=self.file) + print( + f"- Tests: {self.get_status_string(test_result_of_flavor.tests_are_ok)}", + file=self.file, + ) + self.print_status_for_flavor( + flavor, test_result_of_flavor, indent=STATUS_INDENT + ) + + def print_status_for_flavor( + self, flavor_path: str, test_result_of_flavor: FlavorTestResult, indent: int + ): + print( + self.get_indent_str(indent) + + f"- Tests for flavor {flavor_path}: {self.get_status_string(test_result_of_flavor.tests_are_ok)}", + file=self.file, + ) + for ( + release_goal, + test_results_of_release_goal, + ) in test_result_of_flavor.test_results_per_release_goal.items(): + self.print_status_for_release_goal( + release_goal, + test_results_of_release_goal, + indent=indent + STATUS_INDENT, + ) + + def print_status_for_release_goal( + self, + release_goal: str, + test_results_of_release_goal: RunDBTestsInTestConfigResult, + indent: int, + ): + print( + self.get_indent_str(indent) + + f"- Tests for release goal {release_goal}: " + + self.get_status_string(test_results_of_release_goal.tests_are_ok), + file=self.file, + ) + self.print_status_for_generic_language_tests( + test_results_of_release_goal, indent=indent + STATUS_INDENT + ) + self.print_status_for_test_folders( + test_results_of_release_goal, indent=indent + STATUS_INDENT + ) + 
self.print_status_for_test_files( + test_results_of_release_goal, indent=indent + STATUS_INDENT + ) + + def print_status_for_test_files( + self, test_result_of_flavor: RunDBTestsInTestConfigResult, indent: int + ): + for ( + test_results_for_test_files + ) in test_result_of_flavor.test_files_output.test_results: + print( + self.get_indent_str(indent) + f"- Tests in test files " + f"with language {test_results_for_test_files.language}: " + f"{self.get_status_string(test_results_for_test_files.tests_are_ok)}", + file=self.file, + ) + + def print_status_for_test_folders( + self, test_result_of_flavor: RunDBTestsInTestConfigResult, indent: int + ): + for ( + test_results_for_test_folder + ) in test_result_of_flavor.test_folders_output.test_results: + print( + self.get_indent_str(indent) + + f"- Tests in test folder {test_results_for_test_folder.test_folder} " + f"with language {test_results_for_test_folder.test_folder}: " + f"{self.get_status_string(test_results_for_test_folder.tests_are_ok)}", + file=self.file, + ) + + def print_status_for_generic_language_tests( + self, test_result_of_flavor: RunDBTestsInTestConfigResult, indent: int + ): + for ( + test_results_for_test_folder + ) in test_result_of_flavor.generic_language_tests_output.test_results: + print( + self.get_indent_str(indent) + + f"- Tests in test folder {test_results_for_test_folder.test_folder}" + f"with language {test_results_for_test_folder.language}: " + f"{self.get_status_string(test_results_for_test_folder.tests_are_ok)}", + file=self.file, + ) @staticmethod def get_indent_str(indent: int): @@ -112,57 +164,73 @@ def get_indent_str(indent: int): @staticmethod def get_status_string(status: bool): - return 'OK' if status else 'FAILED' + return "OK" if status else "FAILED" -class TestContainer(FlavorsBaseTask, - TestContainerParameter, - SpawnTestEnvironmentParameter): +class TestContainer( + FlavorsBaseTask, TestContainerParameter, SpawnTestEnvironmentParameter +): def __init__(self, *args, **kwargs): 
self.test_results_futures = None super().__init__(*args, **kwargs) - command_line_output_path = self.get_output_path().joinpath("command_line_output") - self.command_line_output_target = luigi.LocalTarget(str(command_line_output_path)) + command_line_output_path = self.get_output_path().joinpath( + "command_line_output" + ) + self.command_line_output_target = luigi.LocalTarget( + str(command_line_output_path) + ) def register_required(self): - tasks = self.create_tasks_for_flavors_with_common_params( - TestFlavorContainer) # type: Dict[str,TestFlavorContainer] + tasks = self.create_tasks_for_flavors_with_common_params( # type: ignore + TestFlavorContainer + ) # type: Dict[str,TestFlavorContainer] self.test_results_futures = self.register_dependencies(tasks) def run_task(self): - test_results = self.get_values_from_futures( - self.test_results_futures) # type: Dict[str,FlavorTestResult] - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_results, 4) + test_results = self.get_values_from_futures( # type: ignore + self.test_results_futures + ) # type: Dict[str,FlavorTestResult] + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_results, 4 + ) with self.command_line_output_target.open("w") as file: TestStatusPrinter(file).print_status_for_all_tests(test_results) - test_result = AllTestsResult(test_results_per_flavor=test_results, - command_line_output_path=Path(self.command_line_output_target.path)) + test_result = AllTestsResult( + test_results_per_flavor=test_results, + command_line_output_path=Path(self.command_line_output_target.path), + ) self.return_object(test_result) -class TestFlavorContainer(FlavorBaseTask, - TestContainerParameter, - SpawnTestEnvironmentParameter): +class TestFlavorContainer( + FlavorBaseTask, TestContainerParameter, SpawnTestEnvironmentParameter +): def __init__(self, *args, **kwargs): self.test_result_futures = None super().__init__(*args, **kwargs) def 
register_required(self): - tasks = {release_goal: self.generate_tasks_for_flavor(release_goal) - for release_goal in self.release_goals} + tasks = { + release_goal: self.generate_tasks_for_flavor(release_goal) + for release_goal in self.release_goals # type: ignore # pylint: disable=not-an-iterable + } self.test_result_futures = self.register_dependencies(tasks) def generate_tasks_for_flavor(self, release_goal: str): - task = self.create_child_task_with_common_params(TestRunnerDBTestTask, - release_goal=release_goal) + task = self.create_child_task_with_common_params( + TestRunnerDBTestTask, release_goal=release_goal + ) return task def run_task(self): - test_results = self.get_values_from_futures( - self.test_result_futures) # type: Dict[str,RunDBTestsInTestConfigResult] + test_results = self.get_values_from_futures( # type: ignore + self.test_result_futures + ) # type: Dict[str,RunDBTestsInTestConfigResult] result = FlavorTestResult(self.flavor_path, test_results) - JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write(test_results, 4) + JsonPickleTarget(self.get_output_path().joinpath("test_results.json")).write( + test_results, 4 + ) self.return_object(result) diff --git a/exasol_script_languages_container_tool/lib/tasks/test/test_container_content.py b/exasol_script_languages_container_tool/lib/tasks/test/test_container_content.py index 7f410536..25ac570e 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/test_container_content.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/test_container_content.py @@ -1,12 +1,17 @@ from pathlib import Path -from exasol_integration_test_docker_environment.lib.data.test_container_content_description import \ - TestContainerContentDescription, TestContainerBuildMapping, TestContainerRuntimeMapping +from exasol_integration_test_docker_environment.lib.data.test_container_content_description import ( + TestContainerBuildMapping, + TestContainerContentDescription, + 
TestContainerRuntimeMapping, +) TEST_DATA_TARGET = "/tests_data" -def build_test_container_content(test_container_folder: str) -> TestContainerContentDescription: +def build_test_container_content( + test_container_folder: str, +) -> TestContainerContentDescription: test_container_path = Path(test_container_folder) test_container_build_path = test_container_path / "build" test_container_build_deps_path = test_container_path / "build" / "deps" @@ -14,12 +19,19 @@ def build_test_container_content(test_container_folder: str) -> TestContainerCon test_container_tests_path = test_container_path / "tests" return TestContainerContentDescription( docker_file=str(test_container_build_path / "Dockerfile"), - build_files_and_directories=[TestContainerBuildMapping(source=test_container_build_deps_path, - target="deps")], - runtime_mappings=[TestContainerRuntimeMapping(source=test_container_tests_path, - target="/tests_src", - deployment_target="/tests"), - TestContainerRuntimeMapping(source=test_container_test_data_path, - target=TEST_DATA_TARGET) - ] + build_files_and_directories=[ + TestContainerBuildMapping( + source=test_container_build_deps_path, target="deps" + ) + ], + runtime_mappings=[ + TestContainerRuntimeMapping( + source=test_container_tests_path, + target="/tests_src", + deployment_target="/tests", + ), + TestContainerRuntimeMapping( + source=test_container_test_data_path, target=TEST_DATA_TARGET + ), + ], ) diff --git a/exasol_script_languages_container_tool/lib/tasks/test/test_runner_db_test_task.py b/exasol_script_languages_container_tool/lib/tasks/test/test_runner_db_test_task.py index 776e172d..5583b1f7 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/test_runner_db_test_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/test_runner_db_test_task.py @@ -1,30 +1,67 @@ import pathlib -from typing import Generator, Any, Dict +from typing import Any, Dict, Generator import luigi from docker.models.containers import ExecResult 
-from exasol_integration_test_docker_environment.lib.base.db_os_executor import DbOsExecFactory, SshExecFactory, \ - DockerClientFactory, DockerExecFactory, DbOsExecutor -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.data.database_info import DatabaseInfo -from exasol_integration_test_docker_environment.lib.test_environment.parameter.docker_db_test_environment_parameter import \ - DbOsAccess - -from exasol_script_languages_container_tool.lib.tasks.export.export_containers import ExportFlavorContainer -from exasol_script_languages_container_tool.lib.tasks.export.export_info import ExportInfo -from exasol_script_languages_container_tool.lib.tasks.test.populate_test_engine import PopulateTestEngine -from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import RunDBTestsInTestConfigResult -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_in_test_config import RunDBTestsInTestConfig -from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import RunDBTestsInTestConfigParameter -from exasol_script_languages_container_tool.lib.tasks.test.upload_exported_container import UploadExportedContainer -from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import LanguageDefinition -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask -from exasol_integration_test_docker_environment.lib.data.database_credentials import DatabaseCredentials -from exasol_integration_test_docker_environment.lib.data.environment_info import EnvironmentInfo -from exasol_integration_test_docker_environment.lib.data.environment_type import EnvironmentType -from exasol_integration_test_docker_environment.lib.test_environment.parameter.spawn_test_environment_parameter import \ - SpawnTestEnvironmentParameter -from 
exasol_integration_test_docker_environment.lib.test_environment.spawn_test_environment import SpawnTestEnvironment +from exasol_integration_test_docker_environment.lib.base.db_os_executor import ( + DbOsExecFactory, + DbOsExecutor, + DockerClientFactory, + DockerExecFactory, + SshExecFactory, +) +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) +from exasol_integration_test_docker_environment.lib.data.database_credentials import ( + DatabaseCredentials, +) +from exasol_integration_test_docker_environment.lib.data.database_info import ( + DatabaseInfo, +) +from exasol_integration_test_docker_environment.lib.data.environment_info import ( + EnvironmentInfo, +) +from exasol_integration_test_docker_environment.lib.data.environment_type import ( + EnvironmentType, +) +from exasol_integration_test_docker_environment.lib.test_environment.parameter.docker_db_test_environment_parameter import ( + DbOsAccess, +) # pylint: disable=line-too-long +from exasol_integration_test_docker_environment.lib.test_environment.parameter.spawn_test_environment_parameter import ( + SpawnTestEnvironmentParameter, +) # pylint: disable=line-too-long +from exasol_integration_test_docker_environment.lib.test_environment.spawn_test_environment import ( + SpawnTestEnvironment, +) + +from exasol_script_languages_container_tool.lib.tasks.export.export_containers import ( + ExportFlavorContainer, +) +from exasol_script_languages_container_tool.lib.tasks.export.export_info import ( + ExportInfo, +) +from exasol_script_languages_container_tool.lib.tasks.test.populate_test_engine import ( + PopulateTestEngine, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_test_result import ( + RunDBTestsInTestConfigResult, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_in_test_config import ( + 
RunDBTestsInTestConfig, +) +from exasol_script_languages_container_tool.lib.tasks.test.run_db_tests_parameter import ( + RunDBTestsInTestConfigParameter, +) +from exasol_script_languages_container_tool.lib.tasks.test.upload_exported_container import ( + UploadExportedContainer, +) +from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import ( + LanguageDefinition, +) class DummyExecutor(DbOsExecutor): @@ -47,9 +84,9 @@ def executor(self) -> DbOsExecutor: return DummyExecutor() -class TestRunnerDBTestTask(FlavorBaseTask, - SpawnTestEnvironmentParameter, - RunDBTestsInTestConfigParameter): +class TestRunnerDBTestTask( + FlavorBaseTask, SpawnTestEnvironmentParameter, RunDBTestsInTestConfigParameter +): reuse_uploaded_container = luigi.BoolParameter(False, significant=False) release_goal = luigi.Parameter() @@ -62,32 +99,35 @@ def register_required(self): self.register_spawn_test_environment() def register_export_container(self): - export_container_task = self.create_child_task(ExportFlavorContainer, - release_goals=[ - self.release_goal], - flavor_path=self.flavor_path) - self._export_infos_future = self.register_dependency( - export_container_task) + export_container_task = self.create_child_task( + ExportFlavorContainer, + release_goals=[self.release_goal], + flavor_path=self.flavor_path, + ) + self._export_infos_future = self.register_dependency(export_container_task) def register_spawn_test_environment(self): test_environment_name = f"""{self.get_flavor_name()}_{self.release_goal}""" - spawn_test_environment_task = \ - self.create_child_task_with_common_params( - SpawnTestEnvironment, - environment_name=test_environment_name) + spawn_test_environment_task = self.create_child_task_with_common_params( + SpawnTestEnvironment, environment_name=test_environment_name + ) self._test_environment_info_future = self.register_dependency( - spawn_test_environment_task) + spawn_test_environment_task + ) def run_task(self): - export_infos = 
self.get_values_from_future( - self._export_infos_future) # type: Dict[str,ExportInfo] + export_infos = self.get_values_from_future( # type: ignore + self._export_infos_future + ) # type: Dict[str,ExportInfo] export_info = export_infos[self.release_goal] - self.test_environment_info = self.get_values_from_future( - self._test_environment_info_future) # type: EnvironmentInfo + self.test_environment_info = self.get_values_from_future( # type: ignore + self._test_environment_info_future + ) # type: EnvironmentInfo database_credentials = self.get_database_credentials() - yield from self.upload_container(database_credentials, - export_info) - yield from self.populate_test_engine_data(self.test_environment_info, database_credentials) + yield from self.upload_container(database_credentials, export_info) + yield from self.populate_test_engine_data( + self.test_environment_info, database_credentials + ) test_results = yield from self.run_test(self.test_environment_info, export_info) self.return_object(test_results) @@ -97,14 +137,19 @@ def _executor_factory(self, database_info: DatabaseInfo) -> DbOsExecFactory: return SshExecFactory.from_database_info(database_info) client_factory = DockerClientFactory(timeout=100000) if database_info.container_info is not None: - return DockerExecFactory(database_info.container_info.container_name, client_factory) + return DockerExecFactory( + database_info.container_info.container_name, client_factory + ) return DummyExecFactory() - def upload_container(self, database_credentials: DatabaseCredentials, export_info: ExportInfo): - reuse = \ - self.reuse_database and \ - self.reuse_uploaded_container and \ - not export_info.is_new + def upload_container( + self, database_credentials: DatabaseCredentials, export_info: ExportInfo + ): + reuse = ( + self.reuse_database + and self.reuse_uploaded_container + and not export_info.is_new + ) upload_task = self.create_child_task_with_common_params( UploadExportedContainer, export_info=export_info, @@ 
-113,13 +158,21 @@ def upload_container(self, database_credentials: DatabaseCredentials, export_inf release_name=export_info.name, reuse_uploaded=reuse, bucketfs_write_password=database_credentials.bucketfs_write_password, - executor_factory=self._executor_factory(self.test_environment_info.database_info) + executor_factory=self._executor_factory( + self.test_environment_info.database_info + ), ) yield from self.run_dependencies(upload_task) - def populate_test_engine_data(self, test_environment_info: EnvironmentInfo, - database_credentials: DatabaseCredentials) -> None: - reuse = self.reuse_database_setup and self.test_environment_info.database_info.reused + def populate_test_engine_data( # type: ignore + self, + test_environment_info: EnvironmentInfo, + database_credentials: DatabaseCredentials, + ) -> None: + reuse = ( + self.reuse_database_setup + and self.test_environment_info.database_info.reused + ) if not reuse: task = self.create_child_task( PopulateTestEngine, @@ -127,24 +180,27 @@ def populate_test_engine_data(self, test_environment_info: EnvironmentInfo, environment_name=self.test_environment_info.name, db_user=database_credentials.db_user, db_password=database_credentials.db_password, - bucketfs_write_password=database_credentials.bucketfs_write_password + bucketfs_write_password=database_credentials.bucketfs_write_password, ) yield from self.run_dependencies(task) def get_database_credentials(self) -> DatabaseCredentials: if self.environment_type == EnvironmentType.external_db: - return \ - DatabaseCredentials(db_user=self.external_exasol_db_user, - db_password=self.external_exasol_db_password, - bucketfs_write_password=self.external_exasol_bucketfs_write_password) + return DatabaseCredentials( + db_user=self.external_exasol_db_user, + db_password=self.external_exasol_db_password, + bucketfs_write_password=self.external_exasol_bucketfs_write_password, + ) else: - return \ - DatabaseCredentials(db_user=SpawnTestEnvironment.DEFAULT_DB_USER, - 
db_password=SpawnTestEnvironment.DEFAULT_DATABASE_PASSWORD, - bucketfs_write_password=SpawnTestEnvironment.DEFAULT_BUCKETFS_WRITE_PASSWORD) + return DatabaseCredentials( + db_user=SpawnTestEnvironment.DEFAULT_DB_USER, + db_password=SpawnTestEnvironment.DEFAULT_DATABASE_PASSWORD, + bucketfs_write_password=SpawnTestEnvironment.DEFAULT_BUCKETFS_WRITE_PASSWORD, + ) - def run_test(self, test_environment_info: EnvironmentInfo, export_info: ExportInfo) -> \ - Generator[RunDBTestsInTestConfig, Any, RunDBTestsInTestConfigResult]: + def run_test( + self, test_environment_info: EnvironmentInfo, export_info: ExportInfo + ) -> Generator[RunDBTestsInTestConfig, Any, RunDBTestsInTestConfigResult]: test_config = self.read_test_config() generic_language_tests = self.get_generic_language_tests(test_config) test_folders = self.get_test_folders(test_config) @@ -156,7 +212,8 @@ def run_test(self, test_environment_info: EnvironmentInfo, export_info: ExportIn bucket_name="myudfs", bucketfs_name="bfsdefault", path_in_bucket="", - add_missing_builtin=True) + add_missing_builtin=True, + ) task = self.create_child_task_with_common_params( RunDBTestsInTestConfig, test_environment_info=test_environment_info, @@ -165,11 +222,12 @@ def run_test(self, test_environment_info: EnvironmentInfo, export_info: ExportIn language_definition=language_definition.generate_definition(), db_user=database_credentials.db_user, db_password=database_credentials.db_password, - bucketfs_write_password=database_credentials.bucketfs_write_password + bucketfs_write_password=database_credentials.bucketfs_write_password, ) test_output_future = yield from self.run_dependencies(task) test_output = self.get_values_from_future( - test_output_future) # type: RunDBTestsInTestConfigResult + test_output_future + ) # type: RunDBTestsInTestConfigResult return test_output @staticmethod @@ -191,21 +249,24 @@ def get_test_folders(self, test_config): return test_folders def tests_specified_in_parameters(self): - return 
len(self.generic_language_tests) != 0 or \ - len(self.test_folders) != 0 or \ - len(self.test_files) != 0 + return ( + len(self.generic_language_tests) != 0 + or len(self.test_folders) != 0 + or len(self.test_files) != 0 + ) def get_generic_language_tests(self, test_config): generic_language_tests = [] if test_config["generic_language_tests"] != "": - generic_language_tests = test_config["generic_language_tests"].split( - " ") + generic_language_tests = test_config["generic_language_tests"].split(" ") if self.tests_specified_in_parameters(): generic_language_tests = self.generic_language_tests return generic_language_tests def read_test_config(self): - with pathlib.Path(self.flavor_path).joinpath("flavor_base").joinpath("testconfig").open("r") as file: + with pathlib.Path(self.flavor_path).joinpath("flavor_base").joinpath( + "testconfig" + ).open("r") as file: test_config_str = file.read() test_config = {} for line in test_config_str.splitlines(): diff --git a/exasol_script_languages_container_tool/lib/tasks/test/upload_exported_container.py b/exasol_script_languages_container_tool/lib/tasks/test/upload_exported_container.py index a4ee9a2b..4a9fc9e6 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/upload_exported_container.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/upload_exported_container.py @@ -1,10 +1,16 @@ import pathlib import luigi -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) -from exasol_script_languages_container_tool.lib.tasks.export.export_info import ExportInfo -from exasol_script_languages_container_tool.lib.tasks.test.upload_file_to_bucket_fs import UploadFileToBucketFS +from exasol_script_languages_container_tool.lib.tasks.export.export_info import ( + ExportInfo, +) +from 
exasol_script_languages_container_tool.lib.tasks.test.upload_file_to_bucket_fs import ( + UploadFileToBucketFS, +) class UploadExportedContainer(UploadFileToBucketFS): @@ -16,14 +22,16 @@ def get_log_file(self): return "/exa/logs/cored/bucketfsd*" def get_pattern_to_wait_for(self): - return self.export_info.name + ".*extracted" + return self.export_info.name + ".*extracted" # pylint: disable=no-member def get_file_to_upload(self): - file = self.export_info.cache_file + file = self.export_info.cache_file # pylint: disable=no-member return file def get_upload_target(self): - return "myudfs/" + self.export_info.name + ".tar.gz" + return ( + "myudfs/" + self.export_info.name + ".tar.gz" # pylint: disable=no-member + ) # pylint: disable=no-member def get_sync_time_estimation(self) -> int: return 1 * 60 diff --git a/exasol_script_languages_container_tool/lib/tasks/test/upload_file_to_bucket_fs.py b/exasol_script_languages_container_tool/lib/tasks/test/upload_file_to_bucket_fs.py index 85ca14e2..2bfc594b 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/upload_file_to_bucket_fs.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/upload_file_to_bucket_fs.py @@ -2,30 +2,38 @@ from pathlib import Path from typing import Tuple +import exasol.bucketfs as bfs # type: ignore import luigi from docker.models.containers import Container # TODO add timeout, because sometimes the upload stucks -from exasol_integration_test_docker_environment.abstract_method_exception import AbstractMethodException -from exasol_integration_test_docker_environment.lib.base.docker_base_task import DockerBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.base.still_running_logger import StillRunningLogger, \ - StillRunningLoggerThread -from exasol_integration_test_docker_environment.lib.data.environment_info \ - import EnvironmentInfo -from 
exasol_integration_test_docker_environment \ - .lib.test_environment.database_setup.docker_db_log_based_bucket_sync_checker \ - import DockerDBLogBasedBucketFSSyncChecker -from exasol_integration_test_docker_environment \ - .lib.test_environment.database_setup.time_based_bucketfs_sync_waiter \ - import TimeBasedBucketFSSyncWaiter -from exasol_integration_test_docker_environment \ - .lib.base.db_os_executor import ( - DbOsExecutor, - DbOsExecFactory, - ) +from exasol_integration_test_docker_environment.abstract_method_exception import ( + AbstractMethodException, +) +from exasol_integration_test_docker_environment.lib.base.db_os_executor import ( + DbOsExecFactory, + DbOsExecutor, +) +from exasol_integration_test_docker_environment.lib.base.docker_base_task import ( + DockerBaseTask, +) +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) +from exasol_integration_test_docker_environment.lib.base.still_running_logger import ( + StillRunningLogger, + StillRunningLoggerThread, +) +from exasol_integration_test_docker_environment.lib.data.environment_info import ( + EnvironmentInfo, +) +from exasol_integration_test_docker_environment.lib.test_environment.database_setup.docker_db_log_based_bucket_sync_checker import ( + DockerDBLogBasedBucketFSSyncChecker, +) # pylint: disable=line-too-long +from exasol_integration_test_docker_environment.lib.test_environment.database_setup.time_based_bucketfs_sync_waiter import ( + TimeBasedBucketFSSyncWaiter, +) # pylint: disable=line-too-long -import exasol.bucketfs as bfs @dataclasses.dataclass class UploadResult: @@ -36,15 +44,19 @@ class UploadResult: class UploadFileToBucketFS(DockerBaseTask): environment_name = luigi.Parameter() test_environment_info = JsonPickleParameter( - EnvironmentInfo, significant=False) # type: EnvironmentInfo + EnvironmentInfo, significant=False + ) # type: EnvironmentInfo reuse_uploaded = luigi.BoolParameter(False, significant=False) 
bucketfs_write_password = luigi.Parameter( - significant=False, visibility=luigi.parameter.ParameterVisibility.HIDDEN) - executor_factory=JsonPickleParameter(DbOsExecFactory, significant=False) + significant=False, visibility=luigi.parameter.ParameterVisibility.HIDDEN + ) + executor_factory = JsonPickleParameter(DbOsExecFactory, significant=False) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self._database_info = self.test_environment_info.database_info + self._database_info = ( + self.test_environment_info.database_info # pylint: disable=no-member + ) # pylint: disable=no-member def run_task(self): file_to_upload = self.get_file_to_upload() @@ -56,11 +68,12 @@ def run_task(self): with self._get_docker_client() as docker_client: if self._database_info.container_info is not None: database_container = docker_client.containers.get( - self._database_info.container_info.container_name) + self._database_info.container_info.container_name + ) else: database_container = None if not self.should_be_reused(upload_target): - with self.executor_factory.executor() as executor: + with self.executor_factory.executor() as executor: # pylint: disable=no-member executor.prepare() self.upload_and_wait( database_container, @@ -71,28 +84,29 @@ def run_task(self): sync_time_estimation, db_os_executor=executor, ) - self.return_object(UploadResult( - upload_target=upload_target, - reused=False - )) + self.return_object( + UploadResult(upload_target=upload_target, reused=False) + ) else: - self.logger.warning("Reusing uploaded target %s instead of file %s", - upload_target, file_to_upload) + self.logger.warning( + "Reusing uploaded target %s instead of file %s", + upload_target, + file_to_upload, + ) self.write_logs("Reusing") - self.return_object(UploadResult( - upload_target=upload_target, - reused=True - )) + self.return_object( + UploadResult(upload_target=upload_target, reused=True) + ) def upload_and_wait( - self, - database_container, - file_to_upload: 
str, - upload_target: str, - log_file: str, - pattern_to_wait_for: str, - sync_time_estimation: int, - db_os_executor: DbOsExecutor, + self, + database_container, + file_to_upload: str, + upload_target: str, + log_file: str, + pattern_to_wait_for: str, + sync_time_estimation: int, + db_os_executor: DbOsExecutor, ): still_running_logger = StillRunningLogger( self.logger, @@ -120,12 +134,12 @@ def upload_and_wait( thread.join() def get_sync_checker( - self, - database_container: Container, - sync_time_estimation: int, - log_file: str, - pattern_to_wait_for: str, - db_os_executor: DbOsExecutor, + self, + database_container: Container, + sync_time_estimation: int, + log_file: str, + pattern_to_wait_for: str, + db_os_executor: DbOsExecutor, ): if database_container is not None: return DockerDBLogBasedBucketFSSyncChecker( @@ -161,23 +175,23 @@ def bucket_fs_url(self): def build_file_path_in_bucket(self, upload_target: str) -> bfs.path.PathLike: backend = bfs.path.StorageBackend.onprem - bucket_name, path_in_bucket, file_in_bucket = self.split_upload_target(upload_target) + bucket_name, path_in_bucket, file_in_bucket = self.split_upload_target( + upload_target + ) path_in_bucket_to_upload_path = bfs.path.build_path( backend=backend, url=self.bucket_fs_url, bucket_name=bucket_name, - service_name='bfsdefault', + service_name="bfsdefault", path=path_in_bucket, - username='w', + username="w", password=self.bucketfs_write_password, - verify=False + verify=False, ) return path_in_bucket_to_upload_path / file_in_bucket - def upload_file(self, file_to_upload: str, upload_target: str): - self.logger.info("upload file %s to %s", - file_to_upload, upload_target) + self.logger.info("upload file %s to %s", file_to_upload, upload_target) file_in_bucket_to_upload_path = self.build_file_path_in_bucket(upload_target) with open(file_to_upload, "rb") as f: file_in_bucket_to_upload_path.write(f) @@ -203,4 +217,4 @@ def get_upload_target(self) -> str: def get_sync_time_estimation(self) -> 
int: """Estimated time in seconds which the bucketfs needs to extract and sync a uploaded file""" - raise AbstractMethodException() \ No newline at end of file + raise AbstractMethodException() diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/language_definition.py b/exasol_script_languages_container_tool/lib/tasks/upload/language_definition.py index 79170017..b55e0606 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/language_definition.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/language_definition.py @@ -5,13 +5,15 @@ class LanguageDefinition: - def __init__(self, - release_name: str, - flavor_path: str, - bucketfs_name: str, - bucket_name: str, - path_in_bucket: Optional[str], - add_missing_builtin: bool = False): + def __init__( + self, + release_name: str, + flavor_path: str, + bucketfs_name: str, + bucket_name: str, + path_in_bucket: Optional[str], + add_missing_builtin: bool = False, + ): self.path_in_bucket = path_in_bucket self.bucket_name = bucket_name self.bucketfs_name = bucketfs_name @@ -22,7 +24,9 @@ def __init__(self, def generate_definition(self): language_definition = self._render_language_definition() if self.add_missing_builtin: - language_definition = self._add_missing_builtin_language_definitions(language_definition) + language_definition = self._add_missing_builtin_language_definitions( + language_definition + ) return language_definition.strip() def _render_language_definition(self): @@ -30,19 +34,24 @@ def _render_language_definition(self): if path_in_bucket != "" and not path_in_bucket.endswith("/"): path_in_bucket = path_in_bucket + "/" language_definition_path = Path( - self.flavor_path, "flavor_base", "language_definition") + self.flavor_path, "flavor_base", "language_definition" + ) with language_definition_path.open("r") as f: language_definition_template = f.read() template = Template(language_definition_template) - language_definition = 
template.render(bucketfs_name=self.bucketfs_name, - bucket_name=self.bucket_name, - path_in_bucket=path_in_bucket, - release_name=self.release_name) + language_definition = template.render( + bucketfs_name=self.bucketfs_name, + bucket_name=self.bucket_name, + path_in_bucket=path_in_bucket, + release_name=self.release_name, + ) return language_definition def _add_missing_builtin_language_definitions(self, language_definition): builtin_aliases = {"PYTHON", "PYTHON3", "JAVA", "R"} - defined_aliases = {alias.split("=")[0] for alias in language_definition.split(" ")} + defined_aliases = { + alias.split("=")[0] for alias in language_definition.split(" ") + } missing_aliases = builtin_aliases - defined_aliases sorted_missing_aliases = sorted(missing_aliases) additional_language_defintions = " ".join( diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_base_task.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_base_task.py index 255bb98c..90792575 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_base_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_base_task.py @@ -3,13 +3,23 @@ import luigi import requests -from exasol_integration_test_docker_environment.abstract_method_exception import AbstractMethodException -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorBaseTask +from exasol_integration_test_docker_environment.abstract_method_exception import ( + AbstractMethodException, +) +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorBaseTask, +) from requests.auth import HTTPBasicAuth -from exasol_script_languages_container_tool.lib.tasks.export.export_info import ExportInfo -from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import LanguageDefinition -from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_parameter 
import UploadContainerParameter +from exasol_script_languages_container_tool.lib.tasks.export.export_info import ( + ExportInfo, +) +from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import ( + LanguageDefinition, +) +from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_parameter import ( + UploadContainerParameter, +) class UploadContainerBaseTask(FlavorBaseTask, UploadContainerParameter): @@ -33,26 +43,28 @@ def get_export_task(self): def run_task(self): export_info = self.get_values_from_future(self.export_info_future) self._upload_container(export_info) - language_definition = \ - LanguageDefinition(release_name=self._get_complete_release_name(export_info), - flavor_path=self.flavor_path, - bucketfs_name=self.bucketfs_name, - bucket_name=self.bucket_name, - path_in_bucket=self.path_in_bucket) - command_line_output_str = \ - self.generate_command_line_output_str( - language_definition, export_info) + language_definition = LanguageDefinition( + release_name=self._get_complete_release_name(export_info), + flavor_path=self.flavor_path, + bucketfs_name=self.bucketfs_name, + bucket_name=self.bucket_name, + path_in_bucket=self.path_in_bucket, + ) + command_line_output_str = self.generate_command_line_output_str( + language_definition, export_info + ) self.return_object(command_line_output_str) - def generate_command_line_output_str(self, - language_definition: LanguageDefinition, - export_info: ExportInfo): + def generate_command_line_output_str( + self, language_definition: LanguageDefinition, export_info: ExportInfo + ): flavor_name = self.get_flavor_name() try: release_path = Path(export_info.cache_file).relative_to(Path("").absolute()) except ValueError: release_path = Path(export_info.cache_file) - command_line_output_str = textwrap.dedent(f""" + command_line_output_str = textwrap.dedent( + f""" Uploaded {release_path} to {self._get_upload_url(export_info, without_login=True)} @@ -69,22 +81,20 @@ def 
generate_command_line_output_str(self, To activate the flavor on the system: {language_definition.generate_alter_system()} - """) + """ + ) return command_line_output_str def _upload_container(self, release_info: ExportInfo): s = requests.session() url = self._get_upload_url(release_info, without_login=True) - self.logger.info( - f"Upload {release_info.cache_file} to {url}") - with open(release_info.cache_file, 'rb') as file: + self.logger.info(f"Upload {release_info.cache_file} to {url}") + with open(release_info.cache_file, "rb") as file: response = s.put(url, data=file, auth=self._create_auth_object()) response.raise_for_status() def _create_auth_object(self) -> HTTPBasicAuth: - auth = HTTPBasicAuth( - self.bucketfs_username, - self.bucketfs_password) + auth = HTTPBasicAuth(self.bucketfs_username, self.bucketfs_password) return auth def _get_upload_url(self, release_info: ExportInfo, without_login: bool = False): @@ -93,10 +103,15 @@ def _get_upload_url(self, release_info: ExportInfo, without_login: bool = False) login = "" else: login = f"""{self.bucketfs_username}:{self.bucketfs_password}@""" - path_in_bucket = f"{self.path_in_bucket}/" if self.path_in_bucket not in [None, ""] else "" - url = f"""{self._get_url_prefix()}{login}""" + \ - f"""{self.database_host}:{self.bucketfs_port}/{self.bucket_name}/{path_in_bucket}""" + \ - complete_release_name + ".tar.gz" + path_in_bucket = ( + f"{self.path_in_bucket}/" if self.path_in_bucket not in [None, ""] else "" + ) + url = ( + f"""{self._get_url_prefix()}{login}""" + + f"""{self.database_host}:{self.bucketfs_port}/{self.bucket_name}/{path_in_bucket}""" + + complete_release_name + + ".tar.gz" + ) return url def _get_complete_release_name(self, release_info: ExportInfo): diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_parameter.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_parameter.py index 2dc56e72..63c63a8c 100644 --- 
a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_parameter.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_parameter.py @@ -1,12 +1,16 @@ import luigi -from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import DependencyLoggerBaseTask +from exasol_integration_test_docker_environment.lib.base.dependency_logger_base_task import ( + DependencyLoggerBaseTask, +) class UploadContainerParameter: database_host = luigi.Parameter() bucketfs_port = luigi.IntParameter() bucketfs_username = luigi.Parameter(significant=False) - bucketfs_password = luigi.Parameter(significant=False, visibility=luigi.parameter.ParameterVisibility.HIDDEN) + bucketfs_password = luigi.Parameter( + significant=False, visibility=luigi.parameter.ParameterVisibility.HIDDEN + ) bucketfs_name = luigi.Parameter() bucket_name = luigi.Parameter() path_in_bucket = luigi.OptionalParameter() diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_task.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_task.py index 46c05ea1..e64ca615 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_task.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_task.py @@ -1,10 +1,16 @@ import importlib import luigi -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import RequiredTaskInfo +from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import ( + JsonPickleParameter, +) +from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import ( + RequiredTaskInfo, +) -from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_base_task import UploadContainerBaseTask +from 
exasol_script_languages_container_tool.lib.tasks.upload.upload_container_base_task import ( + UploadContainerBaseTask, +) class UploadContainerTask(UploadContainerBaseTask): @@ -13,12 +19,20 @@ class UploadContainerTask(UploadContainerBaseTask): # don't want to wait for the push finishing before starting the build of depended images, # but we also need to create a UploadContainerTask for each ExportContainerTask of a goal - required_task_info = JsonPickleParameter(RequiredTaskInfo, - visibility=luigi.parameter.ParameterVisibility.HIDDEN, - significant=True) # type: RequiredTaskInfo + required_task_info = JsonPickleParameter( + RequiredTaskInfo, + visibility=luigi.parameter.ParameterVisibility.HIDDEN, + significant=True, + ) # type: RequiredTaskInfo def get_export_task(self): - module = importlib.import_module(self.required_task_info.module_name) - class_ = getattr(module, self.required_task_info.class_name) - instance = self.create_child_task(class_, **self.required_task_info.params) + module = importlib.import_module( + self.required_task_info.module_name # pylint: disable=no-member + ) + class_ = getattr( + module, self.required_task_info.class_name # pylint: disable=no-member + ) + instance = self.create_child_task( + class_, **self.required_task_info.params # pylint: disable=no-member + ) return instance diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_tasks_creator.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_tasks_creator.py index be18a803..bbb6b889 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_tasks_creator.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/upload_container_tasks_creator.py @@ -1,12 +1,18 @@ from typing import Dict -from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_create_task import \ - DockerCreateImageTask -from 
exasol_integration_test_docker_environment.lib.docker.images.required_task_info import RequiredTaskInfo +from exasol_integration_test_docker_environment.lib.docker.images.create.docker_image_create_task import ( + DockerCreateImageTask, +) +from exasol_integration_test_docker_environment.lib.docker.images.required_task_info import ( + RequiredTaskInfo, +) -from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_task import UploadContainerTask -from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers_parameter import \ - UploadContainersParameter +from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_task import ( + UploadContainerTask, +) +from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers_parameter import ( + UploadContainersParameter, +) class UploadContainerTasksCreator: @@ -15,21 +21,24 @@ def __init__(self, task: UploadContainersParameter): self.task = task def create_upload_tasks(self, build_tasks: Dict[str, DockerCreateImageTask]): - return {release_goal: self._create_upload_task(release_goal, build_task) - for release_goal, build_task in build_tasks.items()} + return { + release_goal: self._create_upload_task(release_goal, build_task) + for release_goal, build_task in build_tasks.items() + } def _create_upload_task(self, release_goal: str, build_task: DockerCreateImageTask): required_task_info = self._create_required_task_info(build_task) - return self.task.create_child_task_with_common_params( + return self.task.create_child_task_with_common_params( # type: ignore UploadContainerTask, required_task_info=required_task_info, - release_goal=release_goal + release_goal=release_goal, ) @staticmethod def _create_required_task_info(build_task): - required_task_info = \ - RequiredTaskInfo(module_name=build_task.__module__, - class_name=build_task.__class__.__name__, - params=build_task.param_kwargs) + required_task_info = RequiredTaskInfo( + 
module_name=build_task.__module__, + class_name=build_task.__class__.__name__, + params=build_task.param_kwargs, + ) return required_task_info diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers.py index 9ce64a89..e91c7dac 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers.py +++ b/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers.py @@ -1,15 +1,22 @@ from typing import Dict import luigi -from exasol_integration_test_docker_environment.lib.base.flavor_task import FlavorsBaseTask - -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import DockerFlavorBuildBase -from exasol_script_languages_container_tool.lib.tasks.export.export_container_tasks_creator import \ - ExportContainerTasksCreator -from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_tasks_creator import \ - UploadContainerTasksCreator -from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers_parameter import \ - UploadContainersParameter +from exasol_integration_test_docker_environment.lib.base.flavor_task import ( + FlavorsBaseTask, +) + +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_build_base import ( + DockerFlavorBuildBase, +) +from exasol_script_languages_container_tool.lib.tasks.export.export_container_tasks_creator import ( + ExportContainerTasksCreator, +) +from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_tasks_creator import ( + UploadContainerTasksCreator, +) +from exasol_script_languages_container_tool.lib.tasks.upload.upload_containers_parameter import ( + UploadContainersParameter, +) class UploadContainers(FlavorsBaseTask, UploadContainersParameter): @@ -17,17 +24,21 @@ class UploadContainers(FlavorsBaseTask, UploadContainersParameter): def __init__(self, *args, **kwargs): 
self.export_info_futures = None super().__init__(*args, **kwargs) - command_line_output_path = self.get_output_path().joinpath("command_line_output") - self.command_line_output_target = luigi.LocalTarget(str(command_line_output_path)) + command_line_output_path = self.get_output_path().joinpath( + "command_line_output" + ) + self.command_line_output_target = luigi.LocalTarget( + str(command_line_output_path) + ) def register_required(self): - tasks = self.create_tasks_for_flavors_with_common_params( - UploadFlavorContainers) # type: Dict[str,UploadFlavorContainers] + tasks = self.create_tasks_for_flavors_with_common_params( # type: ignore + UploadFlavorContainers + ) # type: Dict[str,UploadFlavorContainers] self.export_info_futures = self.register_dependencies(tasks) def run_task(self): - uploads = self.get_values_from_futures( - self.export_info_futures) + uploads = self.get_values_from_futures(self.export_info_futures) self.write_command_line_output(uploads) self.return_object(self.command_line_output_target) @@ -52,8 +63,12 @@ def run_task(self): export_tasks = self.create_export_tasks(build_tasks) upload_tasks = self.create_upload_tasks(export_tasks) - command_line_output_string_futures = yield from self.run_dependencies(upload_tasks) - command_line_output_strings = self.get_values_from_futures(command_line_output_string_futures) + command_line_output_string_futures = yield from self.run_dependencies( + upload_tasks + ) + command_line_output_strings = self.get_values_from_futures( + command_line_output_string_futures + ) self.return_object(command_line_output_strings) def create_upload_tasks(self, export_tasks): diff --git a/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers_parameter.py b/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers_parameter.py index 31a4c652..2663b89a 100644 --- a/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers_parameter.py +++ 
b/exasol_script_languages_container_tool/lib/tasks/upload/upload_containers_parameter.py @@ -1,6 +1,8 @@ import luigi -from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_parameter import UploadContainerParameter +from exasol_script_languages_container_tool.lib.tasks.upload.upload_container_parameter import ( + UploadContainerParameter, +) class UploadContainersParameter(UploadContainerParameter): diff --git a/exasol_script_languages_container_tool/lib/utils/docker_utils.py b/exasol_script_languages_container_tool/lib/utils/docker_utils.py index 7fd6a23c..dfdbcbc1 100644 --- a/exasol_script_languages_container_tool/lib/utils/docker_utils.py +++ b/exasol_script_languages_container_tool/lib/utils/docker_utils.py @@ -1,19 +1,30 @@ -from typing import Callable, List, Dict, TextIO +from typing import Callable, Dict, List, TextIO import docker.models.containers def find_images_by_tag(client, condition: Callable[[str], bool]) -> List: images = client.images.list() - filter_images = [image for image in images - if image.tags is not None and len(image.tags) > 0 and - any([condition(tag) for tag in image.tags])] + filter_images = [ + image + for image in images + if image.tags is not None + and len(image.tags) > 0 + and any([condition(tag) for tag in image.tags]) + ] return filter_images -def exec_run_and_write_to_stream(client: docker.client, container: docker.models.containers.Container, cmd: str, - output_io: TextIO, environment: Dict) -> int: - _id = client.api.exec_create(container=container.id, cmd=cmd, environment=environment) +def exec_run_and_write_to_stream( + client: docker.client, + container: docker.models.containers.Container, + cmd: str, + output_io: TextIO, + environment: Dict, +) -> int: + _id = client.api.exec_create( + container=container.id, cmd=cmd, environment=environment + ) output_stream = client.api.exec_start(_id, detach=False, stream=True) for output_chunk in output_stream: output_io.write(output_chunk.decode("utf-8")) 
diff --git a/exasol_script_languages_container_tool/lib/utils/tar_safe_extract.py b/exasol_script_languages_container_tool/lib/utils/tar_safe_extract.py index 1318094d..0571fea2 100644 --- a/exasol_script_languages_container_tool/lib/utils/tar_safe_extract.py +++ b/exasol_script_languages_container_tool/lib/utils/tar_safe_extract.py @@ -12,7 +12,13 @@ def is_within_directory(directory, target): return prefix == abs_directory -def safe_extract(tar: TarFile, path: str = ".", members: Optional[List[str]] = None, *, numeric_owner: bool = False): +def safe_extract( + tar: TarFile, + path: str = ".", + members: Optional[List[str]] = None, + *, + numeric_owner: bool = False +): """ This function implements a patch for the CVE-2007-4559. The patch essentially checks to see if all tarfile members will be extracted safely and throws an exception otherwise. @@ -22,4 +28,4 @@ def safe_extract(tar: TarFile, path: str = ".", members: Optional[List[str]] = N if not is_within_directory(path, member_path): raise Exception("Attempted Path Traversal in Tar File") - tar.extractall(path=path, members=members, numeric_owner=numeric_owner) + tar.extractall(path=path, members=members, numeric_owner=numeric_owner) # type: ignore diff --git a/exasol_script_languages_container_tool/main.py b/exasol_script_languages_container_tool/main.py index bb56283b..72d871e3 100755 --- a/exasol_script_languages_container_tool/main.py +++ b/exasol_script_languages_container_tool/main.py @@ -1,7 +1,8 @@ #! 
/usr/bin/env python3 -from exasol_integration_test_docker_environment.cli.cli import cli -import exasol_integration_test_docker_environment.cli.commands +import exasol_integration_test_docker_environment.cli.commands # type: ignore +from exasol_integration_test_docker_environment.cli.cli import cli # type: ignore + import exasol_script_languages_container_tool.cli.commands @@ -9,5 +10,5 @@ def main(): cli() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/exasol_script_languages_container_tool/py.typed b/exasol_script_languages_container_tool/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/exasol_script_languages_container_tool/version.py b/exasol_script_languages_container_tool/version.py new file mode 100644 index 00000000..1ac2947c --- /dev/null +++ b/exasol_script_languages_container_tool/version.py @@ -0,0 +1,10 @@ +# ATTENTION: +# This file is generated by exasol/toolbox/pre_commit_hooks/package_version.py when using: +# * either "poetry run nox -s fix" +# * or "poetry run version-check --fix" +# Do not edit this file manually! +# If you need to change the version, do so in the project.toml, e.g. by using `poetry version X.Y.Z`. 
+MAJOR = 1 +MINOR = 0 +PATCH = 0 +VERSION = f"{MAJOR}.{MINOR}.{PATCH}" diff --git a/noxconfig.py b/noxconfig.py new file mode 100644 index 00000000..9a07c088 --- /dev/null +++ b/noxconfig.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import Iterable + +from nox import Session + + +@dataclass(frozen=True) +class Config: + root: Path = Path(__file__).parent + doc: Path = Path(__file__).parent / "doc" + version_file: Path = ( + Path(__file__).parent / "exasol_script_languages_container_tool" / "version.py" + ) + path_filters: Iterable[str] = ("dist", ".eggs", "venv", "resources") + + +PROJECT_CONFIG = Config() diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000..0738e134 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,7 @@ +import nox + +# imports all nox task provided by the toolbox +from exasol.toolbox.nox.tasks import * # type: ignore + +# default actions to be run if nothing is explicitly specified with the -s option +nox.options.sessions = ["fix"] diff --git a/poetry.lock b/poetry.lock index 14243f28..f2096012 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + [[package]] name = "anyio" version = "4.4.0" @@ -22,6 +33,34 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "argcomplete" +version = "3.5.0" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +files = [ + {file = "argcomplete-3.5.0-py3-none-any.whl", hash = "sha256:d4bcf3ff544f51e16e54228a7ac7f486ed70ebf2ecfe49a63a91171c76bf029b"}, + {file = "argcomplete-3.5.0.tar.gz", hash = "sha256:4349400469dccfb7950bb60334a680c58d88699bff6159df61251878dc6bf74b"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + +[[package]] +name = "astroid" +version = "3.2.4" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "attrs" version = "24.2.0" @@ -41,6 +80,20 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "babel" +version = "2.16.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "bcrypt" version = "4.2.0" @@ -81,6 +134,73 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + 
+[[package]] +name = "black" +version = "24.8.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + 
{file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2024.7.4" @@ 
-171,6 +291,17 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -295,6 +426,23 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + [[package]] name = "configobj" version = "5.0.8" @@ -309,6 +457,90 @@ files = [ [package.dependencies] six = "*" +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = 
"coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + 
{file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "43.0.0" @@ -386,6 +618,32 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = 
"sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "docker" version = "7.1.0" @@ -409,13 +667,13 @@ websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "docutils" -version = "0.20.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] [[package]] @@ -500,6 +758,38 @@ requests = ">=2.31.0,<3.0.0" tenacity = ">=8.2.3,<9.0.0" types-requests = ">=2.31.0.6,<3.0.0.0" +[[package]] +name = "exasol-toolbox" +version = "0.14.0" +description = "" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "exasol_toolbox-0.14.0-py3-none-any.whl", hash = "sha256:3a225d36fe3ac54cd7ba97978344d76dc5f973f330af01eda6643dd880118811"}, + {file = "exasol_toolbox-0.14.0.tar.gz", hash = "sha256:dc511fce4962b4440f37202a2764377892104d499392063325486eec88dc31f2"}, +] + +[package.dependencies] +black = ">=24.0.0,<25.0.0" +coverage = ">=6.4.4,<8.0.0" +furo = ">=2022.9.15" +importlib-resources = ">=5.12.0" +isort = ">=5.12.0,<6.0.0" +mypy = ">=0.971" +myst-parser = ">=2.0.0,<4" +nox = ">=2022.8.7" +pluggy = ">=1.5.0,<2.0.0" +pre-commit = ">=3.1.1,<4.0.0" +prysk = {version = ">=0.17.0,<0.18.0", extras = ["pytest-plugin"]} +pylint = ">=2.15.4" +pytest = ">=7.2.2,<8.0.0" +pyupgrade = ">=2.38.2,<4.0.0" +shibuya = ">=2024.5.14" +sphinx = ">=5.3,<7.0" 
+sphinx-copybutton = ">=0.5.0,<0.6.0" +sphinx-design = ">=0.5.0,<0.6.0" +typer = {version = ">=0.7.0", extras = ["all"]} + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -534,6 +824,39 @@ paramiko = ">=2.4" [package.extras] pytest = ["pytest (>=7)"] +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "furo" +version = "2024.8.6" +description = "A clean customisable Sphinx documentation theme." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, + {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=6.0,<9.0" +sphinx-basic-ng = ">=1.0.0.beta2" + [[package]] name = "gitdb" version = "4.0.11" @@ -636,6 +959,20 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.8" @@ -658,6 +995,17 @@ files = [ {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "importlib-metadata" version = "8.4.0" @@ -696,6 +1044,17 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] type = ["pytest-mypy"] +[[package]] +name = "iniconfig" 
+version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "invoke" version = "2.2.0" @@ -707,6 +1066,20 @@ files = [ {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "jinja2" version = "3.1.4" @@ -783,6 +1156,30 @@ jsonschema = ["jsonschema"] prometheus = ["prometheus-client (>=0.5,<0.15)"] toml = ["toml (<2.0.0)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -852,6 +1249,131 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.1" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, + {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, +] + 
+[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.11.2" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + 
{file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "myst-parser" +version = "3.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, +] + +[package.dependencies] +docutils = ">=0.18,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + [[package]] name = "netaddr" version = "1.3.0" @@ -884,6 +1406,50 @@ doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-t extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = 
false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "nox" +version = "2024.4.15" +description = "Flexible test automation." +optional = false +python-versions = ">=3.7" +files = [ + {file = "nox-2024.4.15-py3-none-any.whl", hash = "sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565"}, + {file = "nox-2024.4.15.tar.gz", hash = "sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f"}, +] + +[package.dependencies] +argcomplete = ">=1.9.4,<4.0" +colorlog = ">=2.6.1,<7.0.0" +packaging = ">=20.9" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.14.1" + +[package.extras] +tox-to-nox = ["jinja2", "tox"] +uv = ["uv (>=0.1.6)"] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + [[package]] name = "paramiko" version = "3.4.1" @@ -905,6 +1471,48 @@ all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1 gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=2.0)"] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "portalocker" version = "2.10.1" @@ -924,6 +1532,42 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prysk" +version = "0.17.0" +description = "Functional tests for command line applications" +optional = false +python-versions = ">=3.8,<4.0.0" +files = [ + {file = "prysk-0.17.0-py3-none-any.whl", hash = "sha256:c2e0ce69ede821e5a7e03f576c51e2a35000c570f6e22cf7c13daec1b3978832"}, + {file = "prysk-0.17.0.tar.gz", hash = "sha256:0a500bb9ff742eca878d5802bad9fcfd7ba1c6bbae64b2a2ff96bff94d4f8ad8"}, +] + +[package.dependencies] +pytest-prysk = {version = ">=0.2.0,<0.3.0", optional = true, markers = "extra == \"pytest-plugin\""} +rich = ">=13.3.1,<14.0.0" + +[package.extras] +pytest-plugin = ["pytest-prysk (>=0.2.0,<0.3.0)"] + [[package]] name = "pycparser" version = "2.22" @@ -954,6 +1598,49 @@ dev = ["chardet", "parameterized", "ruff"] release = ["zest.releaser[recommended]"] tests = ["chardet", "parameterized", "ruff", "tox", "unittest-parallel"] +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.2.6" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, + {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, +] + +[package.dependencies] +astroid = ">=3.2.4,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pynacl" version = "1.5.0" @@ -1005,6 +1692,43 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = 
"sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-prysk" +version = "0.2.0" +description = "Pytest plugin for prysk" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_prysk-0.2.0-py3-none-any.whl", hash = "sha256:3180a9d3a6634e6e70107b2eed2a6a7420630b14ba2036598ef690f9b71be79f"}, + {file = "pytest_prysk-0.2.0.tar.gz", hash = "sha256:488d1f77e35beec9cad13e11368dcc5d09555ec31a4d6a3f9d901e78bbeeb2d1"}, +] + +[package.dependencies] +prysk = ">=0.15.0" +pytest = ">=7.3.2,<8.0.0" + [[package]] name = "python-daemon" version = "3.0.1" @@ -1039,6 +1763,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pyupgrade" +version = "3.17.0" +description = "A tool to automatically upgrade syntax for newer versions." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pyupgrade-3.17.0-py2.py3-none-any.whl", hash = "sha256:cbc8f67a61d3f4e7ca9c2ef57b9aae67f023d3780ce30c99fccec78401723754"}, + {file = "pyupgrade-3.17.0.tar.gz", hash = "sha256:d5dd1dcaf9a016c31508bb9d3d09fd335d736578092f91df52bb26ac30c37919"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + [[package]] name = "pywin32" version = "306" @@ -1062,6 +1800,68 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", 
hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = 
"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -1083,6 +1883,24 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "setuptools" version = "73.0.1" @@ -1099,6 +1917,31 @@ core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.te doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", 
"towncrier (<24.7)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "shibuya" +version = "2024.8.26" +description = "A clean, responsive, and customizable Sphinx documentation theme with light/dark mode." +optional = false +python-versions = ">=3.7" +files = [ + {file = "shibuya-2024.8.26-py3-none-any.whl", hash = "sha256:048f19152fbac98a1cf6ee1cf40adeb4c14e8af073073eee5e6e357bc1487e4a"}, + {file = "shibuya-2024.8.26.tar.gz", hash = "sha256:49029bbaf1a7ce87a7494175504da6e9b1bdde646876af9cc73e2495839ca829"}, +] + +[package.dependencies] +Sphinx = "*" + [[package]] name = "simplejson" version = "3.19.3" @@ -1251,6 +2094,214 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "sphinx" +version = "6.2.1" +description = "Python documentation generator" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"}, + {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.20" +imagesize = ">=1.3" +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for 
Sphinx themes." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-design" +version = "0.5.0" +description = "A sphinx extension for designing beautiful, view size responsive web components." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, + {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, +] + +[package.dependencies] +sphinx = ">=5,<8" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=1,<3)"] +testing = ["myst-parser (>=1,<3)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2023.7.0,<2023.8.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.13.0,<0.14.0)"] +theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"] +theme-sbt = ["sphinx-book-theme (>=1.0,<2.0)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name 
= "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false 
+python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "stopwatch-py" version = "2.0.1" @@ -1277,6 +2328,17 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] +[[package]] +name = "tokenize-rt" +version = "6.0.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-6.0.0-py2.py3-none-any.whl", hash = "sha256:d4ff7ded2873512938b4f8cbb98c9b07118f01d30ac585a30d7a88353ca36d22"}, + {file = "tokenize_rt-6.0.0.tar.gz", hash = "sha256:b9711bdfc51210211137499b5e355d3de5ec88a85d2025c520cbb921b5194367"}, +] + [[package]] name = "toml" version = "0.10.2" @@ -1288,6 +2350,28 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = 
"sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + [[package]] name = "tornado" version = "6.4.1" @@ -1326,6 +2410,23 @@ typing-extensions = ">=4.10.0" doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] +[[package]] +name = "typer" +version = "0.12.5" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, + {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "types-requests" version = "2.32.0.20240712" @@ -1368,6 +2469,26 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer 
(>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wrapt" version = "1.16.0" @@ -1465,4 +2586,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "0f7a405d7ccc875f180311326f71c8c987678e8e8e52a98c1d2f4794a13c3156" +content-hash = "de54ad87198b1be62b090f07425f3343a8ce741a2556523e2a88121c0cbaf51f" diff --git a/pyproject.toml b/pyproject.toml index f29f7243..98978c0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,12 @@ authors = [ "Torsten Kilias " ] +packages = [ + {include = "README.md"}, + {include = "LICENSE"}, + {include = "exasol_script_languages_container_tool"}, +] + readme = 'README.md' # Markdown files are supported repository = "https://github.com/exasol/script-languages-container-tool" homepage = "https://github.com/exasol/script-languages-container-tool" @@ -36,7 +42,29 @@ build-backend = "poetry.core.masonry.api" [tool.poetry.dev-dependencies] toml = ">=0.10.2" configobj = "^5.0.8" # moved to dev dependencie so security alert does not propagate further up +exasol-toolbox = ">=0.14.0" [tool.poetry.scripts] exaslct = 'exasol_script_languages_container_tool.main:main' + +[[tool.mypy.overrides]] +module = ["exasol_integration_test_docker_environment.*", "luigi.*", "docker.*", + "humanfriendly", "configobj", "exasol.bucketfs", "toml"] +ignore_missing_imports = true + + +[tool.pylint.'MESSAGES CONTROL'] +max-line-length = 120 +disable = """ + W,R,C, + duplicate-code, + missing-class-docstring, + missing-module-docstring, + missing-function-docstring, + too-many-ancestors, + broad-exception-raised, + broad-exception-caught, + consider-using-f-string, + useless-parent-delegation +""" diff --git a/scripts/build/check_release.py b/scripts/build/check_release.py index 98150bd2..c7e1df88 100644 --- a/scripts/build/check_release.py +++ 
b/scripts/build/check_release.py @@ -2,8 +2,8 @@ from pathlib import Path from typing import Tuple -from git import Repo import toml +from git import Repo def get_git_version(): @@ -23,13 +23,15 @@ def version_string_to_tuple(version: str) -> Tuple[int, ...]: def get_poetry_version(): - parsed_toml = toml.load('pyproject.toml') + parsed_toml = toml.load("pyproject.toml") return parsed_toml["tool"]["poetry"]["version"].strip() def get_change_log_version(): # Path overloads __truediv__ - with open(Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md") as changelog: + with open( + Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md" + ) as changelog: changelog_str = changelog.read() # Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file. # Note that we encapsulate the [(0.5.0)] with parenthesis, which tells re to return the matching string as group @@ -37,7 +39,7 @@ def get_change_log_version(): return version_match.groups()[0] -if __name__ == '__main__': +if __name__ == "__main__": poetry_version = get_poetry_version() latest_tag = get_git_version() changelog_version = get_change_log_version() diff --git a/test/resources/flavors/real-test-flavor/real_flavor_base/build_steps.py b/test/resources/flavors/real-test-flavor/real_flavor_base/build_steps.py index f770df86..72c3e884 100644 --- a/test/resources/flavors/real-test-flavor/real_flavor_base/build_steps.py +++ b/test/resources/flavors/real-test-flavor/real_flavor_base/build_steps.py @@ -1,6 +1,8 @@ from typing import Dict -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import DockerFlavorAnalyzeImageTask +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import ( + DockerFlavorAnalyzeImageTask, +) class AnalyzeUDFClientDeps(DockerFlavorAnalyzeImageTask): @@ -48,8 +50,7 @@ def get_build_step(self) -> str: return "build_run" def requires_tasks(self): - return {"build_deps": 
AnalyzeBuildDeps, - "language_deps": AnalyzeLanguageDeps} + return {"build_deps": AnalyzeBuildDeps, "language_deps": AnalyzeLanguageDeps} def get_additional_build_directories_mapping(self) -> Dict[str, str]: return {} @@ -76,8 +77,10 @@ def get_build_step(self) -> str: return "base_test_build_run" def requires_tasks(self): - return {"base_test_deps": AnalyzeBaseTestDeps, - "language_deps": AnalyzeLanguageDeps} + return { + "base_test_deps": AnalyzeBaseTestDeps, + "language_deps": AnalyzeLanguageDeps, + } def get_additional_build_directories_mapping(self) -> Dict[str, str]: return {} @@ -116,21 +119,26 @@ def get_build_step(self) -> str: return "flavor_test_build_run" def requires_tasks(self): - return {"flavor_customization": AnalyzeFlavorCustomization, - "base_test_build_run": AnalyzeBaseTestBuildRun} + return { + "flavor_customization": AnalyzeFlavorCustomization, + "base_test_build_run": AnalyzeBaseTestBuildRun, + } def get_path_in_flavor(self): return "flavor_base" + class AnalyzeRelease(DockerFlavorAnalyzeImageTask): def get_build_step(self) -> str: return "release" def requires_tasks(self): - return {"flavor_customization": AnalyzeFlavorCustomization, - "build_run": AnalyzeBuildRun, - "language_deps": AnalyzeLanguageDeps, - "language_deps": AnalyzeLanguageDeps} + return { + "flavor_customization": AnalyzeFlavorCustomization, + "build_run": AnalyzeBuildRun, + "language_deps": AnalyzeLanguageDeps, + "language_deps": AnalyzeLanguageDeps, + } def get_path_in_flavor(self): return "flavor_base" @@ -145,4 +153,3 @@ def requires_tasks(self): def get_path_in_flavor(self): return "flavor_base" - diff --git a/test/resources/flavors/test-flavor spaces/real-test-flavor/real_flavor_base/build_steps.py b/test/resources/flavors/test-flavor spaces/real-test-flavor/real_flavor_base/build_steps.py index f770df86..72c3e884 100644 --- a/test/resources/flavors/test-flavor spaces/real-test-flavor/real_flavor_base/build_steps.py +++ b/test/resources/flavors/test-flavor 
spaces/real-test-flavor/real_flavor_base/build_steps.py @@ -1,6 +1,8 @@ from typing import Dict -from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import DockerFlavorAnalyzeImageTask +from exasol_script_languages_container_tool.lib.tasks.build.docker_flavor_image_task import ( + DockerFlavorAnalyzeImageTask, +) class AnalyzeUDFClientDeps(DockerFlavorAnalyzeImageTask): @@ -48,8 +50,7 @@ def get_build_step(self) -> str: return "build_run" def requires_tasks(self): - return {"build_deps": AnalyzeBuildDeps, - "language_deps": AnalyzeLanguageDeps} + return {"build_deps": AnalyzeBuildDeps, "language_deps": AnalyzeLanguageDeps} def get_additional_build_directories_mapping(self) -> Dict[str, str]: return {} @@ -76,8 +77,10 @@ def get_build_step(self) -> str: return "base_test_build_run" def requires_tasks(self): - return {"base_test_deps": AnalyzeBaseTestDeps, - "language_deps": AnalyzeLanguageDeps} + return { + "base_test_deps": AnalyzeBaseTestDeps, + "language_deps": AnalyzeLanguageDeps, + } def get_additional_build_directories_mapping(self) -> Dict[str, str]: return {} @@ -116,21 +119,26 @@ def get_build_step(self) -> str: return "flavor_test_build_run" def requires_tasks(self): - return {"flavor_customization": AnalyzeFlavorCustomization, - "base_test_build_run": AnalyzeBaseTestBuildRun} + return { + "flavor_customization": AnalyzeFlavorCustomization, + "base_test_build_run": AnalyzeBaseTestBuildRun, + } def get_path_in_flavor(self): return "flavor_base" + class AnalyzeRelease(DockerFlavorAnalyzeImageTask): def get_build_step(self) -> str: return "release" def requires_tasks(self): - return {"flavor_customization": AnalyzeFlavorCustomization, - "build_run": AnalyzeBuildRun, - "language_deps": AnalyzeLanguageDeps, - "language_deps": AnalyzeLanguageDeps} + return { + "flavor_customization": AnalyzeFlavorCustomization, + "build_run": AnalyzeBuildRun, + "language_deps": AnalyzeLanguageDeps, + "language_deps": AnalyzeLanguageDeps, + } def 
get_path_in_flavor(self): return "flavor_base" @@ -145,4 +153,3 @@ def requires_tasks(self): def get_path_in_flavor(self): return "flavor_base" - diff --git a/test/resources/test_container/full/tests/test/docker_environment_test.py b/test/resources/test_container/full/tests/test/docker_environment_test.py index 7f7b2820..c3e7ac9f 100755 --- a/test/resources/test_container/full/tests/test/docker_environment_test.py +++ b/test/resources/test_container/full/tests/test/docker_environment_test.py @@ -1,24 +1,30 @@ #!/usr/bin/env python3 import time -from exasol_python_test_framework import udf -from exasol_python_test_framework import docker_db_environment +from exasol_python_test_framework import docker_db_environment, udf class DockerDBEnvironmentTest(udf.TestCase): def setUp(self): # The extraction of the builtin language container takes long with Exasol 8 - time.sleep(2*60) + time.sleep(2 * 60) - @udf.skipIfNot(docker_db_environment.is_available, reason="This test requires a docker-db environment") + @udf.skipIfNot( + docker_db_environment.is_available, + reason="This test requires a docker-db environment", + ) def test_connect_from_udf_to_other_container(self): schema = "test_connect_from_udf_to_other_container" env = docker_db_environment.DockerDBEnvironment(schema) try: - self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True) + self.query( + udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True + ) self.query(udf.fixindent("CREATE SCHEMA %s" % schema)) self.query(udf.fixindent("OPEN SCHEMA %s" % schema)) - self.query(udf.fixindent(''' + self.query( + udf.fixindent( + """ CREATE OR REPLACE PYTHON3 SCALAR SCRIPT connect_container(host varchar(1000), port int) returns int AS import socket def run(ctx): @@ -26,11 +32,17 @@ def run(ctx): s.connect((ctx.host, ctx.port)) return 0 / - ''')) - container = env.run(name="netcat", image="busybox:1", command="nc -v -l -s 0.0.0.0 -p 7777", ) + """ + ) + ) + container = env.run( + 
name="netcat", + image="busybox:1", + command="nc -v -l -s 0.0.0.0 -p 7777", + ) host = env.get_ip_address_of_container(container) - self.query("select connect_container('%s',%s)" % (host, 7777)) - self.assertTrue("connect" in container.logs().decode('utf-8')) + self.query("select connect_container('{}',{})".format(host, 7777)) + self.assertTrue("connect" in container.logs().decode("utf-8")) finally: try: self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema)) @@ -42,5 +54,5 @@ def run(ctx): pass -if __name__ == '__main__': +if __name__ == "__main__": udf.main() diff --git a/test/resources/test_container/full/tests/test/empty_test.py b/test/resources/test_container/full/tests/test/empty_test.py index 037b6d5e..5da2eee6 100755 --- a/test/resources/test_container/full/tests/test/empty_test.py +++ b/test/resources/test_container/full/tests/test/empty_test.py @@ -1,16 +1,17 @@ #!/usr/bin/env python3 -from exasol_python_test_framework import udf -from exasol_python_test_framework import docker_db_environment +from exasol_python_test_framework import docker_db_environment, udf class EmptyTest(udf.TestCase): - @udf.skipIfNot(docker_db_environment.is_available, reason="This test requires a docker-db environment") + @udf.skipIfNot( + docker_db_environment.is_available, + reason="This test requires a docker-db environment", + ) def test_dummy(self): pass -if __name__ == '__main__': +if __name__ == "__main__": udf.main() - diff --git a/test/resources/test_container/full/tests/test/test_builtin_languages.py b/test/resources/test_container/full/tests/test/test_builtin_languages.py index aabc37df..7d7cbd32 100644 --- a/test/resources/test_container/full/tests/test/test_builtin_languages.py +++ b/test/resources/test_container/full/tests/test/test_builtin_languages.py @@ -8,16 +8,22 @@ class DockerDBEnvironmentTest(udf.TestCase): def test_python3(self): schema = "DockerDBEnvironmentTest" try: - self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True) + 
self.query( + udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True + ) self.query(udf.fixindent("CREATE SCHEMA %s" % schema)) self.query(udf.fixindent("OPEN SCHEMA %s" % schema)) - self.query(udf.fixindent(''' + self.query( + udf.fixindent( + """ CREATE OR REPLACE PYTHON3 SCALAR SCRIPT test_python3(i int) returns int AS def run(ctx): return 0 / - ''')) + """ + ) + ) self.query("select test_python3(0)") finally: self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema)) @@ -25,10 +31,14 @@ def run(ctx): def test_java(self): schema = "DockerDBEnvironmentTest" try: - self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True) + self.query( + udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True + ) self.query(udf.fixindent("CREATE SCHEMA %s" % schema)) self.query(udf.fixindent("OPEN SCHEMA %s" % schema)) - self.query(udf.fixindent(''' + self.query( + udf.fixindent( + """ CREATE OR REPLACE JAVA SCALAR SCRIPT test_java(i int) returns int AS class TEST_JAVA { @@ -37,7 +47,9 @@ class TEST_JAVA { } } / - ''')) + """ + ) + ) self.query("select test_java(0)") finally: self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema)) @@ -45,21 +57,27 @@ class TEST_JAVA { def test_r(self): schema = "DockerDBEnvironmentTest" try: - self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True) + self.query( + udf.fixindent("DROP SCHEMA %s CASCADE" % schema), ignore_errors=True + ) self.query(udf.fixindent("CREATE SCHEMA %s" % schema)) self.query(udf.fixindent("OPEN SCHEMA %s" % schema)) - self.query(udf.fixindent(''' + self.query( + udf.fixindent( + """ CREATE OR REPLACE R SCALAR SCRIPT test_r(i int) returns int AS run <- function(ctx) { return(0); } / - ''')) + """ + ) + ) self.query("select test_r(0)") finally: self.query(udf.fixindent("DROP SCHEMA %s CASCADE" % schema)) -if __name__ == '__main__': +if __name__ == "__main__": udf.main() diff --git 
a/test/resources/test_container/full/tests/test/test_container_docker_credentials.py b/test/resources/test_container/full/tests/test/test_container_docker_credentials.py index 41a83716..d054a4b8 100755 --- a/test/resources/test_container/full/tests/test/test_container_docker_credentials.py +++ b/test/resources/test_container/full/tests/test/test_container_docker_credentials.py @@ -2,13 +2,15 @@ import os import docker -from exasol_python_test_framework import udf -from exasol_python_test_framework import docker_db_environment +from exasol_python_test_framework import docker_db_environment, udf class TestContainerDockerCredentials(udf.TestCase): - @udf.skipIfNot(docker_db_environment.is_available, reason="This test requires a docker-db environment") + @udf.skipIfNot( + docker_db_environment.is_available, + reason="This test requires a docker-db environment", + ) def test_check_docker_credentials(self): docker_user = os.getenv("DOCKER_USERNAME") docker_password = os.getenv("DOCKER_PASSWORD") @@ -18,5 +20,5 @@ def test_check_docker_credentials(self): client.login(username=docker_user, password=docker_password) -if __name__ == '__main__': +if __name__ == "__main__": udf.main() diff --git a/test/test_build_test_container.py b/test/test_build_test_container.py index 8f23388c..7dbf36ff 100644 --- a/test/test_build_test_container.py +++ b/test/test_build_test_container.py @@ -1,18 +1,21 @@ import unittest import docker +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore -from exasol_script_languages_container_tool.lib.utils.docker_utils import find_images_by_tag - -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +from exasol_script_languages_container_tool.lib.utils.docker_utils import ( + find_images_by_tag, +) class BuildTestContainerTest(unittest.TestCase): def setUp(self): print(f"SetUp 
{self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.docker_client = docker.from_env() self.test_environment.clean_images() @@ -26,13 +29,19 @@ def tearDown(self): def test_build_test_container(self): parameter = exaslct_utils.get_mock_test_container_folder_parameter() command = f"{self.test_environment.executable} build-test-container {parameter}" - self.test_environment.run_command(command, track_task_dependencies=True, use_flavor_path=False) - images = find_images_by_tag(self.docker_client, - lambda tag: tag.startswith(self.test_environment.repository_name)) - self.assertTrue(len(images) > 0, - f"Did not find images for repository " - f"{self.test_environment.repository_name} in list {images}") - - -if __name__ == '__main__': + self.test_environment.run_command( + command, track_task_dependencies=True, use_flavor_path=False + ) + images = find_images_by_tag( + self.docker_client, + lambda tag: tag.startswith(self.test_environment.repository_name), + ) + self.assertTrue( + len(images) > 0, + f"Did not find images for repository " + f"{self.test_environment.repository_name} in list {images}", + ) + + +if __name__ == "__main__": unittest.main() diff --git a/test/test_click_api_consistency.py b/test/test_click_api_consistency.py index 996ef4e3..8a19b6cb 100644 --- a/test/test_click_api_consistency.py +++ b/test/test_click_api_consistency.py @@ -1,11 +1,18 @@ +import inspect import unittest -from exasol_integration_test_docker_environment.testing.api_consistency_utils import \ - param_names_of_click_call, get_click_and_api_functions, defaults_of_click_call, get_click_and_api_function_names -from exasol_integration_test_docker_environment.testing.utils import multiassert +from exasol_integration_test_docker_environment.testing.api_consistency_utils import 
( # type: ignore + defaults_of_click_call, + get_click_and_api_function_names, + get_click_and_api_functions, + param_names_of_click_call, +) +from exasol_integration_test_docker_environment.testing.utils import ( + multiassert, +) # type: ignore + from exasol_script_languages_container_tool.cli import commands from exasol_script_languages_container_tool.lib import api -import inspect class ClickApiConsistency(unittest.TestCase): @@ -14,7 +21,6 @@ def test_api_arguments(self): """ Validate that the argument lists for all commands match! """ - self.maxDiff = None click_commands, api_functions = get_click_and_api_functions(commands, api) # Now iterate over the list and compare consistency @@ -26,9 +32,18 @@ def test_api_arguments(self): if "return" in api_spec.annotations: del api_spec.annotations["return"] - multiassert([lambda: self.assertEqual(api_spec.args, cli_spec.args), - lambda: self.assertEqual(api_spec.annotations, cli_spec.annotations), - lambda: self.assertEqual(api_spec.args, param_names_of_click_call(cli_call))], self) + multiassert( + [ + lambda: self.assertEqual(api_spec.args, cli_spec.args), + lambda: self.assertEqual( + api_spec.annotations, cli_spec.annotations + ), + lambda: self.assertEqual( + api_spec.args, param_names_of_click_call(cli_call) + ), + ], + self, + ) def test_api_default_values(self): """ @@ -42,15 +57,20 @@ def test_api_default_values(self): api_spec_defaults = inspect.getfullargspec(api_call).defaults or tuple() cli_defaults = defaults_of_click_call(cli_call) - self.assertEqual(len(cli_defaults), len(api_spec_defaults), - f"{cli_call},{cli_defaults},{api_spec_defaults}") + self.assertEqual( + len(cli_defaults), + len(api_spec_defaults), + f"{cli_call},{cli_defaults},{api_spec_defaults}", + ) for api_default_value, cli_default in zip(api_spec_defaults, cli_defaults): cli_param_name, cli_default_value = cli_default if api_default_value != cli_default_value: - self.fail(f"Default value for parameter '{cli_param_name}' " - f"for 
method '{api_call.__name__}' does not match. " - f"API method has default value '{api_default_value}' " - f"while CLI method has default value '{cli_default_value}'") + self.fail( + f"Default value for parameter '{cli_param_name}' " + f"for method '{api_call.__name__}' does not match. " + f"API method has default value '{api_default_value}' " + f"while CLI method has default value '{cli_default_value}'" + ) def test_same_functions(self): """ @@ -58,9 +78,11 @@ def test_same_functions(self): For that we use inspect to get all classes of type click.Command in module 'commands', and on the other hand get all functions in module 'api'. The list of names from both most be identical. """ - click_command_names, api_function_names = get_click_and_api_function_names(commands, api) + click_command_names, api_function_names = get_click_and_api_function_names( + commands, api + ) self.assertEqual(click_command_names, api_function_names) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_api_build.py b/test/test_docker_api_build.py index 4b33cca7..9ebbe5a1 100644 --- a/test/test_docker_api_build.py +++ b/test/test_docker_api_build.py @@ -1,19 +1,22 @@ import unittest -import docker +import docker # type: ignore +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore from exasol_script_languages_container_tool.lib.api import build -from exasol_script_languages_container_tool.lib.utils.docker_utils import find_images_by_tag - -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +from exasol_script_languages_container_tool.lib.utils.docker_utils import ( + find_images_by_tag, +) class ApiDockerBuildTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup(self, True) + 
self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup( + self, True + ) self.docker_client = docker.from_env() self.test_environment.clean_all_images() @@ -26,24 +29,35 @@ def tearDown(self): def test_docker_build(self): flavor_path = exaslct_utils.get_test_flavor() - image_infos = build(flavor_path=(str(flavor_path),), - source_docker_repository_name=self.test_environment.docker_repository_name, - target_docker_repository_name=self.test_environment.docker_repository_name) + image_infos = build( + flavor_path=(str(flavor_path),), + source_docker_repository_name=self.test_environment.docker_repository_name, + target_docker_repository_name=self.test_environment.docker_repository_name, + ) assert len(image_infos) == 1 - images = find_images_by_tag(self.docker_client, - lambda tag: tag.startswith(self.test_environment.docker_repository_name)) - self.assertTrue(len(images) > 0, - f"Did not found images for repository " - f"{self.test_environment.docker_repository_name} in list {images}") + images = find_images_by_tag( + self.docker_client, + lambda tag: tag.startswith(self.test_environment.docker_repository_name), + ) + self.assertTrue( + len(images) > 0, + f"Did not found images for repository " + f"{self.test_environment.docker_repository_name} in list {images}", + ) print("image_infos", image_infos.keys()) image_infos_for_test_flavor = image_infos[str(flavor_path)] for goal, image_info in image_infos_for_test_flavor.items(): - expected_prefix = f"{image_info.target_repository_name}:{image_info.target_tag}" - images = find_images_by_tag(self.docker_client, - lambda tag: tag.startswith(expected_prefix)) - self.assertTrue(len(images) == 1, - f"Did not found image for goal '{goal}' with prefix {expected_prefix} in list {images}") - - -if __name__ == '__main__': + expected_prefix = ( + f"{image_info.target_repository_name}:{image_info.target_tag}" + ) + images = find_images_by_tag( + self.docker_client, lambda tag: tag.startswith(expected_prefix) + ) 
+ self.assertTrue( + len(images) == 1, + f"Did not found image for goal '{goal}' with prefix {expected_prefix} in list {images}", + ) + + +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_api_export.py b/test/test_docker_api_export.py index cfae775b..9835b459 100644 --- a/test/test_docker_api_export.py +++ b/test/test_docker_api_export.py @@ -3,8 +3,8 @@ import unittest from pathlib import Path -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore from exasol_script_languages_container_tool.lib import api @@ -12,7 +12,9 @@ class ApiDockerExportTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup(self, True) + self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup( + self, True + ) self.export_path = self.test_environment.temp_dir + "/export_dir" self.test_environment.clean_all_images() @@ -20,10 +22,14 @@ def tearDown(self): utils.close_environments(self.test_environment) def test_docker_export(self): - export_result = api.export(flavor_path=(str(exaslct_utils.get_test_flavor()),), - export_path=self.export_path) + export_result = api.export( + flavor_path=(str(exaslct_utils.get_test_flavor()),), + export_path=self.export_path, + ) self.assertEqual(len(export_result.export_infos), 1) - export_infos_for_flavor = export_result.export_infos[str(exaslct_utils.get_test_flavor())] + export_infos_for_flavor = export_result.export_infos[ + str(exaslct_utils.get_test_flavor()) + ] self.assertEqual(len(export_infos_for_flavor), 1) export_info = export_infos_for_flavor["release"] exported_files = os.listdir(self.export_path) @@ -38,5 +44,5 @@ def test_docker_export(self): assert last_tf_member.path == 
"exasol-manifest.json" -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_api_push.py b/test/test_docker_api_push.py index f702fd3e..35fb7c8d 100644 --- a/test/test_docker_api_push.py +++ b/test/test_docker_api_push.py @@ -1,9 +1,11 @@ import unittest -from exasol_integration_test_docker_environment.testing import utils -from exasol_integration_test_docker_environment.testing.docker_registry import LocalDockerRegistryContextManager +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore +from exasol_integration_test_docker_environment.testing.docker_registry import ( + LocalDockerRegistryContextManager, +) # type: ignore -import utils as exaslct_utils from exasol_script_languages_container_tool.lib import api @@ -11,27 +13,44 @@ class ApiDockerPushTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup(self, True) + self.test_environment = exaslct_utils.ExaslctApiTestEnvironmentWithCleanup( + self, True + ) self.test_environment.clean_all_images() def tearDown(self): utils.close_environments(self.test_environment) def test_docker_push(self): - with LocalDockerRegistryContextManager(self.test_environment.name) as local_registry: + with LocalDockerRegistryContextManager( + self.test_environment.name + ) as local_registry: self.test_environment.docker_repository_name = local_registry.name - image_infos = api.push(flavor_path=(str(exaslct_utils.get_test_flavor()),), - source_docker_repository_name=self.test_environment.docker_repository_name, - target_docker_repository_name=self.test_environment.docker_repository_name) + image_infos = api.push( + flavor_path=(str(exaslct_utils.get_test_flavor()),), + source_docker_repository_name=self.test_environment.docker_repository_name, + 
target_docker_repository_name=self.test_environment.docker_repository_name, + ) print("repos:", local_registry.repositories) self.assertIn(str(exaslct_utils.get_test_flavor()), image_infos) images = local_registry.images print("images", images) images_info_list = image_infos[str(exaslct_utils.get_test_flavor())] - images_info_list_tags = list(set(image_info.target_tag for image_info in images_info_list)).sort() - self.assertEqual(images["tags"].sort(), images_info_list_tags, - f"{images} doesn't have the expected tags, it only has {len(images['tags'])}") - - -if __name__ == '__main__': + print("images_info_list", images_info_list) + images_info_list_tags = sorted( + list( + { + f"{image_info.target_tag}_{image_info.hash}" + for image_info in images_info_list + } + ) + ) + self.assertEqual( + sorted(images["tags"]), + images_info_list_tags, + f"{images} doesn't have the expected tags, it only has {len(images['tags'])}", + ) + + +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_build.py b/test/test_docker_build.py index b177d9fd..99f9c4c0 100644 --- a/test/test_docker_build.py +++ b/test/test_docker_build.py @@ -1,18 +1,21 @@ import unittest import docker +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore -from exasol_script_languages_container_tool.lib.utils.docker_utils import find_images_by_tag - -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +from exasol_script_languages_container_tool.lib.utils.docker_utils import ( + find_images_by_tag, +) class DockerBuildTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) 
self.docker_client = docker.from_env() self.test_environment.clean_images() @@ -26,12 +29,16 @@ def tearDown(self): def test_docker_build(self): command = f"{self.test_environment.executable} build" self.test_environment.run_command(command, track_task_dependencies=True) - images = find_images_by_tag(self.docker_client, - lambda tag: tag.startswith(self.test_environment.repository_name)) - self.assertTrue(len(images) > 0, - f"Did not found images for repository " - f"{self.test_environment.repository_name} in list {images}") - - -if __name__ == '__main__': + images = find_images_by_tag( + self.docker_client, + lambda tag: tag.startswith(self.test_environment.repository_name), + ) + self.assertTrue( + len(images) > 0, + f"Did not found images for repository " + f"{self.test_environment.repository_name} in list {images}", + ) + + +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_clean.py b/test/test_docker_clean.py index 66031978..055f282e 100644 --- a/test/test_docker_clean.py +++ b/test/test_docker_clean.py @@ -1,12 +1,14 @@ import unittest -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error class DockerClean(unittest.TestCase): def setUp(self): - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) def tearDown(self): try: @@ -19,5 +21,5 @@ def test_docker_clean_all_images(self): self.test_environment.run_command(command, use_flavor_path=False, clean=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_export.py b/test/test_docker_export.py index ae46a9b8..a8e05e64 100644 --- a/test/test_docker_export.py +++ b/test/test_docker_export.py @@ -2,14 +2,16 @@ import tarfile import unittest -import utils as exaslct_utils -from 
exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class DockerExportTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.export_path = self.test_environment.temp_dir + "/export_dir" self.test_environment.clean_images() @@ -20,19 +22,24 @@ def test_docker_export(self): command = f"{self.test_environment.executable} export --export-path {self.export_path}" self.test_environment.run_command(command, track_task_dependencies=True) exported_files = os.listdir(self.export_path) - self.assertEqual(sorted(list(exported_files)), - sorted(['test-flavor_release.tar.gz', 'test-flavor_release.tar.gz.sha512sum']), - f"Did not found saved files for repository {self.test_environment.repository_name} " - f"in list {exported_files}") - - #Verify that "exasol-manifest.json" is the last file in the Tar archive - with tarfile.open(os.path.join(self.export_path, 'test-flavor_release.tar.gz'), "r:*") as tf: + self.assertEqual( + sorted(list(exported_files)), + sorted( + ["test-flavor_release.tar.gz", "test-flavor_release.tar.gz.sha512sum"] + ), + f"Did not found saved files for repository {self.test_environment.repository_name} " + f"in list {exported_files}", + ) + + # Verify that "exasol-manifest.json" is the last file in the Tar archive + with tarfile.open( + os.path.join(self.export_path, "test-flavor_release.tar.gz"), "r:*" + ) as tf: tf_members = tf.getmembers() last_tf_member = tf_members[-1] assert last_tf_member.name == "exasol-manifest.json" assert last_tf_member.path == "exasol-manifest.json" - -if __name__ == '__main__': +if __name__ == 
"__main__": unittest.main() diff --git a/test/test_docker_load.py b/test/test_docker_load.py index 2515e1cd..9d079219 100644 --- a/test/test_docker_load.py +++ b/test/test_docker_load.py @@ -4,14 +4,16 @@ import unittest from pathlib import Path -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error class DockerLoadTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.save_path = self.test_environment.temp_dir + "/save_dir" self.test_environment.clean_images() self.save() @@ -39,5 +41,5 @@ def test_docker_load(self): self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_pull.py b/test/test_docker_pull.py index 9def4426..c7dbefe0 100644 --- a/test/test_docker_pull.py +++ b/test/test_docker_pull.py @@ -1,23 +1,28 @@ import os import unittest -from exasol_integration_test_docker_environment.testing.docker_registry import LocalDockerRegistryContextManager - -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing.docker_registry import ( + LocalDockerRegistryContextManager, +) class DockerPullTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): self.test_environment.close() def test_docker_pull(self): - with 
LocalDockerRegistryContextManager(name=self.test_environment.name) as local_registry: + with LocalDockerRegistryContextManager( + name=self.test_environment.name + ) as local_registry: self.test_environment.repository_name = local_registry.name self.test_environment.clean_images() command = f"{self.test_environment.executable} push " @@ -25,10 +30,15 @@ def test_docker_pull(self): self.test_environment.clean_images() command = f"{self.test_environment.executable} build " self.test_environment.run_command(command, track_task_dependencies=True) - docker_pull_image_tasks = self.find_all("timers", "DockerPullImageTask", self.test_environment.temp_dir) + docker_pull_image_tasks = self.find_all( + "timers", "DockerPullImageTask", self.test_environment.temp_dir + ) print(docker_pull_image_tasks) - self.assertEqual(len(docker_pull_image_tasks), 3, - f"{docker_pull_image_tasks} doesn't contain the expected 3 docker_pull_image_tasks") + self.assertEqual( + len(docker_pull_image_tasks), + 3, + f"{docker_pull_image_tasks} doesn't contain the expected 3 docker_pull_image_tasks", + ) def find_all(self, search_root, search_name, path): result = [] @@ -43,5 +53,5 @@ def find_all(self, search_root, search_name, path): return result -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_push.py b/test/test_docker_push.py index 9684b670..b7765722 100644 --- a/test/test_docker_push.py +++ b/test/test_docker_push.py @@ -1,31 +1,39 @@ import unittest -from exasol_integration_test_docker_environment.testing.docker_registry import LocalDockerRegistryContextManager - -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing.docker_registry import ( + LocalDockerRegistryContextManager, +) # type: ignore class DockerPushTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = 
exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): self.test_environment.close() def test_docker_push(self): - with LocalDockerRegistryContextManager(self.test_environment.name) as local_registry: + with LocalDockerRegistryContextManager( + self.test_environment.name + ) as local_registry: self.test_environment.repository_name = local_registry.name command = f"{self.test_environment.executable} push " self.test_environment.run_command(command, track_task_dependencies=True) print("repos:", local_registry.repositories) images = local_registry.images print("images", images) - self.assertEqual(len(images["tags"]), 10, - f"{images} doesn't have the expected 10 tags, it only has {len(images['tags'])}") + self.assertEqual( + len(images["tags"]), + 10, + f"{images} doesn't have the expected 10 tags, it only has {len(images['tags'])}", + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_save.py b/test/test_docker_save.py index f4393800..13f052b8 100644 --- a/test/test_docker_save.py +++ b/test/test_docker_save.py @@ -4,15 +4,17 @@ import unittest from pathlib import Path -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class DockerSaveTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.save_path = self.test_environment.temp_dir + 
"/save_dir" self.test_environment.clean_images() @@ -26,11 +28,15 @@ def tearDown(self): def test_docker_save(self): command = f"{self.test_environment.executable} save --save-directory {self.save_path} " self.test_environment.run_command(command, track_task_dependencies=True) - saved_files = os.listdir(Path(self.save_path).joinpath(self.test_environment.repository_name).parent) - self.assertTrue(len(saved_files) > 0, - f"Did not found saved files for repository {self.test_environment.repository_name} " - f"in list {saved_files}") + saved_files = os.listdir( + Path(self.save_path).joinpath(self.test_environment.repository_name).parent + ) + self.assertTrue( + len(saved_files) > 0, + f"Did not found saved files for repository {self.test_environment.repository_name} " + f"in list {saved_files}", + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_docker_upload.py b/test/test_docker_upload.py index 5ce03dd1..0add785a 100644 --- a/test/test_docker_upload.py +++ b/test/test_docker_upload.py @@ -2,8 +2,8 @@ import subprocess import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class DockerUploadTest(unittest.TestCase): @@ -11,11 +11,14 @@ class DockerUploadTest(unittest.TestCase): @classmethod def setUpClass(cls): print(f"SetUpClass: {cls.__name__}") - cls.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(cls, exaslct_utils.EXASLCT_DEFAULT_BIN) + cls.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + cls, exaslct_utils.EXASLCT_DEFAULT_BIN + ) cls.test_environment.clean_images() cls.docker_environment_name = cls.__name__ - cls.docker_environments = \ - cls.test_environment.spawn_docker_test_environments(cls.docker_environment_name) + cls.docker_environments = 
cls.test_environment.spawn_docker_test_environments( + cls.docker_environment_name + ) if "GOOGLE_CLOUD_BUILD" in os.environ: cls.docker_environment = cls.docker_environments.google_cloud_environment else: @@ -30,68 +33,82 @@ def test_docker_upload_with_path_in_bucket(self): self.release_name = "TEST" self.bucketfs_name = "bfsdefault" self.bucket_name = "default" - arguments = " ".join([ - f"--database-host {self.docker_environment.database_host}", - f"--bucketfs-port {self.docker_environment.ports.bucketfs}", - f"--bucketfs-username {self.docker_environment.bucketfs_username}", - f"--bucketfs-password {self.docker_environment.bucketfs_password}", - f"--bucketfs-name {self.bucketfs_name}", - f"--bucket-name {self.bucket_name}", - f"--path-in-bucket {self.path_in_bucket}", - f"--no-bucketfs-https", - f"--release-name {self.release_name}", - ]) + arguments = " ".join( + [ + f"--database-host {self.docker_environment.database_host}", + f"--bucketfs-port {self.docker_environment.ports.bucketfs}", + f"--bucketfs-username {self.docker_environment.bucketfs_username}", + f"--bucketfs-password {self.docker_environment.bucketfs_password}", + f"--bucketfs-name {self.bucketfs_name}", + f"--bucket-name {self.bucket_name}", + f"--path-in-bucket {self.path_in_bucket}", + f"--no-bucketfs-https", + f"--release-name {self.release_name}", + ] + ) command = f"{self.test_environment.executable} upload {arguments}" - completed_process = self.test_environment.run_command(command, track_task_dependencies=True, - capture_output=True) + completed_process = self.test_environment.run_command( + command, track_task_dependencies=True, capture_output=True + ) self.assertIn( - f"ALTER SESSION SET SCRIPT_LANGUAGES=\'PYTHON3_TEST=localzmq+protobuf:///{self.bucketfs_name}/" + f"ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///{self.bucketfs_name}/" f"{self.bucket_name}/{self.path_in_bucket}/test-flavor-release-{self.release_name}?lang=python#buckets/" 
f"{self.bucketfs_name}/{self.bucket_name}/{self.path_in_bucket}/test-flavor-release-{self.release_name}/" f"exaudf/exaudfclient_py3", - completed_process.stdout.decode("UTF-8")) - self.validate_file_on_bucket_fs(f"{self.path_in_bucket}/test-flavor-release-{self.release_name}.tar.gz") + completed_process.stdout.decode("UTF-8"), + ) + self.validate_file_on_bucket_fs( + f"{self.path_in_bucket}/test-flavor-release-{self.release_name}.tar.gz" + ) def test_docker_upload_without_path_in_bucket(self): self.release_name = "TEST" self.bucketfs_name = "bfsdefault" self.bucket_name = "default" - arguments = " ".join([ - f"--database-host {self.docker_environment.database_host}", - f"--bucketfs-port {self.docker_environment.ports.bucketfs}", - f"--bucketfs-username {self.docker_environment.bucketfs_username}", - f"--bucketfs-password {self.docker_environment.bucketfs_password}", - f"--bucketfs-name {self.bucketfs_name}", - f"--bucket-name {self.bucket_name}", - f"--no-bucketfs-https", - f"--release-name {self.release_name}", - ]) + arguments = " ".join( + [ + f"--database-host {self.docker_environment.database_host}", + f"--bucketfs-port {self.docker_environment.ports.bucketfs}", + f"--bucketfs-username {self.docker_environment.bucketfs_username}", + f"--bucketfs-password {self.docker_environment.bucketfs_password}", + f"--bucketfs-name {self.bucketfs_name}", + f"--bucket-name {self.bucket_name}", + f"--no-bucketfs-https", + f"--release-name {self.release_name}", + ] + ) command = f"{self.test_environment.executable} upload {arguments}" - completed_process = self.test_environment.run_command(command, - track_task_dependencies=True, capture_output=True) + completed_process = self.test_environment.run_command( + command, track_task_dependencies=True, capture_output=True + ) self.assertIn( - f"ALTER SESSION SET SCRIPT_LANGUAGES=\'PYTHON3_TEST=localzmq+protobuf:///{self.bucketfs_name}/" + f"ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///{self.bucketfs_name}/" 
f"{self.bucket_name}/test-flavor-release-{self.release_name}?lang=python#buckets/" f"{self.bucketfs_name}/{self.bucket_name}/test-flavor-release-{self.release_name}/exaudf/exaudfclient_py3", - completed_process.stdout.decode("UTF-8")) - self.validate_file_on_bucket_fs(f"test-flavor-release-{self.release_name}.tar.gz") + completed_process.stdout.decode("UTF-8"), + ) + self.validate_file_on_bucket_fs( + f"test-flavor-release-{self.release_name}.tar.gz" + ) def test_docker_upload_fail_path_in_bucket(self): self.release_name = "TEST" self.bucketfs_name = "bfsdefault" self.bucket_name = "default" - arguments = " ".join([ - f"--database-host {self.docker_environment.database_host}", - f"--bucketfs-port {self.docker_environment.ports.bucketfs}", - f"--bucketfs-username {self.docker_environment.bucketfs_username}", - f"--bucketfs-password invalid", - f"--bucketfs-name {self.bucketfs_name}", - f"--bucket-name {self.bucket_name}", - f"--no-bucketfs-https", - f"--release-name {self.release_name}", - ]) + arguments = " ".join( + [ + f"--database-host {self.docker_environment.database_host}", + f"--bucketfs-port {self.docker_environment.ports.bucketfs}", + f"--bucketfs-username {self.docker_environment.bucketfs_username}", + f"--bucketfs-password invalid", + f"--bucketfs-name {self.bucketfs_name}", + f"--bucket-name {self.bucket_name}", + f"--no-bucketfs-https", + f"--release-name {self.release_name}", + ] + ) command = f"{self.test_environment.executable} upload {arguments}" exception_thrown = False @@ -103,14 +120,21 @@ def test_docker_upload_fail_path_in_bucket(self): def validate_file_on_bucket_fs(self, expected_file_path: str): url = "http://w:{password}@{host}:{port}/{bucket}".format( - host=self.docker_environment.database_host, port=self.docker_environment.ports.bucketfs, - bucket=self.bucket_name, password=self.docker_environment.bucketfs_password) + host=self.docker_environment.database_host, # type: ignore + port=self.docker_environment.ports.bucketfs, # type: ignore 
+ bucket=self.bucket_name, + password=self.docker_environment.bucketfs_password, # type: ignore + ) cmd = ["curl", "--silent", "--show-error", "--fail", url] p = subprocess.run(cmd, capture_output=True) p.check_returncode() - found_lines = [line for line in p.stdout.decode("utf-8").split("\n") if line == expected_file_path] + found_lines = [ + line + for line in p.stdout.decode("utf-8").split("\n") + if line == expected_file_path + ] assert len(found_lines) == 1 -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_generate_language_activation.py b/test/test_generate_language_activation.py index 74d02bc4..b98a583d 100644 --- a/test/test_generate_language_activation.py +++ b/test/test_generate_language_activation.py @@ -1,34 +1,56 @@ import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class GenerateLanguageActivationTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) def tearDown(self): utils.close_environments(self.test_environment) def test_generate_with_path_in_bucket(self): - command = f"{self.test_environment.executable} generate-language-activation --bucketfs-name bfsdefault --bucket-name default --path-in-bucket path --container-name container" - completed_process = self.test_environment.run_command(command, use_docker_repository=False, - use_output_directory=False, capture_output=True) + command = ( + f"{self.test_environment.executable} generate-language-activation --bucketfs-name bfsdefault " + f"--bucket-name default 
--path-in-bucket path --container-name container" + ) + completed_process = self.test_environment.run_command( + command, + use_docker_repository=False, + use_output_directory=False, + capture_output=True, + ) self.assertIn( - "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///bfsdefault/default/path/container?lang=python#buckets/bfsdefault/default/path/container/exaudf/exaudfclient_py3';", - completed_process.stdout.decode("UTF-8")) + "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=" + "localzmq+protobuf:///bfsdefault/default/path/container?lang=" + "python#buckets/bfsdefault/default/path/container/exaudf/exaudfclient_py3';", + completed_process.stdout.decode("UTF-8"), + ) def test_generate_without_path_in_bucket(self): - command = f"{self.test_environment.executable} generate-language-activation --bucketfs-name bfsdefault --bucket-name default --container-name container" - completed_process = self.test_environment.run_command(command, use_docker_repository=False, - use_output_directory=False, capture_output=True) + command = ( + f"{self.test_environment.executable} generate-language-activation --bucketfs-name bfsdefault " + f"--bucket-name default --container-name container" + ) + completed_process = self.test_environment.run_command( + command, + use_docker_repository=False, + use_output_directory=False, + capture_output=True, + ) self.assertIn( - "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///bfsdefault/default/container?lang=python#buckets/bfsdefault/default/container/exaudf/exaudfclient_py3';", - completed_process.stdout.decode("UTF-8")) + "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=" + "localzmq+protobuf:///bfsdefault/default/container?lang=" + "python#buckets/bfsdefault/default/container/exaudf/exaudfclient_py3';", + completed_process.stdout.decode("UTF-8"), + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_install_starter_scripts.py 
b/test/test_install_starter_scripts.py index d0e83e48..4d39aba8 100644 --- a/test/test_install_starter_scripts.py +++ b/test/test_install_starter_scripts.py @@ -1,16 +1,16 @@ +import filecmp import shlex import subprocess import tempfile import unittest - -import filecmp from pathlib import Path import importlib_metadata import importlib_resources -from exasol_script_languages_container_tool.lib.tasks.install_starter_scripts.run_starter_script_installation import \ - run_starter_script_installation +from exasol_script_languages_container_tool.lib.tasks.install_starter_scripts.run_starter_script_installation import ( + run_starter_script_installation, +) PACKAGE_IDENTITY = "exasol-script-languages-container-tool" MODULE_IDENTITY = PACKAGE_IDENTITY.replace("-", "_") @@ -22,11 +22,17 @@ class InstallStarterScriptTests(unittest.TestCase): def test_positive(self): with tempfile.TemporaryDirectory() as target_dir: target_path = Path(target_dir) - run_starter_script_installation(target_path, target_path / TARGET_EXASLCT_SCRIPTS_DIR, False) + run_starter_script_installation( + target_path, target_path / TARGET_EXASLCT_SCRIPTS_DIR, False + ) - exaslct_script_path = importlib_resources.files(MODULE_IDENTITY) / "starter_scripts" + exaslct_script_path = ( + importlib_resources.files(MODULE_IDENTITY) / "starter_scripts" + ) self.assertTrue(exaslct_script_path.is_dir()) - cmp_res = filecmp.dircmp(exaslct_script_path, target_path / TARGET_EXASLCT_SCRIPTS_DIR) + cmp_res = filecmp.dircmp( + exaslct_script_path, target_path / TARGET_EXASLCT_SCRIPTS_DIR + ) self.assertTrue(len(cmp_res.common) > 0) self.assertEqual(len(cmp_res.left_only), 0) self.assertEqual(len(cmp_res.right_only), 1) @@ -35,22 +41,44 @@ def test_positive(self): self.assertTrue(exaslct_link.exists() and exaslct_link.is_symlink()) def _build_docker_runner(self): - build_docker_runner_img_script = Path(__file__).parent.parent.absolute() / "scripts" / "build" / "build_docker_runner_image.sh" - completed_process = 
subprocess.run([str(build_docker_runner_img_script)], stdout=subprocess.PIPE) + build_docker_runner_img_script = ( + Path(__file__).parent.parent.absolute() + / "scripts" + / "build" + / "build_docker_runner_image.sh" + ) + completed_process = subprocess.run( + [str(build_docker_runner_img_script)], stdout=subprocess.PIPE + ) completed_process.check_returncode() current_runner_image_name = completed_process.stdout.decode("utf-8").strip("\n") return current_runner_image_name - def _build_docker_runner_release_tag(self, current_runner_image_name: str, script_dir: str): - construct_docker_runner_image_script = Path(script_dir) / TARGET_EXASLCT_SCRIPTS_DIR / \ - "construct_docker_runner_image_name.sh" + def _build_docker_runner_release_tag( + self, current_runner_image_name: str, script_dir: str + ): + construct_docker_runner_image_script = ( + Path(script_dir) + / TARGET_EXASLCT_SCRIPTS_DIR + / "construct_docker_runner_image_name.sh" + ) version = importlib_metadata.version(MODULE_IDENTITY) - completed_process = subprocess.run(["bash", str(construct_docker_runner_image_script), f"{version}"], - stdout=subprocess.PIPE) + completed_process = subprocess.run( + ["bash", str(construct_docker_runner_image_script), f"{version}"], + stdout=subprocess.PIPE, + ) completed_process.check_returncode() - target_docker_runner_image_name = completed_process.stdout.decode("utf-8").strip("\n") - completed_process = subprocess.run(["docker", "tag", - current_runner_image_name, target_docker_runner_image_name]) + target_docker_runner_image_name = completed_process.stdout.decode( + "utf-8" + ).strip("\n") + completed_process = subprocess.run( + [ + "docker", + "tag", + current_runner_image_name, + target_docker_runner_image_name, + ] + ) completed_process.check_returncode() @@ -61,26 +89,35 @@ def test_execute_help(self): with tempfile.TemporaryDirectory() as target_dir: target_path = Path(target_dir) - #Now we install the starter scripts - run_starter_script_installation(target_path, 
target_path / TARGET_EXASLCT_SCRIPTS_DIR, False) + # Now we install the starter scripts + run_starter_script_installation( + target_path, target_path / TARGET_EXASLCT_SCRIPTS_DIR, False + ) # Now we use the 'construct_docker_runner_image_name.sh' in # the installed scripts to get the name of the correct tag. # This tag must match with what 'exaslct' will try to use later. # Thus we also test if the installed version of 'construct_docker_runner_image_name.sh' works as expected. - self._build_docker_runner_release_tag(script_dir=target_dir, - current_runner_image_name=current_runner_image_name) + self._build_docker_runner_release_tag( + script_dir=target_dir, + current_runner_image_name=current_runner_image_name, + ) # Finally we call the installed version of 'exaslct'. # This is supposed to use the previously generated docker runner image. command = f"{target_dir}/exaslct --help" - completed_process = subprocess.run(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + completed_process = subprocess.run( + shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) try: completed_process.check_returncode() except subprocess.CalledProcessError as ex: - print(f"Error executing exaslct. Log is \n'{completed_process.stdout.decode('utf-8')}'", flush=True) + print( + f"Error executing exaslct. 
Log is \n'{completed_process.stdout.decode('utf-8')}'", + flush=True, + ) raise ex -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_language_definition.py b/test/test_language_definition.py index f42ddd26..3619ff8e 100644 --- a/test/test_language_definition.py +++ b/test/test_language_definition.py @@ -1,8 +1,10 @@ import unittest - -from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import LanguageDefinition from test.utils import get_real_test_flavor +from exasol_script_languages_container_tool.lib.tasks.upload.language_definition import ( + LanguageDefinition, +) + class LanguageDefintionTest(unittest.TestCase): flavor_path = str(get_real_test_flavor()) @@ -14,11 +16,14 @@ def test_add_missing_builtin_true(self): bucketfs_name="bucketfs_name", bucket_name="bucket_name", path_in_bucket="path_in_bucket", - add_missing_builtin=True + add_missing_builtin=True, ) self.assertEqual( - "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3 JAVA=builtin_java PYTHON=builtin_python PYTHON3=builtin_python3 R=builtin_r", - language_definition.generate_definition()) + "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3" + " JAVA=builtin_java PYTHON=builtin_python PYTHON3=builtin_python3 R=builtin_r", + language_definition.generate_definition(), + ) def test_add_missing_builtin_false(self): language_definition = LanguageDefinition( @@ -27,11 +32,13 @@ def test_add_missing_builtin_false(self): bucketfs_name="bucketfs_name", bucket_name="bucket_name", path_in_bucket="path_in_bucket", - add_missing_builtin=False + add_missing_builtin=False, ) self.assertEqual( - 
"PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3", - language_definition.generate_definition()) + "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3", + language_definition.generate_definition(), + ) def test_path_in_bucket_none(self): language_definition = LanguageDefinition( @@ -42,8 +49,10 @@ def test_path_in_bucket_none(self): path_in_bucket=None, ) self.assertEqual( - "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/release_name?lang=python#buckets/bucketfs_name/bucket_name/release_name/exaudf/exaudfclient_py3", - language_definition.generate_definition()) + "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/release_name/exaudf/exaudfclient_py3", + language_definition.generate_definition(), + ) def test_path_in_bucket_empyt_string(self): language_definition = LanguageDefinition( @@ -54,8 +63,10 @@ def test_path_in_bucket_empyt_string(self): path_in_bucket="", ) self.assertEqual( - "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/release_name?lang=python#buckets/bucketfs_name/bucket_name/release_name/exaudf/exaudfclient_py3", - language_definition.generate_definition()) + "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/release_name/exaudf/exaudfclient_py3", + language_definition.generate_definition(), + ) def test_path_in_bucket_not_none(self): language_definition = LanguageDefinition( @@ -66,8 +77,10 @@ def test_path_in_bucket_not_none(self): path_in_bucket="path_in_bucket", ) self.assertEqual( - 
"PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3", - language_definition.generate_definition()) + "PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3", + language_definition.generate_definition(), + ) def test_alter_system(self): language_definition = LanguageDefinition( @@ -78,8 +91,11 @@ def test_alter_system(self): path_in_bucket="path_in_bucket", ) self.assertEqual( - "ALTER SYSTEM SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3';", - language_definition.generate_alter_system()) + "ALTER SYSTEM SET SCRIPT_LANGUAGES='PYTHON3_TEST=" + "localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3';", + language_definition.generate_alter_system(), + ) def test_alter_session(self): language_definition = LanguageDefinition( @@ -90,9 +106,12 @@ def test_alter_session(self): path_in_bucket="path_in_bucket", ) self.assertEqual( - "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3';", - language_definition.generate_alter_session()) + "ALTER SESSION SET SCRIPT_LANGUAGES='PYTHON3_TEST=" + "localzmq+protobuf:///bucketfs_name/bucket_name/path_in_bucket/release_name?lang=" + "python#buckets/bucketfs_name/bucket_name/path_in_bucket/release_name/exaudf/exaudfclient_py3';", + language_definition.generate_alter_session(), + ) -if __name__ == '__main__': +if __name__ 
== "__main__": unittest.main() diff --git a/test/test_push_test_container.py b/test/test_push_test_container.py index 54bd94f0..20f8e51c 100644 --- a/test/test_push_test_container.py +++ b/test/test_push_test_container.py @@ -1,29 +1,38 @@ import unittest -from exasol_integration_test_docker_environment.testing.docker_registry import LocalDockerRegistryContextManager - -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing.docker_registry import ( + LocalDockerRegistryContextManager, +) # type: ignore class PushTestContainerTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): self.test_environment.close() def test_push_test_container(self): - with LocalDockerRegistryContextManager(self.test_environment.name) as local_registry: + with LocalDockerRegistryContextManager( + self.test_environment.name + ) as local_registry: self.test_environment.repository_name = local_registry.name parameter = exaslct_utils.get_mock_test_container_folder_parameter() - command = f"{self.test_environment.executable} push-test-container {parameter}" - self.test_environment.run_command(command, track_task_dependencies=True, use_flavor_path=False) + command = ( + f"{self.test_environment.executable} push-test-container {parameter}" + ) + self.test_environment.run_command( + command, track_task_dependencies=True, use_flavor_path=False + ) images = local_registry.images - self.assertRegex(images["tags"][0],"db-test-container_.*") + self.assertRegex(images["tags"][0], "db-test-container_.*") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff 
--git a/test/test_run_db_test_builtin_languages.py b/test/test_run_db_test_builtin_languages.py index a012cae6..1383a3ec 100644 --- a/test/test_run_db_test_builtin_languages.py +++ b/test/test_run_db_test_builtin_languages.py @@ -1,14 +1,16 @@ import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class RunDBTestBuiltinLanguagesTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): @@ -16,15 +18,17 @@ def tearDown(self): def test_builtin_languages(self): # optionally add "--reuse-test-environment" here - command = " ".join([ - str(self.test_environment.executable), - "run-db-test", - "--test-file", - "test_builtin_languages.py", - exaslct_utils.get_full_test_container_folder_parameter(), - ]) + command = " ".join( + [ + str(self.test_environment.executable), + "run-db-test", + "--test-file", + "test_builtin_languages.py", + exaslct_utils.get_full_test_container_folder_parameter(), + ] + ) self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_docker_credentials.py b/test/test_run_db_test_docker_credentials.py index 0c3797e5..b87499bf 100644 --- a/test/test_run_db_test_docker_credentials.py +++ b/test/test_run_db_test_docker_credentials.py @@ -1,35 +1,43 @@ import os import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as 
exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class RunDBTestDockerCredentials(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): utils.close_environments(self.test_environment) - @unittest.skipIf(os.getenv("DOCKER_USER") is not None and os.getenv("DOCKER_PASSWD") is not None, - "Docker credentials not configured") + @unittest.skipIf( + os.getenv("DOCKER_USER") is not None and os.getenv("DOCKER_PASSWD") is not None, + "Docker credentials not configured", + ) def test_docker_credentials_injection_into_test_container(self): docker_user = os.getenv("DOCKER_USER") docker_password = os.getenv("DOCKER_PASSWD") - arguments = " ".join([ - f"--source-docker-username={docker_user}", - f"--source-docker-password={docker_password}", - exaslct_utils.get_full_test_container_folder_parameter() - ]) - - command = f"{self.test_environment.executable} run-db-test {arguments} " \ - f"--test-file test_container_docker_credentials.py" + arguments = " ".join( + [ + f"--source-docker-username={docker_user}", + f"--source-docker-password={docker_password}", + exaslct_utils.get_full_test_container_folder_parameter(), + ] + ) + + command = ( + f"{self.test_environment.executable} run-db-test {arguments} " + f"--test-file test_container_docker_credentials.py" + ) self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_docker_db.py b/test/test_run_db_test_docker_db.py index 9baf9dfc..76252bb2 100644 --- a/test/test_run_db_test_docker_db.py +++ 
b/test/test_run_db_test_docker_db.py @@ -2,16 +2,19 @@ import unittest from pathlib import Path -from exasol_integration_test_docker_environment.lib.docker.container.utils import remove_docker_container - -import utils as exaslct_utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.lib.docker.container.utils import ( + remove_docker_container, +) class DockerRunDBTestDockerDBTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): @@ -19,15 +22,20 @@ def tearDown(self): self.test_environment.close() def remove_docker_container(self): - remove_docker_container([f"test_container_{self.test_environment.name}", - f"db_container_{self.test_environment.name}"]) + remove_docker_container( + [ + f"test_container_{self.test_environment.name}", + f"db_container_{self.test_environment.name}", + ] + ) def test_run_db_tests_docker_db(self): - command = f"{self.test_environment.executable} run-db-test " \ - f"{exaslct_utils.get_full_test_container_folder_parameter()}" - self.test_environment.run_command( - command, track_task_dependencies=True) + command = ( + f"{self.test_environment.executable} run-db-test " + f"{exaslct_utils.get_full_test_container_folder_parameter()}" + ) + self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_docker_db_check_arguments.py b/test/test_run_db_test_docker_db_check_arguments.py index 591616ce..e6c63e82 100644 --- a/test/test_run_db_test_docker_db_check_arguments.py +++ b/test/test_run_db_test_docker_db_check_arguments.py @@ -4,19 
+4,26 @@ from pathlib import Path import docker +import utils as exaslct_utils # type: ignore # pylint: disable=import-error from configobj import ConfigObj -from exasol_integration_test_docker_environment.lib.docker.container.utils import remove_docker_container -from exasol_integration_test_docker_environment.lib.docker.volumes.utils import remove_docker_volumes -from exasol_integration_test_docker_environment.lib.data.environment_info import EnvironmentInfo - -import utils as exaslct_utils +from exasol_integration_test_docker_environment.lib.data.environment_info import ( + EnvironmentInfo, +) +from exasol_integration_test_docker_environment.lib.docker.container.utils import ( + remove_docker_container, +) +from exasol_integration_test_docker_environment.lib.docker.volumes.utils import ( + remove_docker_volumes, +) class DockerRunDBTestDockerDBTestCheckArguments(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() self.client = docker.from_env() @@ -25,26 +32,33 @@ def tearDown(self): self.test_environment.close() self.client.close() - def _getEnvironmentInfo(self): + def _get_environment_info(self): test_environment_name = f"""{self.test_environment.flavor_path.name}_release""" - environment_info_json_path = Path(self.test_environment.temp_dir, - f"cache/environments/{test_environment_name}/environment_info.json") + environment_info_json_path = Path( + self.test_environment.temp_dir, + f"cache/environments/{test_environment_name}/environment_info.json", + ) if environment_info_json_path.exists(): with environment_info_json_path.open() as f: return EnvironmentInfo.from_json(f.read()) def assert_mem_disk_size(self, mem_size: str, disk_size: str): - env_info = 
self._getEnvironmentInfo() + env_info = self._get_environment_info() - containers = \ - [c.name for c in - self.client.containers.list() - if env_info.database_info.container_info.container_name == c.name] + containers = [ + c.name + for c in self.client.containers.list() + if env_info.database_info.container_info.container_name == c.name + ] self.assertEqual(len(containers), 1) - exit_result = self.client.containers.get(containers[0]).exec_run("cat /exa/etc/EXAConf") + exit_result = self.client.containers.get(containers[0]).exec_run( + "cat /exa/etc/EXAConf" + ) output = exit_result[1].decode("UTF-8") - if output == '': - exit_result = self.client.containers.get(containers[0]).exec_run("cat /exa/etc/EXAConf") + if output == "": + exit_result = self.client.containers.get(containers[0]).exec_run( + "cat /exa/etc/EXAConf" + ) output = exit_result[1].decode("UTF-8") return_code = exit_result[0] return_code = exit_result[0] @@ -67,14 +81,21 @@ def assert_mem_disk_size(self, mem_size: str, disk_size: str): self.assertAlmostEqual(float(disk_size_matches[0]), float(disk_size), places=1) def remove_docker_environment(self): - env_info = self._getEnvironmentInfo() - remove_docker_container([env_info.test_container_info.container_name, - env_info.database_info.container_info.container_name]) - volumes_to_remove = \ - [v for v in - [env_info.test_container_info.volume_name, - env_info.database_info.container_info.volume_name] - if v is not None] + env_info = self._get_environment_info() + remove_docker_container( + [ + env_info.test_container_info.container_name, + env_info.database_info.container_info.container_name, + ] + ) + volumes_to_remove = [ + v + for v in [ + env_info.test_container_info.volume_name, + env_info.database_info.container_info.volume_name, + ] + if v is not None + ] remove_docker_volumes(volumes_to_remove) self._remove_docker_networks([env_info.network_info.network_name]) @@ -88,18 +109,19 @@ def _remove_docker_networks(self, networks): def 
test_run_db_tests_docker_db_disk_mem_size(self): mem_size = "1.3" disk_size = "1.4" - arguments = " ".join([ - f"--test-file=empty_test.py", - f"--db-mem-size={mem_size}GiB", - f"--db-disk-size={disk_size}GiB", - f"--reuse-test-environment", - exaslct_utils.get_full_test_container_folder_parameter() - ]) + arguments = " ".join( + [ + f"--test-file=empty_test.py", + f"--db-mem-size={mem_size}GiB", + f"--db-disk-size={disk_size}GiB", + f"--reuse-test-environment", + exaslct_utils.get_full_test_container_folder_parameter(), + ] + ) command = f"{self.test_environment.executable} run-db-test {arguments}" - self.test_environment.run_command( - command, track_task_dependencies=True) + self.test_environment.run_command(command, track_task_dependencies=True) self.assert_mem_disk_size(mem_size, disk_size) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_docker_db_reuse.py b/test/test_run_db_test_docker_db_reuse.py index 30230e62..7bd73f7a 100644 --- a/test/test_run_db_test_docker_db_reuse.py +++ b/test/test_run_db_test_docker_db_reuse.py @@ -1,13 +1,13 @@ import os import unittest from pathlib import Path +from typing import Dict, List +import utils as exaslct_utils # type: ignore # pylint: disable=import-error from exasol_integration_test_docker_environment.lib.docker import ContextDockerClient -from exasol_integration_test_docker_environment.lib.docker.container.utils import remove_docker_container - -import utils as exaslct_utils - -from typing import Dict, List +from exasol_integration_test_docker_environment.lib.docker.container.utils import ( + remove_docker_container, +) def get_docker_container_ids(*names) -> Dict[str, str]: @@ -22,9 +22,15 @@ class RunDBTestDockerDBReuseTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) - self._test_container_name = 
f"test_container_{self.test_environment.flavor_path.name}_release" - self._db_container_name = f"db_container_{self.test_environment.flavor_path.name}_release" + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) + self._test_container_name = ( + f"test_container_{self.test_environment.flavor_path.name}_release" + ) + self._db_container_name = ( + f"db_container_{self.test_environment.flavor_path.name}_release" + ) self.test_environment.clean_images() self.remove_docker_container() @@ -37,11 +43,15 @@ def remove_docker_container(self): def test_reuse(self): def run_command(): - command = [f"{self.test_environment.executable}", - f"run-db-test", - f"{exaslct_utils.get_full_test_container_folder_parameter()}", - "--reuse-test-environment"] - self.test_environment.run_command(" ".join(command), track_task_dependencies=True) + command = [ + f"{self.test_environment.executable}", + f"run-db-test", + f"{exaslct_utils.get_full_test_container_folder_parameter()}", + "--reuse-test-environment", + ] + self.test_environment.run_command( + " ".join(command), track_task_dependencies=True + ) def container_ids() -> Dict[str, str]: return get_docker_container_ids( @@ -56,5 +66,5 @@ def container_ids() -> Dict[str, str]: self.assertEqual(old_ids, new_ids) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_docker_pass_through.py b/test/test_run_db_test_docker_pass_through.py index 454c9982..5cb940bd 100644 --- a/test/test_run_db_test_docker_pass_through.py +++ b/test/test_run_db_test_docker_pass_through.py @@ -1,24 +1,28 @@ import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class RunDBTestDockerPassThroughTest(unittest.TestCase): def 
setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): utils.close_environments(self.test_environment) def test_docker_test_environment(self): - command = f"{self.test_environment.executable} run-db-test --test-file docker_environment_test.py " \ - f"{exaslct_utils.get_full_test_container_folder_parameter()}" + command = ( + f"{self.test_environment.executable} run-db-test --test-file docker_environment_test.py " + f"{exaslct_utils.get_full_test_container_folder_parameter()}" + ) self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_run_db_test_external_db.py b/test/test_run_db_test_external_db.py index 85dae91a..28c432c0 100644 --- a/test/test_run_db_test_external_db.py +++ b/test/test_run_db_test_external_db.py @@ -1,18 +1,27 @@ import unittest -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import ( + utils, +) # type: ignore # type: ignore class DockerRunDBTestExternalDBTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() self.docker_environment_name = self.__class__.__name__ - self.docker_environments = 
self.test_environment.spawn_docker_test_environments(self.docker_environment_name) - # localhost gets translated in exaslct to the Gateway address of the docker environment network, because thats typically the IP Adress of the bridge to the host, for google cloud this means it should be able to connect to the db via the port forwards from the test container + self.docker_environments = self.test_environment.spawn_docker_test_environments( + self.docker_environment_name + ) + # localhost gets translated in exaslct to the Gateway address of the docker environment network, + # because thats typically the IP Adress of the bridge to the host, + # for google cloud this means it should be able to connect to the db + # via the port forwards from the test container # TODO check alternative of ip address on default bridge self.docker_environment = self.docker_environments.on_host_docker_environment @@ -20,21 +29,22 @@ def tearDown(self): utils.close_environments(self.docker_environments, self.test_environment) def test_run_db_tests_external_db(self): - arguments = " ".join([ - f"--environment-type external_db", - f"--external-exasol-db-host {self.docker_environment.database_host}", - f"--external-exasol-db-port {self.docker_environment.ports.database}", - f"--external-exasol-bucketfs-port {self.docker_environment.ports.bucketfs}", - f"--external-exasol-ssh-port {self.docker_environment.ports.ssh}", - f"--external-exasol-db-user {self.docker_environment.db_username}", - f"--external-exasol-db-password {self.docker_environment.db_password}", - f"--external-exasol-bucketfs-write-password {self.docker_environment.bucketfs_password}", - exaslct_utils.get_full_test_container_folder_parameter() - ]) + arguments = " ".join( + [ + f"--environment-type external_db", + f"--external-exasol-db-host {self.docker_environment.database_host}", + f"--external-exasol-db-port {self.docker_environment.ports.database}", + f"--external-exasol-bucketfs-port 
{self.docker_environment.ports.bucketfs}", + f"--external-exasol-ssh-port {self.docker_environment.ports.ssh}", + f"--external-exasol-db-user {self.docker_environment.db_username}", + f"--external-exasol-db-password {self.docker_environment.db_password}", + f"--external-exasol-bucketfs-write-password {self.docker_environment.bucketfs_password}", + exaslct_utils.get_full_test_container_folder_parameter(), + ] + ) command = f"{self.test_environment.executable} run-db-test {arguments}" - self.test_environment.run_command( - command, track_task_dependencies=True) + self.test_environment.run_command(command, track_task_dependencies=True) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/test_security_scan.py b/test/test_security_scan.py index 3bff91ad..7b26e345 100644 --- a/test/test_security_scan.py +++ b/test/test_security_scan.py @@ -1,15 +1,17 @@ import unittest from pathlib import Path -import utils as exaslct_utils -from exasol_integration_test_docker_environment.testing import utils +import utils as exaslct_utils # type: ignore # pylint: disable=import-error +from exasol_integration_test_docker_environment.testing import utils # type: ignore class DockerSecurityScanTest(unittest.TestCase): def setUp(self): print(f"SetUp {self.__class__.__name__}") - self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp(self, exaslct_utils.EXASLCT_DEFAULT_BIN) + self.test_environment = exaslct_utils.ExaslctTestEnvironmentWithCleanUp( + self, exaslct_utils.EXASLCT_DEFAULT_BIN + ) self.test_environment.clean_images() def tearDown(self): @@ -17,19 +19,26 @@ def tearDown(self): def test_docker_build(self): command = f"{self.test_environment.executable} security-scan" - completed_process = self.test_environment.run_command(command, - track_task_dependencies=True, capture_output=True) + completed_process = self.test_environment.run_command( + command, track_task_dependencies=True, capture_output=True + ) output = 
completed_process.stdout.decode("UTF-8") self.assertIn("============ START SECURITY SCAN REPORT - ", output) self.assertIn("Running scan...", output) self.assertIn("============ END SECURITY SCAN REPORT - ", output) - report = Path(self.test_environment.temp_dir, "security_scan", "test-flavor", "report", "report.txt") + report = Path( + self.test_environment.temp_dir, + "security_scan", + "test-flavor", + "report", + "report.txt", + ) self.assertTrue(report.exists()) with open(report) as report_file: report_result = report_file.read() self.assertIn("Report 123", report_result) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/test/utils.py b/test/utils.py index 904ec585..1e4fa129 100644 --- a/test/utils.py +++ b/test/utils.py @@ -1,13 +1,20 @@ from pathlib import Path from subprocess import CompletedProcess -from typing import List, Any, Dict - -from exasol_integration_test_docker_environment.lib.data.test_container_content_description import \ - TestContainerContentDescription -from exasol_integration_test_docker_environment.testing import api_test_environment, exaslct_test_environment -from exasol_integration_test_docker_environment.testing.exaslct_docker_test_environment import \ - ExaslctDockerTestEnvironment -from exasol_integration_test_docker_environment.testing.spawned_test_environments import SpawnedTestEnvironments +from typing import Any, Dict, List, Optional + +from exasol_integration_test_docker_environment.lib.data.test_container_content_description import ( + TestContainerContentDescription, +) # type: ignore +from exasol_integration_test_docker_environment.testing import ( # type: ignore + api_test_environment, + exaslct_test_environment, +) +from exasol_integration_test_docker_environment.testing.exaslct_docker_test_environment import ( + ExaslctDockerTestEnvironment, +) # type: ignore +from exasol_integration_test_docker_environment.testing.spawned_test_environments import ( + SpawnedTestEnvironments, +) # type: 
ignore from exasol_script_languages_container_tool.lib import api @@ -17,11 +24,12 @@ EXASLCT_DEFAULT_BIN = Path(Path(__file__).parent.parent, "exaslct") -class ExaslctApiTestEnvironmentWithCleanup(): +class ExaslctApiTestEnvironmentWithCleanup: def __init__(self, test_object, clean_images_at_close, name=None): self._itde_api_test_environement = api_test_environment.ApiTestEnvironment( - test_object=test_object, name=name) + test_object=test_object, name=name + ) self.clean_images_at_close = clean_images_at_close @property @@ -65,41 +73,49 @@ def close(self): self._itde_api_test_environement.close() def clean_all_images(self): - api.clean_all_images(docker_repository_name=self._itde_api_test_environement.docker_repository_name) + api.clean_all_images( + docker_repository_name=self._itde_api_test_environement.docker_repository_name + ) - def spawn_docker_test_environment_with_test_container(self, name: str, - test_container_content: TestContainerContentDescription, - additional_parameter: Dict[str, Any] = None) \ - -> ExaslctDockerTestEnvironment: + def spawn_docker_test_environment_with_test_container( + self, + name: str, + test_container_content: TestContainerContentDescription, + additional_parameter: Optional[Dict[str, Any]] = None, + ) -> ExaslctDockerTestEnvironment: return self._itde_api_test_environement.spawn_docker_test_environment_with_test_container( name=name, test_container_content=test_container_content, - additional_parameter=additional_parameter + additional_parameter=additional_parameter, ) - def spawn_docker_test_environment(self, name: str, - additional_parameter: Dict[str, Any] = None) \ - -> ExaslctDockerTestEnvironment: + def spawn_docker_test_environment( + self, name: str, additional_parameter: Optional[Dict[str, Any]] = None + ) -> ExaslctDockerTestEnvironment: return self._itde_api_test_environement.spawn_docker_test_environment( name=name, additional_parameter=additional_parameter ) -class ExaslctTestEnvironmentWithCleanUp(): +class 
ExaslctTestEnvironmentWithCleanUp: - def __init__(self, - test_object, - executable=EXASLCT_DEFAULT_BIN, - clean_images_at_close=True, - name=None, - flavor_path: Path = FLAVORS_ROOT_DIRECTORY / "test-flavor"): + def __init__( + self, + test_object, + executable=EXASLCT_DEFAULT_BIN, + clean_images_at_close=True, + name=None, + flavor_path: Path = FLAVORS_ROOT_DIRECTORY / "test-flavor", + ): self._flavor_path = flavor_path self._clean_images_at_close = clean_images_at_close - self._itde_cli_test_environment = exaslct_test_environment.ExaslctTestEnvironment( - test_object=test_object, - executable=executable, - clean_images_at_close=False, - name=name + self._itde_cli_test_environment = ( + exaslct_test_environment.ExaslctTestEnvironment( + test_object=test_object, + executable=executable, + clean_images_at_close=False, + name=name, + ) ) @property @@ -137,27 +153,34 @@ def repository_name(self, value): def clean_images(self): self.run_command(f"{self.executable} clean-flavor-images", clean=True) - def run_command(self, - command: str, - use_output_directory: bool = True, - use_docker_repository: bool = True, - use_flavor_path: bool = True, - track_task_dependencies: bool = False, - clean: bool = False, - capture_output: bool = False) -> CompletedProcess: + def run_command( + self, + command: str, + use_output_directory: bool = True, + use_docker_repository: bool = True, + use_flavor_path: bool = True, + track_task_dependencies: bool = False, + clean: bool = False, + capture_output: bool = False, + ) -> CompletedProcess: if use_flavor_path: command = f"{command} --flavor-path {self.flavor_path} " - return self._itde_cli_test_environment.run_command(command=command, - use_output_directory=use_output_directory, - use_flavor_path=False, - use_docker_repository=use_docker_repository, - track_task_dependencies=track_task_dependencies, - clean=clean, - capture_output=capture_output) - - def spawn_docker_test_environments(self, name: str, additional_parameter: List[str] = 
None) \ - -> SpawnedTestEnvironments: - return self._itde_cli_test_environment.spawn_docker_test_environments(name, additional_parameter) + return self._itde_cli_test_environment.run_command( + command=command, + use_output_directory=use_output_directory, + use_flavor_path=False, + use_docker_repository=use_docker_repository, + track_task_dependencies=track_task_dependencies, + clean=clean, + capture_output=capture_output, + ) + + def spawn_docker_test_environments( + self, name: str, additional_parameter: Optional[List[str]] = None + ) -> SpawnedTestEnvironments: + return self._itde_cli_test_environment.spawn_docker_test_environments( + name, additional_parameter + ) def close(self): try: @@ -168,7 +191,9 @@ def close(self): self._itde_cli_test_environment.close() def clean_all_images(self): - self.run_command(f"{self.executable} clean-all-images", use_flavor_path=False, clean=True) + self.run_command( + f"{self.executable} clean-all-images", use_flavor_path=False, clean=True + ) def get_full_test_container_folder_parameter() -> str: