diff --git a/.github/workflows/check_version.yaml b/.github/workflows/check_version.yaml index 5d8a5f72..4e77de6a 100644 --- a/.github/workflows/check_version.yaml +++ b/.github/workflows/check_version.yaml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: ./.github/actions/prepare_poetry_env diff --git a/.github/workflows/env_test.yml b/.github/workflows/env_test.yml index 69d5ca3c..9162b184 100644 --- a/.github/workflows/env_test.yml +++ b/.github/workflows/env_test.yml @@ -7,7 +7,7 @@ jobs: prep-testbed: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - id: set-matrix run: | sudo apt-get install jq @@ -23,7 +23,7 @@ jobs: test-path: ${{fromJson(needs.prep-testbed.outputs.matrix)}} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run all env tests run: ./scripts/test/ci_tests/run_ci_test.sh ${{ matrix.test-path }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9970167f..efe54ef1 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,7 +7,7 @@ jobs: test-docker-starter: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Test ./exaslct run: ./exaslct --help @@ -15,7 +15,7 @@ jobs: prep-testbed: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - id: set-matrix run: | sudo apt-get install jq @@ -37,7 +37,7 @@ jobs: runs-on: ubuntu-latest name: ${{ matrix.test-path.name }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/prepare_poetry_env @@ -55,7 +55,7 @@ jobs: runs-on: ubuntu-latest environment: publish steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Build new Docker image run: "bash scripts/build/build_docker_runner_image.sh" - name: Docker login @@ -64,4 +64,4 @@ jobs: SECRET_DOCKER_USER_NAME: ${{ secrets.DOCKER_USER_NAME }} SECRET_DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} - name: Push new Docker image - run: "bash scripts/build/push_docker_runner_image.sh main" \ No newline at end of file + run: "bash scripts/build/push_docker_runner_image.sh main" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f6b13573..dec1852a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,7 @@ jobs: steps: - name: SCM Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python & Poetry Environment uses: ./.github/actions/prepare_poetry_env diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml index edaf11f1..700d2cd3 100644 --- a/.github/workflows/shellcheck.yaml +++ b/.github/workflows/shellcheck.yaml @@ -7,6 +7,6 @@ jobs: shellcheck: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run shellcheck run: ./scripts/build/shellcheck.sh diff --git a/.gitignore b/.gitignore index 0c4e9c4d..ebc0f9f9 100644 --- a/.gitignore +++ b/.gitignore @@ -140,3 +140,5 @@ dmypy.json # Project .build_output/ +# Emacs +TAGS \ No newline at end of file diff --git a/doc/changes/changelog.md b/doc/changes/changelog.md index 94f5b3e8..e95fea52 100644 --- a/doc/changes/changelog.md +++ b/doc/changes/changelog.md @@ -1,5 +1,6 @@ # Changes +* [0.20.0](changes_0.20.0.md) * [0.19.0](changes_0.19.0.md) * [0.18.3](changes_0.18.3.md) * [0.18.2](changes_0.18.2.md) diff --git 
a/doc/changes/changes_0.20.0.md b/doc/changes/changes_0.20.0.md new file mode 100644 index 00000000..b5d4be8d --- /dev/null +++ b/doc/changes/changes_0.20.0.md @@ -0,0 +1,18 @@ +# Script-Languages-Container-Tool 0.20.0, released 2024-07-09 + +Code name: Fix vulnerabilities + +## Summary + +This release fixes the following vulnerabilities by updating dependencies: +* CVE-2024-35195 in dependency `requests` in versions < `2.32.0` caused by requests `Session` object not verifying requests after making first request with `verify=False` +* CVE-2024-37891 in transitive dependency via `boto3` to `urllib3` in versions < `2.2.2` caused by proxy-authorization request header not to be stripped during cross-origin redirects as no update of notebook-connector is available, yet. +* GHSA-w235-7p84-xx57 in transitive dependency via `luigi` to `tornado` in versions < `6.4.1` enabling CRLF injection in `CurlAsyncHTTPClient` headers. +* GHSA-753j-mpmx-qq6g in transitive dependency via `luigi` to `tornado` in versions < `6.4.1` due to inconsistent interpretation of HTTP Requests ('HTTP Request/Response Smuggling') + +However, the release ignores the following vulnerabilities +* GHSA-753j-mpmx-qq6g in dependency `configobj` in versions ≤ `5.0.8` being ReDoS exploitable by developers using values in a server-side configuration file as SLCT is used only client side and a patched version is not available, yet. + +## Security Issues + +* #216: Updated dependencies to fix vulnerabilities diff --git a/doc/dependencies.md b/doc/dependencies.md index dafc25d0..ca101700 100644 --- a/doc/dependencies.md +++ b/doc/dependencies.md @@ -1,51 +1,73 @@ # Dependencies - + ## Compile Dependencies -|Package| Version | -|---|---------| -|poetry| 1.1.11 | +| Package | Version | +|---------|---------| +| poetry | 1.1.11 | ## Runtime Dependencies -| Package | Version | -|---------------------------------------------------------------------------------------------------------------------------|-----------| -| Python | >=3.8 | -| certifi | 2020.12.5 | -| chardet | 4.0.0 | -| click | 7.1.2 | -| decorator | 4.4.2 | -| docker | 5.0.0 | -| docutils | 0.17.1 | -| exasol-integration-test-docker-environment @ git+https://github.com/exasol/integration-test-docker-environment.git@0.11.0 | -| gitdb | 4.0.7 | -| gitpython | 3.1.15 | -| humanfriendly | 9.1 | -| idna | 2.10 | -| importlib-metadata | 4.0.1 | -| importlib-resources | 5.1.2 | -| jinja2 | 2.11.3 | -| jsonpickle | 2.0.0 | -| lockfile | 0.12.2 | -| luigi | 3.0.3 | -| markupsafe | 1.1.1 | -| netaddr | 0.8.0 | -| networkx | 2.5.1 | -| pydot | 1.4.2 | -| pyparsing | 2.4.7 | -| pyreadline | 2.1 | -| python-daemon | 2.3.0 | -| python-dateutil | 2.8.1 | -| pywin32 | 227 | -| requests | 2.25.1 | -| simplejson | 3.17.2 | -| six | 1.15.0 | -| smmap | 4.0.0 | -| stopwatch.py | 1.0.1 | -| tenacity | 6.3.1 | -| tornado | 6.1 | -| typing-extensions | 3.7.4.3 | -| urllib3 | 1.22 | -| websocket-client | 0.58.0 | -| zipp | 3.4.1 | \ No newline at end of file +| Package | Version | Description | +|--------------------------------------------|-----------------|------------------------------------------------------------------------------------------------| +| anyio | 4.4.0 | High level compatibility layer for multiple asynchronous event loop implementations | +| attrs | 23.2.0 | Classes Without Boilerplate | +| bcrypt | 4.1.3 | Modern password hashing for your software and your servers | +| certifi | 2024.7.4 | Python package for providing Mozilla's CA Bundle. 
| +| cffi | 1.16.0 | Foreign Function Interface for Python calling C code. | +| charset-normalizer | 3.3.2 | The Real First Universal Charset Detector. Open, modern and actively maintained alternative... | +| click | 8.1.7 | Composable command line interface toolkit | +| configobj | 5.0.8 | Config file reading, writing and validation. | +| cryptography | 42.0.8 | cryptography is a package which provides cryptographic recipes and primitives to Python dev... | +| decorator | 5.1.1 | Decorators for Humans | +| deprecated | 1.2.14 | Python @deprecated decorator to deprecate old python classes, functions or methods. | +| docker | 7.1.0 | A Python library for the Docker Engine API. | +| docutils | 0.20.1 | Docutils -- Python Documentation Utilities | +| exasol-bucketfs | 0.11.0 | BucketFS utilities for the Python programming language | +| exasol-error-reporting | 0.4.0 | Exasol Python Error Reporting | +| exasol-integration-test-docker-environment | 3.1.0 | Integration Test Docker Environment for Exasol | +| exasol-saas-api | 0.7.0 | API enabling Python applications connecting to Exasol database SaaS instances and using the... | +| fabric | 3.2.2 | High level SSH command execution | +| gitdb | 4.0.11 | Git Object Database | +| gitpython | 3.1.43 | GitPython is a Python library used to interact with Git repositories | +| h11 | 0.14.0 | A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 | +| httpcore | 1.0.5 | A minimal low-level HTTP client. | +| httpx | 0.27.0 | The next generation HTTP client. | +| humanfriendly | 10.0 | Human friendly output for text interfaces using Python | +| idna | 3.7 | Internationalized Domain Names in Applications (IDNA) | +| ifaddr | 0.2.0 | Cross-platform network interface and IP address enumeration library | +| importlib-metadata | 8.0.0 | Read metadata from Python packages | +| importlib-resources | 6.4.0 | Read resources from Python packages | +| invoke | 2.2.0 | Pythonic task execution | +| jinja2 | 3.1.4 | A very fast and expressive template engine. | +| joblib | 1.4.2 | Lightweight pipelining with Python functions | +| jsonpickle | 3.2.2 | Python library for serializing arbitrary object graphs into JSON | +| lockfile | 0.12.2 | Platform-independent file locking module | +| luigi | 3.5.1 | Workflow mgmgt + task scheduling + dependency resolution. | +| markupsafe | 2.1.5 | Safely add untrusted strings to HTML/XML markup. | +| netaddr | 1.3.0 | A network address manipulation library for Python | +| networkx | 2.8.8 | Python package for creating and manipulating graphs and networks | +| paramiko | 3.4.0 | SSH2 protocol library | +| portalocker | 2.10.0 | Wraps the portalocker recipe for easy usage | +| pycparser | 2.22 | C parser in Python | +| pydot | 2.0.0 | Python interface to Graphviz's Dot | +| pynacl | 1.5.0 | Python binding to the Networking and Cryptography (NaCl) library | +| pyparsing | 3.1.2 | pyparsing module - Classes and methods to define and execute parsing grammars | +| python-daemon | 3.0.1 | Library to implement a well-behaved Unix daemon process. | +| python-dateutil | 2.9.0.post0 | Extensions to the standard Python datetime module | +| requests | 2.32.3 | Python HTTP for Humans. 
| +| setuptools | 70.2.0 | Easily download, build, install, upgrade, and uninstall Python packages | +| simplejson | 3.19.2 | Simple, fast, extensible JSON encoder/decoder for Python | +| six | 1.16.0 | Python 2 and 3 compatibility utilities | +| smmap | 5.0.1 | A pure Python implementation of a sliding window memory map manager | +| sniffio | 1.3.1 | Sniff out which async library your code is running under | +| stopwatch-py | 2.0.1 | A simple stopwatch for python | +| tenacity | 8.4.2 | Retry code until it succeeds | +| toml | 0.10.2 | Python Library for Tom's Obvious, Minimal Language | +| tornado | 6.4.1 | Tornado is a Python web framework and asynchronous networking library, originally developed... | +| typeguard | 4.0.0 | Run-time type checker for Python | +| types-requests | 2.32.0.20240622 | Typing stubs for requests | +| urllib3 | 2.2.2 | HTTP library with thread-safe connection pooling, file post, and more. | +| wrapt | 1.16.0 | Module for decorators, wrappers and monkey patching. | +| zipp | 3.19.2 | Backport of pathlib-compatible object wrapper for zip files | diff --git a/doc/developer_guide/developer_guide.md b/doc/developer_guide/developer_guide.md index da0f4c1f..76191dfb 100644 --- a/doc/developer_guide/developer_guide.md +++ b/doc/developer_guide/developer_guide.md @@ -1,34 +1,16 @@ # Script-Languages-Container-Tool Developer Guide -EXASLCT is the build tool for the script language container. -This document is about the inner working of EXASLCT. +EXASLCT is the build tool for the script language container. This document is about the inner working of EXASLCT. ## About the Script Language Containers -The Script Language Containers are getting build -from several Dockerfiles which depend on each other. -These Dockerfiles need to install all necessary -dependencies for the [script client](https://github.com/exasol/script-languages/tree/master/exaudfclient/base), -compile the script client and install all necessary dependencies -for the flavor and the customizations of the user. - -## Problem Statement: -The old-style to build the containers was slow and laborious. -A single change in the dependencies required a rebuild of everything. -Furthermore, essential dependencies for the script client were -mixed with flavor dependent dependencies. -Changes were difficult for users and could break the container. -It was actual unclear which of the dependencies were essential -and which were not. For some flavors it was impossible to run -the build on travis, because it exceeded the maximum runtime -per job of 50 minutes. -The build system and the test runner were both bash scripts -which were messy and difficult to maintain. -They worked with background jobs to do things in parallel -which convoluted the logs which made error analysis difficult. -Further, the test runner left temporary files which were owned by root, -because they were created by a docker container. - -## Design Goals: + +The Script Language Containers are getting build from several Dockerfiles which depend on each other. These Dockerfiles need to install all necessary dependencies for the [script client](https://github.com/exasol/script-languages/tree/master/exaudfclient/base), compile the script client and install all necessary dependencies for the flavor and the customizations of the user. + +## Problem Statement + +The old-style to build the containers was slow and laborious. A single change in the dependencies required a rebuild of everything. 
Furthermore, essential dependencies for the script client were mixed with flavor dependent dependencies. Changes were difficult for users and could break the container. It was actual unclear which of the dependencies were essential and which were not. For some flavors it was impossible to run the build on travis, because it exceeded the maximum runtime per job of 50 minutes. The build system and the test runner were both bash scripts which were messy and difficult to maintain. They worked with background jobs to do things in parallel which convoluted the logs which made error analysis difficult. Further, the test runner left temporary files which were owned by root, because they were created by a docker container. + +## Design Goals * Easy customization of existing flavors for the user * User customizations are not able to break the container @@ -39,119 +21,75 @@ because they were created by a docker container. * Local and remote caching * Faster development cycles for the script client * Allowing output redirection for testing of the flavor -* Encapsulate running the tests and all its dependencies +* Encapsulate running the tests and all its dependencies into docker containers or volumes. * Error resilient ## Programming Model -Exaslct is a mix of a build system, test runner and infrastructure as code. -As such, we typically have tasks like the following one: -- Build Image -- Start Container -- Upload something +Exaslct is a mix of a build system, test runner and infrastructure as code. As such, we typically have tasks like the following one: -Most of these tasks produce some kind of output, for example: -- docker image -- a running docker container - -Often, other tasks then depend either on the output, or -the action of one or more other tasks. -These dependencies build a direct acyclic graph of tasks, -also known as workflow. +* Build Image +* Start Container +* Upload something +Most of these tasks produce some kind of output, for example: +* docker image +* a running docker container -Tasks that depend on each other need to run in sequence, -but tasks which are independent of each other may run in parallel. -This model also allows a good separation of concern, -because each Task solves one Problem. +Often, other tasks then depend either on the output, or the action of one or more other tasks. These dependencies build a direct acyclic graph of tasks, also known as workflow. +Tasks that depend on each other need to run in sequence, but tasks which are independent of each other may run in parallel. This model also allows a good separation of concern, because each Task solves one Problem. -As workflow executor, we use [Luigi](https://luigi.readthedocs.io/en/stable/) -which was actually developed for batch data science workflows, -but is suitable for other scenarios, too. -Luigi describes tasks as subclasses of Luigi.Task -which implements the following methods: +As workflow executor, we use [Luigi](https://luigi.readthedocs.io/en/stable/) which was actually developed for batch data science workflows, but is suitable for other scenarios, too. Luigi describes tasks as subclasses of Luigi.Task which implements the following methods: ``` class TaskC(luigi.Task): def output(self): return Target() - + def run(self): #do somthing pass - + def requires(self): return [TaskA(),TaskB()] ``` -Here we describe a TaskC which depends on TaskA and TaskB -defined in the `requires()` method. -It does something which is specified in the run() method. -Further, it produces Target() as output. 
-Luigi provides the dependency resolution, scheduling and parallelization. - -Besides, this static way of describing the dependencies between tasks, -Luigi also provides so called -[dynamic dependencies](https://luigi.readthedocs.io/en/stable/tasks.html#dynamic-dependencies), -which allow more flexible patterns in special case. -Especially, if the order of execution of dependencies is important, or -the dependencies depend on some calculation. The dynamic dependencies -allow the implementation of a fork-join pattern. - -In EXASLCT we use our own subclass of Luigi.Task, `StoppableTask` -as base class. The StoppableTask adds profiling, -recording of dependencies for visualization or debugging and -stops if any other StoppableTask failed in the workflow. - -## Build Steps and their Dependencies - -We compose the language container from several Dockerfiles. -Each Dockerfile installs dependencies for one specific purpose. -We also added a separate Dockerfile flavor-customization for user specific changes. -The user specific changes will be merged on filesystem basis -with the resulting docker images for the script client. -The merge will overwrite user specific changes -that could prevent the script client from working properly. +Here we describe a TaskC which depends on TaskA and TaskB defined in the `requires()` method. It does something which is specified in the run() method. Further, it produces Target() as output. Luigi provides the dependency resolution, scheduling and parallelization. + +Besides, this static way of describing the dependencies between tasks, Luigi also provides so called [dynamic dependencies](https://luigi.readthedocs.io/en/stable/tasks.html#dynamic-dependencies), which allow more flexible patterns in special case. Especially, if the order of execution of dependencies is important, or the dependencies depend on some calculation. The dynamic dependencies allow the implementation of a fork-join pattern. + +In EXASLCT we use our own subclass of Luigi.Task, `StoppableTask` as base class. The StoppableTask adds profiling, recording of dependencies for visualization or debugging and stops if any other StoppableTask failed in the workflow. + +## Build Steps and Their Dependencies + +We compose the language container from several Dockerfiles. Each Dockerfile installs dependencies for one specific purpose. We also added a separate Dockerfile flavor-customization for user specific changes. The user specific changes will be merged on filesystem basis with the resulting docker images for the script client. The merge will overwrite user specific changes that could prevent the script client from working properly. The following graph shows the default build steps and their dependencies. ![](images/image-dependencies.png) -A dependency between build steps can be either a FROM or -COPY dependencies. A FROM dependency means that -the target of the arrow uses the source of the arrow as base image. -A COPY dependency means that the target of the arrow -[copies parts](https://docs.docker.com/develop/develop-images/multistage-build/) -of the source of the arrow. +A dependency between build steps can be either a FROM or COPY dependencies. A FROM dependency means that the target of the arrow uses the source of the arrow as base image. A COPY dependency means that the target of the arrow [copies parts](https://docs.docker.com/develop/develop-images/multistage-build/) of the source of the arrow. 
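Returning to the dynamic dependencies mentioned in the Programming Model section above: the following minimal sketch (not code from EXASLCT; the task names are invented for illustration) shows how yielding tasks from `run()` expresses a fork-join pattern with Luigi:

```python
import luigi


class PartTask(luigi.Task):
    """Hypothetical worker task producing one partial result."""
    index = luigi.IntParameter()

    def output(self):
        return luigi.LocalTarget(f"part_{self.index}.txt")

    def run(self):
        with self.output().open("w") as f:
            f.write(str(self.index))


class ForkJoinTask(luigi.Task):
    """Forks several PartTasks at runtime and joins their results."""

    def output(self):
        return luigi.LocalTarget("joined.txt")

    def run(self):
        # Fork: yielding tasks from run() suspends this task until all
        # yielded tasks have completed; the yield returns their targets.
        targets = yield [PartTask(index=i) for i in range(3)]
        # Join: read the outputs of the dynamically required tasks.
        parts = []
        for target in targets:
            with target.open("r") as f:
                parts.append(f.read())
        with self.output().open("w") as f:
            f.write(",".join(parts))
```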
-All steps with the string "build_run" in their name, -either run the build for the script client or -at least inherit from an image which had built it. -As such these images contain all necessary tools to rebuild -the script client for debugging purposes. +All steps with the string "build_run" in their name, either run the build for the script client or at least inherit from an image which had built it. As such these images contain all necessary tools to rebuild the script client for debugging purposes. -## How do we define build steps for a flavor +## How do we Define Build Steps for a Flavor? -Each flavor has a build_steps.py file in the -/flavor_base directory -which defines the build steps as classes which inherit -from DockerFlavorAnalyzeImageTask. -For example: +Each flavor has a build_steps.py file in the /flavor_base directory which defines the build steps as classes which inherit from DockerFlavorAnalyzeImageTask. For example: -``` +```python class AnalyzeBuildRun(DockerFlavorAnalyzeImageTask): def get_build_step(self) -> str: - # name of the build step, which defines the directory name - # for the build context of this image and gets used for the + # name of the build step, which defines the directory name + # for the build context of this image and gets used for the # build boundaries return "build_run" def requires_tasks(self): - # other build steps the current build step depends on, the keys used here, + # other build steps the current build step depends on, the keys used here, # get replaced in your dockerfile with the actual image names of your dependencies return {"build_deps": AnalyzeBuildDeps(flavor_path=self.flavor_path), "language_deps": AnalyzeLanguageDeps(flavor_path=self.flavor_path)} @@ -167,71 +105,58 @@ class AnalyzeBuildRun(DockerFlavorAnalyzeImageTask): def get_image_changing_build_arguments(self): # optional: build arguments which might change the image content return dict() - + def get_transparent_build_arguments(self): # optional: build arguments which won't change the image content return dict() ``` +## How Does Caching Work? + +Exaslct was built with caching in mind, because building a flavor might take very long, and many build steps don't change that often. Furthermore, an end user most likely only changes the build-step flavor-customization which is designed to have a minimal impact on all other build steps. + +### Which Caches are Available? + +EXASLCT provides three types of caching: +* docker images managed by the docker daemon +* file system cache with saved docker images +* docker registry as a remote cache + +All caches can work together, the analysis phase checks in which cache an images is available. The different type of caches have different precedence which might you override by command line parameters. The precedence is derived by how fast is an image available. Docker images managed by the docker daemon are instantaneously available. Saved docker images on the filesystem follow next, they need to be loaded by the daemon, but are most likely on a local file system. The last cache which gets checked is a docker registry, because it is most likely not local and needs transport over network. + +### Finding the Corresponding Docker Images to the Current Build Context + +EXASLCT computes a hash value for the whole build context of an image and adds the hash value to the tag of the image. Responsible for hashing the build context is the `BuildContextHasher` which uses the `FileDirectoryListHasher`. 
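To make the hashing idea concrete, here is a simplified, stand-alone sketch of how such a build-context hash could be computed. It illustrates the concept only and is not the actual `BuildContextHasher` implementation; the function and parameter names are invented:

```python
import hashlib
import os
from pathlib import Path
from typing import Dict, List


def hash_build_context(context_dir: str,
                       parent_image_hashes: List[str],
                       image_changing_build_args: Dict[str, str]) -> str:
    """Combine file contents, executable bits, the hashes of the parent
    images and the image-changing build arguments into one digest."""
    sha = hashlib.sha256()
    for path in sorted(Path(context_dir).rglob("*")):
        if path.is_file():
            sha.update(str(path.relative_to(context_dir)).encode())
            # Only the executable bit is hashed, because it is the only
            # permission that git records.
            sha.update(b"x" if os.access(path, os.X_OK) else b"-")
            sha.update(path.read_bytes())
    for parent_hash in sorted(parent_image_hashes):
        sha.update(parent_hash.encode())
    for key, value in sorted(image_changing_build_args.items()):
        sha.update(f"{key}={value}".encode())
    # The digest then becomes part of the image tag, so an unchanged
    # build context maps to an already cached image.
    return sha.hexdigest()
```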
-## How does caching work - -Exaslct was built with caching in mind, -because building a flavor might take very long, and -many build steps don't change that often. -Furthermore, an end user most likely only changes the build-step -flavor-customization which is designed to have a minimal impact -on all other build steps. - -### Which caches are available - -EXASLCT provides three types of caching: -- docker images managed by the docker daemon -- file system cache with saved docker images -- docker registry as a remote cache - -All caches can work together, the analysis phase checks -in which cache an images is available. -The different type of caches have different precedence -which might you override by command line parameters. -The precedence is derived by how fast is an image available. -Docker images managed by the docker daemon are instantaneously available. -Saved docker images on the filesystem follow next, -they need to be loaded by the daemon, -but are most likely on a local file system. -The last cache which gets checked is a docker registry, -because it is most likely not local and needs transport over network. - -### Finding the corresponding docker images to the current build context - -EXASLCT computes a hash value for the whole build context of an image and -adds the hash value to the tag of the image. -Responsible for hashing the build context is the `BuildContextHasher` -which uses the `FileDirectoryListHasher`. - -The `BuildContextHasher` combines the hash values of all directories, -files and their executable permissions of the build context, -such as the hash values of all images -the current images depends on, and the image changing build arguments -to one hash value for the image. - -Other build arguments which only influence the resources -which are used to build the image are not part of the final hash. -The `BuildContextHasher` hashes the execution rights of files, -because these are the only rights which get saved in git and -can be important for the images. - -## Creating a release +The `BuildContextHasher` combines the hash values of all directories, files and their executable permissions of the build context, such as the hash values of all images the current images depends on, and the image changing build arguments to one hash value for the image. + +Other build arguments which only influence the resources which are used to build the image are not part of the final hash. The `BuildContextHasher` hashes the execution rights of files, because these are the only rights which get saved in git and can be important for the images. + +## Updating Drivers and ExaPlus + +EXASLCT uses drivers and SQL Client ExaPlus for tests: +* JDBC driver +* OBDC driver +* ExaPlus + +Instructions + +1. You can download the latest versions from https://downloads.exasol.com/clients-and-drivers/odbc. +2. When downloading the ODBC driver, then please ensure to select the correct operating system, e.g. "Linux (x86_64)". +3. Copy the download URL used by your browser into file [test/resources/test_container/full/build/Dockerfile](https://github.com/exasol/script-languages-container-tool/blob/main/test/resources/test_container/full/build/Dockerfile). +4. Update the path to the resp. `*.so` files in file `lib/tasks/test/run_db_test.py`, method [command_line()](https://github.com/exasol/script-languages-container-tool/blob/main/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py#L110). 
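With the change to `run_db_test.py` in this diff, the ODBC driver's `*.so` file is located inside the test container via `find` at test time. The following minimal, stand-alone sketch illustrates that lookup with the Docker SDK (the client setup, container name, and error handling are assumptions for illustration; the project itself uses its own exec helper):

```python
import docker

client = docker.from_env()
container = client.containers.get("test_container")  # hypothetical name

# Locate the ODBC driver shared object inside the running test container.
exit_code, output = container.exec_run(
    "find /downloads/ODBC -name libexaodbc*.so"
)
if exit_code != 0:
    raise RuntimeError(f"find returned exit code {exit_code}")
odbc_driver_path = output.decode().strip()
print(odbc_driver_path)
```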
+ +## Creating a Release ### Prerequisites * Change log needs to be up to date * Latest change log version needs to match project and package version * Release tag needs to match package, changelog and project version - * For Example: - * Tag: 0.4.0 - * Changelog: changes_0.4.0.md - * \`poetry version -s\`: 0.4.0 + * For Example: + * Tag: `0.4.0` + * Changelog: `changes_0.4.0.md` + * `poetry version -s`: 0.4.0 ### Triggering the Release In order to trigger a release a new tag must be pushed to Github. @@ -246,9 +171,9 @@ For further details see: `.github/workflows/release.yml`. git push origin x.y.z -### What to do if the release failed? +### What to do if The Release Failed? -#### The release failed during pre-release checks +#### The Release Failed During Pre-Release Checks 1. Delete the local tag @@ -261,8 +186,7 @@ For further details see: `.github/workflows/release.yml`. 3. Fix the issue(s) which lead to the failing checks 4. Start the release process from the beginning - -#### One of the release steps failed (Partial Release) +#### One of the Release Steps Failed (Partial Release) 1. Check the Github action/workflow to see which steps failed 2. Finish or redo the failed release steps manually @@ -271,4 +195,4 @@ For further details see: `.github/workflows/release.yml`. _Scenario_: Publishing of the release on Github was successfully but during the PyPi release, the upload step got interrupted. -_Solution_: Manually push the package to PyPi \ No newline at end of file +_Solution_: Manually push the package to PyPi diff --git a/error_code_config.yml b/error_code_config.yml new file mode 100644 index 00000000..f327a90e --- /dev/null +++ b/error_code_config.yml @@ -0,0 +1,3 @@ +error-tags: + SLCT: + highest-index: 0 diff --git a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py index b13f4452..d1dab50a 100644 --- a/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py +++ b/exasol_script_languages_container_tool/lib/tasks/test/run_db_test.py @@ -1,6 +1,7 @@ from collections import namedtuple from pathlib import Path from typing import Optional +from io import StringIO import docker.models.containers import luigi @@ -17,6 +18,11 @@ from exasol_script_languages_container_tool.lib.utils.docker_utils import exec_run_and_write_to_stream +class DockerCommandException(Exception): + """ + Executing a special command inside the TestContainer failed. 
+ """ + class RunDBTest(FlavorBaseTask, RunDBTestParameter, @@ -36,11 +42,27 @@ def __init__(self, *args, **kwargs): self._test_container_info = self.test_environment_info.test_container_info self._database_info = self.test_environment_info.database_info + def _run_command( + self, + docker_client: docker.client, + container: docker.models.containers.Container, + command: str, + ) -> str: + file = StringIO() + exit_code = exec_run_and_write_to_stream(docker_client, container, command, file, {}) + if exit_code != 0: + raise DockerCommandException(f"Command returned {exit_code}: {command}") + return file.getvalue().strip() + def run_task(self): self.logger.info("Running db tests") with self._get_docker_client() as docker_client: test_container = docker_client.containers.get(self._test_container_info.container_name) - bash_cmd = self.generate_test_command() + odbc_driver = self._run_command( + docker_client, test_container, + "find /downloads/ODBC -name libexaodbc\*.so", + ) + bash_cmd = self.generate_test_command(odbc_driver) test_output_file = self.get_log_path().joinpath("test_output") exit_code = self.run_test_command(docker_client, bash_cmd, test_container, test_output_file) self.handle_test_result(exit_code, test_output_file) @@ -103,28 +125,29 @@ def run_test_command(self, docker_client: docker.client, bash_cmd: str, exit_code = exec_run_and_write_to_stream(docker_client, test_container, bash_cmd, file, environment) return exit_code - def generate_test_command(self) -> str: - credentials = f"--user '{self.db_user}' --password '{self.db_password}'" - log_level = f"--loglevel={self.test_log_level}" - server = f"--server '{self._database_info.host}:{self._database_info.ports.database}'" - environment = "--driver=/downloads/ODBC/lib/linux/x86_64/libexaodbc-uo2214lv2.so " \ - "--jdbc-path /downloads/JDBC/exajdbc.jar" - language_definition = f"--script-languages '{self.language_definition}'" - language_path = f"--lang-path /tests/lang" - language = "" - if self.language is not None: - language = "--lang %s" % self.language - test_restrictions = " ".join(self.test_restrictions) - test_file = f'"{self.test_file}"' - args = " ".join([test_file, - server, - credentials, - language_definition, - language_path, - log_level, - environment, - language, - test_restrictions]) - cmd = f'cd /tests/test/; python3 {args}' - bash_cmd = f"""bash -c "{cmd}" """ - return bash_cmd + def generate_test_command(self, odbc_driver: str) -> str: + def quote(s): + return f"'{s}'" + + def command_line(): + host = self._database_info.host + port = self._database_info.ports.database + yield from [ + "cd /tests/test/;", + "python3", + quote(self.test_file), + "--server", quote(f"{host}:{port}"), + "--user", quote(self.db_user), + "--password", quote(self.db_password), + "--script-languages", quote(self.language_definition), + "--lang-path", "/tests/lang", + f"--loglevel={self.test_log_level}", + f"--driver={odbc_driver}", + "--jdbc-path", "/downloads/JDBC/exajdbc.jar", + ] + if self.language is not None: + yield from [ "--lang", self.language ] + yield from self.test_restrictions + + command = " ".join([ e for e in command_line() ]) + return f'bash -c "{command}"' diff --git a/poetry.lock b/poetry.lock index fc736d26..4b5b2e0f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -83,13 +83,13 @@ typecheck = ["mypy"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -296,43 +296,43 @@ six = "*" [[package]] name = "cryptography" -version = "42.0.7" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, - {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, - {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = 
"sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, - {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, - {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, - {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, - {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -378,44 +378,45 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "docker" -version = "6.1.3" +version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, - {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, ] [package.dependencies] -packaging = ">=14.0" requests = ">=2.26.0" urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" [package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "docutils" -version = "0.21.2" +version = "0.20.1" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] [[package]] name = "exasol-bucketfs" -version = "0.10.0" +version = "0.11.0" description = "BucketFS utilities for the Python programming language" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "exasol_bucketfs-0.10.0-py3-none-any.whl", hash = "sha256:4f5aa81c31c5e03f19daa04d8b455ed09740f9e82bc53f3f9cb47db025146625"}, - {file = "exasol_bucketfs-0.10.0.tar.gz", hash = "sha256:033ee923728037af4d7771d9c6855e9eed2389d842c98a8456f937c917c395f8"}, + {file = "exasol_bucketfs-0.11.0-py3-none-any.whl", hash = "sha256:9eb42c6df5804aa104646e141bd22f4f85c43cc9d563254f9b9ab7293a574120"}, + {file = "exasol_bucketfs-0.11.0.tar.gz", hash = 
"sha256:6dc2639336816dc57383095eafbfd811ade737f9290191ee5cefa55a22f581df"}, ] [package.dependencies] @@ -439,18 +440,19 @@ files = [ [[package]] name = "exasol-integration-test-docker-environment" -version = "3.0.0" +version = "3.1.0" description = "Integration Test Docker Environment for Exasol" optional = false -python-versions = ">=3.8,<4" +python-versions = "<4,>=3.8" files = [ - {file = "exasol_integration_test_docker_environment-3.0.0-py3-none-any.whl", hash = "sha256:0c041f7a0acb10fd4fee5dcf159984fcd347f123566f761fb8ec3e495570e778"}, - {file = "exasol_integration_test_docker_environment-3.0.0.tar.gz", hash = "sha256:4ca6ee1e3b60c0e8bb0adc13625276a5ccfc0fb749393dd3daf904eede246860"}, + {file = "exasol_integration_test_docker_environment-3.1.0-py3-none-any.whl", hash = "sha256:ce3cf917bd660ef5e1c59281ba0c1ea7c7dc3c43c9454040a6e1a3d90124a1e4"}, + {file = "exasol_integration_test_docker_environment-3.1.0.tar.gz", hash = "sha256:dbcbb967bb3458f74c8764a1cfc4f62ab215b79ee754bb4d17d4ef54c74f7f0a"}, ] [package.dependencies] click = ">=7.0" -docker = {version = ">=4.0.0,<7.0.0", markers = "sys_platform != \"win32\""} +docker = {version = ">=4.0.0,<7.0.0 || >7.0.0", markers = "sys_platform != \"win32\""} +docutils = "<=0.20.1" exasol-bucketfs = ">=0.6.0,<2.0.0" exasol-error-reporting = ">=0.4.0,<0.5.0" fabric = ">=3.0.1,<4.0.0" @@ -464,20 +466,19 @@ netaddr = ">=0.7.19" networkx = ">=2.3" portalocker = ">=2.7.0,<3.0.0" pydot = ">=1.4.0" -pytest = ">=7.2.2,<8.0.0" requests = ">=2.21.0" simplejson = ">=3.16.0" "stopwatch.py" = ">=1.0.0" [[package]] name = "exasol-saas-api" -version = "0.5.0" +version = "0.7.0" description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" optional = false python-versions = "<4.0,>=3.8.0" files = [ - {file = "exasol_saas_api-0.5.0-py3-none-any.whl", hash = "sha256:a2b81ad4100dc6d2f0f8dc6d0e2b18a0f67d04c3f7d2eb117e8038fe9db87eee"}, - {file = "exasol_saas_api-0.5.0.tar.gz", hash = "sha256:164f31c23fc54ddda18b0a881880b91cff6cd07926f73c154a9b8f44f4cf9575"}, + {file = "exasol_saas_api-0.7.0-py3-none-any.whl", hash = "sha256:7ffe1a05aa419099bcafa3984af5f750dc2234c8b18170ccda5336b95bac7c09"}, + {file = "exasol_saas_api-0.7.0.tar.gz", hash = "sha256:8d69780cdc876dc206797fea5b2f964a06248f0a087b611ae06ac3646f84a846"}, ] [package.dependencies] @@ -649,22 +650,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest 
(>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -681,17 +682,6 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - [[package]] name = "invoke" version = "2.2.0" @@ -733,13 +723,13 @@ files = [ [[package]] name = "jsonpickle" -version = "3.0.4" -description = "Serialize any Python object to JSON" +version = "3.2.2" +description = "Python library for serializing arbitrary object graphs into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.4-py3-none-any.whl", hash = "sha256:04ae7567a14269579e3af66b76bda284587458d7e8a204951ca8f71a3309952e"}, - {file = "jsonpickle-3.0.4.tar.gz", hash = "sha256:a1b14c8d6221cd8f394f2a97e735ea1d7edc927fbd135b26f2f8700657c8c62b"}, + {file = "jsonpickle-3.2.2-py3-none-any.whl", hash = "sha256:87cd82d237fd72c5a34970e7222dddc0accc13fddf49af84111887ed9a9445aa"}, + {file = "jsonpickle-3.2.2.tar.gz", hash = "sha256:d425fd2b8afe9f5d7d57205153403fbf897782204437882a477e8eed60930f8c"}, ] [package.extras] @@ -850,13 +840,13 @@ files = [ [[package]] name = "netaddr" -version = "1.2.1" +version = "1.3.0" description = "A network address manipulation library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "netaddr-1.2.1-py3-none-any.whl", hash = "sha256:bd9e9534b0d46af328cf64f0e5a23a5a43fca292df221c85580b27394793496e"}, - {file = "netaddr-1.2.1.tar.gz", hash = "sha256:6eb8fedf0412c6d294d06885c110de945cf4d22d2b510d0404f4e06950857987"}, + {file = "netaddr-1.3.0-py3-none-any.whl", hash = "sha256:c2c6a8ebe5554ce33b7d5b3a306b71bbb373e000bbbf2350dd5213cc56e3dbbe"}, + {file = "netaddr-1.3.0.tar.gz", hash = "sha256:5c3c3d9895b551b763779ba7db7a03487dc1f8e3b385af819af341ae9ef6e48a"}, ] [package.extras] @@ -864,32 +854,21 @@ nicer-shell = ["ipython"] [[package]] name = "networkx" -version = "2.8.2" +version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.8" files = [ - {file = "networkx-2.8.2-py3-none-any.whl", hash = "sha256:51d6ae63c24dcd33901357688a2ad20d6bcd38f9a4c5307720048d3a8081059c"}, - {file = "networkx-2.8.2.tar.gz", hash = "sha256:ae99c9b0d35e5b4a62cf1cfea01e5b3633d8d02f4a0ead69685b6e7de5b85eab"}, + {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, + {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, ] [package.extras] 
default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=0.942)", "pre-commit (>=2.18)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.3)", "pillow (>=9.1)", "pydata-sphinx-theme (>=0.8.1)", "sphinx (>=4.5)", "sphinx-gallery (>=0.10)", "texext (>=0.6.6)"] +developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.1)", "pytest-cov (>=3.0)"] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "paramiko" @@ -912,30 +891,15 @@ all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1 gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=2.0)"] -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - [[package]] name = "portalocker" -version = "2.8.2" +version = "2.10.0" description = "Wraps the portalocker recipe for easy usage" optional = false python-versions = ">=3.8" files = [ - {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, - {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, + {file = "portalocker-2.10.0-py3-none-any.whl", hash = "sha256:48944147b2cd42520549bc1bb8fe44e220296e56f7c3d551bc6ecce69d9b0de1"}, + {file = "portalocker-2.10.0.tar.gz", hash = "sha256:49de8bc0a2f68ca98bf9e219c81a3e6b27097c7bf505a87c5a112ce1aaeb9b81"}, ] [package.dependencies] @@ -1027,28 +991,6 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis 
(>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - [[package]] name = "python-daemon" version = "3.0.1" @@ -1108,13 +1050,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1129,18 +1071,18 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "70.0.0" +version = "70.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simplejson" @@ -1295,13 +1237,13 @@ files = [ [[package]] name = 
"tenacity" -version = "8.3.0" +version = "8.4.2" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" files = [ - {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, - {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, + {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"}, + {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"}, ] [package.extras] @@ -1319,35 +1261,24 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "tornado" -version = "6.4" +version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] [[package]] @@ -1370,13 +1301,13 @@ test = ["mypy (>=1.2.0)", "pytest (>=7)"] [[package]] name = "types-requests" -version = "2.32.0.20240521" +version = "2.32.0.20240622" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240521.tar.gz", hash = "sha256:c5c4a0ae95aad51f1bf6dae9eed04a78f7f2575d4b171da37b622e08b93eb5d3"}, - {file = "types_requests-2.32.0.20240521-py3-none-any.whl", hash = "sha256:ab728ba43ffb073db31f21202ecb97db8753ded4a9dc49cb480d8a5350c5c421"}, + {file = "types-requests-2.32.0.20240622.tar.gz", hash = "sha256:ed5e8a412fcc39159d6319385c009d642845f250c63902718f605cd90faade31"}, + {file = "types_requests-2.32.0.20240622-py3-none-any.whl", hash = "sha256:97bac6b54b5bd4cf91d407e62f0932a74821bc2211f22116d9ee1dd643826caf"}, ] [package.dependencies] @@ -1384,24 +1315,24 @@ urllib3 = ">=2" [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1410,22 +1341,6 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - [[package]] name = "wrapt" version = "1.16.0" @@ -1507,20 +1422,20 @@ files = [ [[package]] name = "zipp" -version = "3.18.2" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, - {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "052abcae475bc78ddd06e62e69265e6dba79890195def0de725ae0702b44ab8a" +content-hash = "438d41d797bd14d4142f307342b2f60f3f7403ef868a985ba8ded89a451c81c4" diff --git a/pyproject.toml b/pyproject.toml index 3a3bf8ef..411e4211 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "exasol-script-languages-container-tool" -version = "0.19.0" +version = "0.20.0" description = "Script Languages Container Tool" license = "MIT" @@ -10,19 +10,23 @@ authors = [ ] readme = 'README.md' # Markdown files are supported - repository = "https://github.com/exasol/script-languages-container-tool" homepage = 
"https://github.com/exasol/script-languages-container-tool" - keywords = ['exasol', 'udf', 'script-languages'] +[tool.poetry.urls] +"Homepage" = "https://github.com/exasol/script-languages-container-tool" +"Documentation" = "https://github.com/exasol/script-languages-container-tool" +"Source" = "https://github.com/exasol/script-languages-container-tool" +"Issues" = "https://github.com/exasol/script-languages-container-tool/issues" + [tool.poetry.dependencies] python = ">=3.10,<4" importlib_metadata = ">=4.6.0" importlib-resources = ">=5.4.0" -networkx = "2.8.2" # We pinned networkx to this version, because in newer versions it throws an exception, see https://github.com/exasol/integration-test-docker-environment/issues/228 -exasol-integration-test-docker-environment = "^3.0.0" -requests="2.31.0" # Pinned because of https://github.com/psf/requests/issues/6707 / https://github.com/docker/docker-py/issues/3256 +networkx = "^2.8.2" +exasol-integration-test-docker-environment = "^3.1.0" +requests="^2.31.0" [build-system] requires = ["poetry_core>=1.0.0"] diff --git a/release_config.yml b/release_config.yml deleted file mode 100644 index f18a4be8..00000000 --- a/release_config.yml +++ /dev/null @@ -1,14 +0,0 @@ -community-tags: - - CI - - continuous integration - - docker - - docker-db - - script languages - - UDFs -community-project-name: Script Languages Container Tool -community-project-description: | - The Script-Languages-Container-Tool (exaslct) is the build tool for the script language container. - You can build, export and upload script-language container from so-called flavors which are description how to build the script language container. - [Script-Languages-Container-Tool](https://github.com/exasol/script-languages-container-tool). -release-platforms: - - GitHub diff --git a/scripts/test/ci_tests/CentOs7/Dockerfile b/scripts/test/ci_tests/CentOs7/Dockerfile deleted file mode 100644 index d0aff6da..00000000 --- a/scripts/test/ci_tests/CentOs7/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM centos:7 - -RUN yum install -y htop atop mc -RUN yum install -y \ - https://download.docker.com/linux/centos/7/x86_64/stable/Packages/docker-ce-17.06.0.ce-1.el7.centos.x86_64.rpm && \ - yum clean all - -COPY run_ci_test_within_docker_bash_below_4.4.sh /run_ci_test_within_docker_bash_below_4.4.sh -ENTRYPOINT ["/run_ci_test_within_docker_bash_below_4.4.sh"] diff --git a/test/resources/test_container/full/build/Dockerfile b/test/resources/test_container/full/build/Dockerfile index dbd6e51d..ddad48be 100644 --- a/test/resources/test_container/full/build/Dockerfile +++ b/test/resources/test_container/full/build/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:22.04 ENV DEBIAN_FRONTEND=noninteractive COPY deps/01_nodoc /etc/dpkg/dpkg.cfg.d/01_nodoc @@ -7,7 +7,7 @@ RUN apt-get -y update && \ apt-get -y install \ openjdk-11-jre \ locales \ - python3.8-venv \ + python3.10-venv \ python3-pip \ unixodbc \ unixodbc-dev \ @@ -22,10 +22,14 @@ RUN apt-get -y update && \ RUN curl -fsSL https://get.docker.com | bash RUN mkdir -p downloads/ODBC downloads/JDBC downloads/EXAplus -RUN curl -s https://exasol-script-languages-dependencies.s3.eu-central-1.amazonaws.com/EXASOL_ODBC-7.0.11.tar.gz | tar -C downloads/ODBC --strip-components 1 -zxf - -RUN curl -s https://x-up.s3.amazonaws.com/7.x/7.1.14/EXASOL_JDBC-7.1.14.tar.gz | tar -C downloads/JDBC --strip-components 1 -zxf - -RUN curl -s https://exasol-script-languages-dependencies.s3.eu-central-1.amazonaws.com/EXAplus-7.0.11.tar.gz | tar -C downloads/EXAplus 
--strip-components 1 -zxf - + +# For latest drivers see https://downloads.exasol.com/clients-and-drivers/odbc + +RUN curl -s https://x-up.s3.amazonaws.com/7.x/24.1.1/Exasol_ODBC-24.1.1-Linux_x86_64.tar.gz | tar -C downloads/ODBC --strip-components 2 -zxf - +RUN curl -s https://x-up.s3.amazonaws.com/7.x/24.1.1/Exasol_JDBC-24.1.1.tar.gz | tar -C downloads/JDBC --strip-components 2 -zxf - +RUN curl -s https://x-up.s3.amazonaws.com/7.x/24.1.1/EXAplus-24.1.1.tar.gz | tar -C downloads/EXAplus --strip-components 2 -zxf - ENV EXAPLUS=/downloads/EXAplus/exaplus COPY deps/requirements.txt requirements.txt -RUN pip3 install -r "requirements.txt" +RUN python3.10 -m pip install pip --upgrade +RUN python3.10 -m pip install -r requirements.txt diff --git a/test/resources/test_container/full/build/deps/requirements.txt b/test/resources/test_container/full/build/deps/requirements.txt index 69717cf9..d83f68a2 100644 --- a/test/resources/test_container/full/build/deps/requirements.txt +++ b/test/resources/test_container/full/build/deps/requirements.txt @@ -1,8 +1,7 @@ -pyodbc<5.0.0 pytz lxml docker numpy scipy -requests==2.31.0 -https://github.com/exasol/exasol-python-test-framework/releases/download/0.1.0/exasol_python_test_framework-0.1.0-py3-none-any.whl +requests +exasol_python_test_framework diff --git a/test/test_run_db_test_builtin_languages.py b/test/test_run_db_test_builtin_languages.py index ca906919..a012cae6 100644 --- a/test/test_run_db_test_builtin_languages.py +++ b/test/test_run_db_test_builtin_languages.py @@ -15,8 +15,14 @@ def tearDown(self): utils.close_environments(self.test_environment) def test_builtin_languages(self): - command = f"{self.test_environment.executable} run-db-test --test-file test_builtin_languages.py " \ - f"{exaslct_utils.get_full_test_container_folder_parameter()}" + # optionally add "--reuse-test-environment" here + command = " ".join([ + str(self.test_environment.executable), + "run-db-test", + "--test-file", + "test_builtin_languages.py", + exaslct_utils.get_full_test_container_folder_parameter(), + ]) self.test_environment.run_command(command, track_task_dependencies=True)
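
The refactored test above assembles the `run-db-test` invocation from a list of parts instead of a wrapped f-string, which avoids stray line-continuation whitespace in the resulting command. A minimal sketch of that pattern follows; the helper name and the example argument values are illustrative only and not part of the repository, while the optional `--reuse-test-environment` flag is the one mentioned in the diff comment.

```python
# Minimal sketch of the command-building pattern used in the refactored test.
# The helper name and example argument values are hypothetical; only the
# list-join approach and the optional "--reuse-test-environment" flag are
# taken from the diff above.

def build_run_db_test_command(executable: str, test_file: str, *extra_args: str) -> str:
    """Join an exaslct `run-db-test` invocation from its individual parts.

    Building the command from a list keeps each argument on its own line in
    the source and sidesteps the whitespace pitfalls of multi-line f-strings.
    """
    parts = [executable, "run-db-test", "--test-file", test_file, *extra_args]
    return " ".join(parts)


if __name__ == "__main__":
    # Example invocation with the optional flag appended:
    print(build_run_db_test_command(
        "./exaslct",
        "test_builtin_languages.py",
        "--reuse-test-environment",
    ))
    # -> ./exaslct run-db-test --test-file test_builtin_languages.py --reuse-test-environment
```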