From f8a3514f83f4e3d08c6c36029c297db4d1169fa3 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 10:38:17 +0200
Subject: [PATCH 01/39] Add an actual clean command.

---
 docs/make.bat | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/docs/make.bat b/docs/make.bat
index 6247f7e231..3029a0fa56 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -11,6 +11,7 @@ set SOURCEDIR=source
 set BUILDDIR=build
 
 if "%1" == "" goto help
+if "%1" == "clean" goto clean
 
 %SPHINXBUILD% >NUL 2>NUL
 if errorlevel 9009 (
@@ -28,6 +29,30 @@ if errorlevel 9009 (
 %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
 goto end
 
+:clean
+echo.Cleaning files from previous build...
+IF EXIST "build" (
+    rmdir "build" /s /q
+)
+IF EXIST "source\images\auto-generated" (
+    rmdir "source\images\auto-generated" /s /q
+)
+IF EXIST "source\examples\07-python-operators\plugins" (
+    robocopy "source\examples\07-python-operators\plugins" "source\_temp\plugins" /E
+)
+IF EXIST "source\examples" (
+    rmdir "source\examples" /s /q
+)
+IF EXIST "source\_temp\plugins" (
+    robocopy "source\_temp\plugins" "source\examples\07-python-operators\plugins" /E
+)
+IF EXIST "source\_temp" (
+    rmdir "source\_temp" /s /q
+)
+
+echo.Done.
+goto end
+
 :help
 %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

From 5651022fc2d6fe23797b6c928d32d302f2b29fb0 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 10:40:58 +0200
Subject: [PATCH 02/39] Silence robocopy output in the clean command.

---
 docs/make.bat | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/make.bat b/docs/make.bat
index 3029a0fa56..1d97091ad5 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -38,13 +38,13 @@ IF EXIST "source\images\auto-generated" (
     rmdir "source\images\auto-generated" /s /q
 )
 IF EXIST "source\examples\07-python-operators\plugins" (
-    robocopy "source\examples\07-python-operators\plugins" "source\_temp\plugins" /E
+    robocopy "source\examples\07-python-operators\plugins" "source\_temp\plugins" /E >nul 2>&1
 )
 IF EXIST "source\examples" (
     rmdir "source\examples" /s /q
 )
 IF EXIST "source\_temp\plugins" (
-    robocopy "source\_temp\plugins" "source\examples\07-python-operators\plugins" /E
+    robocopy "source\_temp\plugins" "source\examples\07-python-operators\plugins" /E >nul 2>&1
 )
 IF EXIST "source\_temp" (
     rmdir "source\_temp" /s /q

From 59a7ec7dd2889aa3c830ce9ce92390c25aa87de1 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 10:58:35 +0200
Subject: [PATCH 03/39] Set default language to "en" in conf.py to remove a
 warning.

---
 docs/source/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index b01593e1b7..f2cc76007b 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -85,7 +85,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = "en"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
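PATCH 01 and PATCH 02 give docs/make.bat a real ``clean`` target: back up the
hand-written plugin sources with ``robocopy``, delete the build output and
every generated directory, then restore the plugins (PATCH 02 additionally
redirects the ``robocopy`` output to ``nul``, presumably to keep its per-file
copy log out of the build output). For readers more at home in Python, a
minimal cross-platform sketch of the same backup/clean/restore sequence
follows; the paths mirror those in make.bat, and the snippet is illustrative
only, not part of the series:

    # Illustrative Python equivalent of the :clean target above (not part of
    # the patch series). Assumes the docs/ layout shown in make.bat.
    import shutil
    from pathlib import Path

    source = Path("docs") / "source"
    plugins = source / "examples" / "07-python-operators" / "plugins"
    temp_plugins = source / "_temp" / "plugins"

    # Remove the previous build output and the auto-generated images.
    for stale in (Path("docs") / "build", source / "images" / "auto-generated"):
        if stale.exists():
            shutil.rmtree(stale)

    # Back up the hand-written plugins, wipe the generated examples, restore.
    if plugins.exists():
        shutil.copytree(plugins, temp_plugins, dirs_exist_ok=True)  # Python 3.8+
    if (source / "examples").exists():
        shutil.rmtree(source / "examples")
    if temp_plugins.exists():
        shutil.copytree(temp_plugins, plugins)  # recreates missing parents
        shutil.rmtree(source / "_temp")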
From a399da931a17204cce5651e9b7726a4c38866ef0 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 11:43:08 +0200
Subject: [PATCH 04/39] Fix 08-averaging/README.txt

---
 examples/08-averaging/README.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/08-averaging/README.txt b/examples/08-averaging/README.txt
index 60290c4205..ce736c6250 100644
--- a/examples/08-averaging/README.txt
+++ b/examples/08-averaging/README.txt
@@ -1,4 +1,4 @@
-.. _averaging_examples
+.. _averaging_examples:
 
 Averaging examples
 ==================

From a748683b2000dbc1a32172df7a6f16ab16c8ca11 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 11:44:38 +0200
Subject: [PATCH 05/39] Fix indent

---
 examples/08-averaging/00-compute_and_average.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/examples/08-averaging/00-compute_and_average.py b/examples/08-averaging/00-compute_and_average.py
index 712433f947..e32f13366a 100644
--- a/examples/08-averaging/00-compute_and_average.py
+++ b/examples/08-averaging/00-compute_and_average.py
@@ -146,8 +146,7 @@ def average_then_compute_von_mises(analysis):
 # Plot the results
 # ~~~~~~~~~~~~~~~~
 # Plot both von Mises stress fields side by side to compare them.
-# - The first plot displays the results when the equivalent stresses are calculated
-# first.
+# - The first plot displays the results when the equivalent stresses are calculated first.
 # - The second plot shows the results when the averaging is done first.
 #

From f122405f702e16d5223505c275df32f0b850c095 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 11:46:15 +0200
Subject: [PATCH 06/39] Fix indent

---
 examples/03-advanced/10-asme_secviii_divtwo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/03-advanced/10-asme_secviii_divtwo.py b/examples/03-advanced/10-asme_secviii_divtwo.py
index c65baf4229..d82e0478cd 100644
--- a/examples/03-advanced/10-asme_secviii_divtwo.py
+++ b/examples/03-advanced/10-asme_secviii_divtwo.py
@@ -41,7 +41,7 @@
 #
 # - ``alfasl`` = input("Introduce ``alfasl`` parameter from ASME\n")
 # - ``alfasl`` = float(alfasl)
-# -``m2`` = input("Introduce ``m2`` parameter from ASME\n")
+# - ``m2`` = input("Introduce ``m2`` parameter from ASME\n")
 # - ``m2`` = float(m2)
 #
 # For this exercise, ``alfasl`` = 2.2 and ``m2`` = .288, which is the same

From f4ba135911b6aae3a349f48eb3c971d1e9925140 Mon Sep 17 00:00:00 2001
From: PProfizi
Date: Fri, 7 Oct 2022 11:49:55 +0200
Subject: [PATCH 07/39] Fix warnings

---
 examples/03-advanced/04-extrapolation_stress_3d.py | 2 +-
 examples/03-advanced/05-extrapolation_strain_2d.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/03-advanced/04-extrapolation_stress_3d.py b/examples/03-advanced/04-extrapolation_stress_3d.py
index 036e7dc010..6d037cb486 100644
--- a/examples/03-advanced/04-extrapolation_stress_3d.py
+++ b/examples/03-advanced/04-extrapolation_stress_3d.py
@@ -69,7 +69,7 @@
 ###############################################################################
 # Nodal stress result of integration points
 ###############################################################################
-# The MAPLD command ``ERESX,NO``is used to copy directly the
+# The MAPDL command ``ERESX,NO`` is used to directly copy the
 # Gaussian (integration) points results to the nodes, instead of the
 # results at nodes or elements (which are interpolation of results at a
 # few gauss points).
diff --git a/examples/03-advanced/05-extrapolation_strain_2d.py b/examples/03-advanced/05-extrapolation_strain_2d.py index 71fddf10ec..57d68e2b4e 100644 --- a/examples/03-advanced/05-extrapolation_strain_2d.py +++ b/examples/03-advanced/05-extrapolation_strain_2d.py @@ -53,7 +53,7 @@ ############################################################################### # Extrapolate from integration points for elastic strain result # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# This example uses the ``gauss_to_node_fc``operator to compute nodal component +# This example uses the ``gauss_to_node_fc`` operator to compute nodal component # elastic strain results from the elastic strain at the integration points. # Create elastic strain operator to get strain result of integration points From 75032b0c1cbe84854fa682a919c800fc923c7df7 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 12:56:59 +0200 Subject: [PATCH 08/39] Exclude examples/python_plugins from APIDOC as well as examples/examples.py. Add verbose to make html. --- .ci/build_doc.bat | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.ci/build_doc.bat b/.ci/build_doc.bat index b299a1c361..59503c0146 100644 --- a/.ci/build_doc.bat +++ b/.ci/build_doc.bat @@ -3,10 +3,11 @@ call sphinx-apidoc -o ../docs/source/api ../ansys ../ansys/dpf/core/log.py ^ ../ansys/dpf/core/help.py ../ansys/dpf/core/mapping_types.py ../ansys/dpf/core/ipconfig.py ^ ../ansys/dpf/core/field_base.py ../ansys/dpf/core/cache.py ../ansys/dpf/core/misc.py ^ ../ansys/dpf/core/check_version.py ../ansys/dpf/core/operators/build.py ../ansys/dpf/core/operators/specification.py ^ -../ansys/dpf/core/vtk_helper.py ../ansys/dpf/core/label_space.py ^ +../ansys/dpf/core/vtk_helper.py ../ansys/dpf/core/label_space.py ../ansys/dpf/core/examples/python_plugins/* ^ +../ansys/dpf/core/examples/examples.py ^ -f --implicit-namespaces --separate --no-headings pushd . cd ../docs/ call make clean -call make html +call make html -v -v -v popd From 0f81110725b0242f8c9b95396c6cf036dcfb6da9 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 12:58:19 +0200 Subject: [PATCH 09/39] Indicate to autodoc that ansys.dpf.core.examples.python_plugins should be excluded. --- docs/source/conf.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f2cc76007b..aedbacb5fe 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -67,6 +67,7 @@ autosummary_generate = True +autodoc_mock_imports = ["ansys.dpf.core.examples.python_plugins"] # Add any paths that contain templates here, relative to this directory. 
# templates_path = ['_templates'] @@ -124,12 +125,16 @@ # -- Options for HTML output ------------------------------------------------- +html_short_title = html_title = "PyDPF-Core" html_theme = "ansys_sphinx_theme" html_logo = pyansys_logo_black html_theme_options = { - "github_url": "https://github.com/pyansys/DPF-Core", + "github_url": "https://github.com/pyansys/pydpf-core", "show_prev_next": False, - "logo_link": "https://dpfdocs.pyansys.com/" # navigate to the main page + "show_breadcrumbs": True, + "additional_breadcrumbs": [ + ("PyAnsys", "https://docs.pyansys.com/"), + ], } From 7352a5a33b9d79835e4e50654363f145359c9d62 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 12:59:08 +0200 Subject: [PATCH 10/39] Resolve sphinx warning --- docs/source/user_guide/operators.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/user_guide/operators.rst b/docs/source/user_guide/operators.rst index 4a6c70556a..bce9dceac2 100644 --- a/docs/source/user_guide/operators.rst +++ b/docs/source/user_guide/operators.rst @@ -126,7 +126,7 @@ You can create data sources in two ways: Because several other examples use the ``Model`` class, this example uses the -``DataSources``class: +``DataSources`` class: .. code-block:: python @@ -168,7 +168,7 @@ can also be connected to work on a temporal subset: Evaluate operators ~~~~~~~~~~~~~~~~~~ -With all the required inputs assigned, you can output the :class:`ansys.dpf.core.fields_container`_ +With all the required inputs assigned, you can output the :class:`ansys.dpf.core.fields_container` class from the operator: .. code-block:: python From ec1b3339ed5bbb7958e0037db039e82b6c1a8a0c Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 13:11:59 +0200 Subject: [PATCH 11/39] Try installing pandoc --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37a06712fc..aafdda3971 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -243,6 +243,10 @@ jobs: - name: "Setup Graphviz" uses: ts-graphviz/setup-graphviz@v1 + - name: "Install OS packages" + run: | + choco install pandoc + - name: "Install documentation packages for Python" run: | pip install -r requirements/requirements_docs.txt From 6f5e080f0cd8e7af735ac5398e37c63f561d9021 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 13:14:22 +0200 Subject: [PATCH 12/39] Comment-out other jobs --- .github/workflows/ci.yml | 834 +++++++++++++++++++-------------------- 1 file changed, 417 insertions(+), 417 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aafdda3971..4cf532ad84 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,170 +46,170 @@ jobs: run: | make flake8 - build_test: - name: "Build and Test" - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] - os: ["windows-latest", "ubuntu-latest"] - - steps: - - uses: actions/checkout@v3 - - - name: "Build Package" - uses: pyansys/pydpf-actions/build_package@v2.2.dev1 - with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{env.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} - dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheelhouse: true - extra-pip-args: ${{ env.extra }} - - - name: "Prepare Testing Environment" - uses: pyansys/pydpf-actions/prepare_tests@v2.2.dev1 - with: - DEBUG: true - - - name: "Test 
Docstrings" - uses: pyansys/pydpf-actions/test_docstrings@v2.2.dev1 - if: matrix.python-version != '3.10' && matrix.python-version != '3.7' && matrix.os != 'Linux' - with: - MODULE: ${{env.MODULE}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - - - name: "Separate long Core tests" - shell: pwsh - run: | - New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" - New-Item -Path ".\" -Name "test_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" - New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" - New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" - Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" - Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" - Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" - Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" - Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" - Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" - Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" - Remove-Item -Path "tests\test_server.py" - Remove-Item -Path "tests\test_launcher.py" - Remove-Item -Path "tests\test_local_server.py" - Remove-Item -Path "tests\test_multi_server.py" - Remove-Item -Path "tests\test_workflow.py" - Remove-Item -Path "tests\test_remote_workflow.py" - Remove-Item -Path "tests\test_remote_operator.py" - - - name: "Test API" - shell: bash - working-directory: tests - run: | - pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_launcher" - shell: bash - working-directory: test_launcher - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_server" - shell: bash - working-directory: test_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_local_server" - shell: bash - working-directory: test_local_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . 
- if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_multi_server" - shell: bash - working-directory: test_multi_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_remote_workflow" - shell: bash - working-directory: test_remote_workflow - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_remote_operator" - shell: bash - working-directory: test_remote_operator - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_workflow" - shell: bash - working-directory: test_workflow - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Upload Test Results" - uses: actions/upload-artifact@v2 - with: - name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ env.ANSYS_VERSION }} - path: tests/junit/test-results.xml - if: always() - - - name: "Upload coverage to Codecov" - uses: codecov/codecov-action@v3 +# build_test: +# name: "Build and Test" +# runs-on: ${{ matrix.os }} +# strategy: +# fail-fast: false +# matrix: +# python-version: ["3.7", "3.8", "3.9", "3.10"] +# os: ["windows-latest", "ubuntu-latest"] +# +# steps: +# - uses: actions/checkout@v3 +# +# - name: "Build Package" +# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 +# with: +# python-version: ${{ matrix.python-version }} +# ANSYS_VERSION: ${{env.ANSYS_VERSION}} +# PACKAGE_NAME: ${{env.PACKAGE_NAME}} +# MODULE: ${{env.MODULE}} +# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} +# install_extras: plotting +# wheelhouse: true +# extra-pip-args: ${{ env.extra }} +# +# - name: "Prepare Testing Environment" +# uses: pyansys/pydpf-actions/prepare_tests@v2.2.dev1 +# with: +# DEBUG: true +# +# - name: "Test Docstrings" +# uses: pyansys/pydpf-actions/test_docstrings@v2.2.dev1 +# if: matrix.python-version != '3.10' && matrix.python-version != '3.7' && matrix.os != 'Linux' +# with: +# MODULE: ${{env.MODULE}} +# PACKAGE_NAME: ${{env.PACKAGE_NAME}} +# +# - name: "Separate long Core tests" +# shell: pwsh +# run: | +# New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" +# New-Item -Path ".\" -Name "test_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" +# New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" +# New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" +# Copy-Item -Path 
"tests\conftest.py" -Destination ".\test_launcher\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" +# Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" +# Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" +# Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" +# Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" +# Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" +# Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" +# Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" +# Remove-Item -Path "tests\test_server.py" +# Remove-Item -Path "tests\test_launcher.py" +# Remove-Item -Path "tests\test_local_server.py" +# Remove-Item -Path "tests\test_multi_server.py" +# Remove-Item -Path "tests\test_workflow.py" +# Remove-Item -Path "tests\test_remote_workflow.py" +# Remove-Item -Path "tests\test_remote_operator.py" +# +# - name: "Test API" +# shell: bash +# working-directory: tests +# run: | +# pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_launcher" +# shell: bash +# working-directory: test_launcher +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_server" +# shell: bash +# working-directory: test_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_local_server" +# shell: bash +# working-directory: test_local_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_multi_server" +# shell: bash +# working-directory: test_multi_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . 
+# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_remote_workflow" +# shell: bash +# working-directory: test_remote_workflow +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_remote_operator" +# shell: bash +# working-directory: test_remote_operator +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_workflow" +# shell: bash +# working-directory: test_workflow +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Upload Test Results" +# uses: actions/upload-artifact@v2 +# with: +# name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ env.ANSYS_VERSION }} +# path: tests/junit/test-results.xml +# if: always() +# +# - name: "Upload coverage to Codecov" +# uses: codecov/codecov-action@v3 docs: name: "Documentation" @@ -308,259 +308,259 @@ jobs: name: HTML-doc-${{env.PACKAGE_NAME}} path: HTML-doc-${{env.PACKAGE_NAME}}.zip - run_examples: - name: "Run Examples with/without bin" - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.8"] - os: ["windows-latest"] - - steps: - - uses: actions/checkout@v3 - - - name: Setup Python - uses: actions/setup-python@v4.2.0 - with: - python-version: ${{ matrix.python-version }} - - - name: "Build Package" - uses: pyansys/pydpf-actions/build_package@v2.2.dev1 - with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{env.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} - dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheelhouse: false - wheel: false - extra-pip-args: ${{ env.extra }} - - - name: "Setup headless display" - uses: pyvista/setup-headless-display-action@v1 - - - name: "Check examples with gatebin" - shell: bash - run: | - echo on - cd .ci - ls . 
- python run_examples.py - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Uninstall gatebin" - shell: bash - run: | - pip uninstall -y ansys-dpf-gatebin - if: always() - - - name: "Check sanity without gatebin INPROCESS" - shell: bash - run: | - cd .ci - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: INPROCESS - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Check sanity without gatebin GRPC" - shell: bash - run: | - cd .ci - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: GRPC - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Check sanity without gatebin LEGACYGRPC" - shell: bash - run: | - cd .ci - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: LEGACYGRPC - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - retro: - name: "Retro-compatibility" - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ["3.8"] - os: ["windows-latest", "ubuntu-latest"] - ANSYS_VERSION: ["222", "221"] - steps: - - uses: actions/checkout@v3 - - - name: "Build Package" - uses: pyansys/pydpf-actions/build_package@v2.2.dev1 - with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{matrix.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} - dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: false - extra-pip-args: ${{ env.extra }} - - - name: "Install ansys-grpc-dpf==0.4.0" - shell: pwsh - run: | - pip install ansys-grpc-dpf==0.4.0 - if: matrix.ANSYS_VERSION == '221' - - - name: "Prepare Testing Environment" - uses: pyansys/pydpf-actions/prepare_tests@v2.2 - with: - DEBUG: true - - - name: "Separate long Core tests" - shell: pwsh - run: | - New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" - New-Item -Path ".\" -Name "test_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" - New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" - New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" - New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" - Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" - Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" - Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" - Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" - Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" - Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" - Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" - Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" 
- Remove-Item -Path "tests\test_server.py" - Remove-Item -Path "tests\test_launcher.py" - Remove-Item -Path "tests\test_local_server.py" - Remove-Item -Path "tests\test_multi_server.py" - Remove-Item -Path "tests\test_workflow.py" - Remove-Item -Path "tests\test_remote_workflow.py" - Remove-Item -Path "tests\test_remote_operator.py" - - - name: "Test API" - shell: bash - working-directory: tests - run: | - pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_launcher" - shell: bash - working-directory: test_launcher - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_server" - shell: bash - working-directory: test_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_local_server" - shell: bash - working-directory: test_local_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_multi_server" - shell: bash - working-directory: test_multi_server - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_remote_workflow" - shell: bash - working-directory: test_remote_workflow - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_remote_operator" - shell: bash - working-directory: test_remote_operator - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . - if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Test API test_workflow" - shell: bash - working-directory: test_workflow - run: | - pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . 
- if: always() - - - name: "Kill all servers" - uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 - if: always() - - - name: "Upload Test Results" - uses: actions/upload-artifact@v2 - with: - name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ matrix.ANSYS_VERSION }} - path: tests/junit/test-results.xml - if: always() - - - name: "Upload coverage to Codecov" - uses: codecov/codecov-action@v3 +# run_examples: +# name: "Run Examples with/without bin" +# runs-on: ${{ matrix.os }} +# strategy: +# fail-fast: false +# matrix: +# python-version: ["3.8"] +# os: ["windows-latest"] +# +# steps: +# - uses: actions/checkout@v3 +# +# - name: Setup Python +# uses: actions/setup-python@v4.2.0 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: "Build Package" +# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 +# with: +# python-version: ${{ matrix.python-version }} +# ANSYS_VERSION: ${{env.ANSYS_VERSION}} +# PACKAGE_NAME: ${{env.PACKAGE_NAME}} +# MODULE: ${{env.MODULE}} +# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} +# install_extras: plotting +# wheelhouse: false +# wheel: false +# extra-pip-args: ${{ env.extra }} +# +# - name: "Setup headless display" +# uses: pyvista/setup-headless-display-action@v1 +# +# - name: "Check examples with gatebin" +# shell: bash +# run: | +# echo on +# cd .ci +# ls . +# python run_examples.py +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Uninstall gatebin" +# shell: bash +# run: | +# pip uninstall -y ansys-dpf-gatebin +# if: always() +# +# - name: "Check sanity without gatebin INPROCESS" +# shell: bash +# run: | +# cd .ci +# python run_non_regression_examples.py +# env: +# DPF_SERVER_TYPE: INPROCESS +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Check sanity without gatebin GRPC" +# shell: bash +# run: | +# cd .ci +# python run_non_regression_examples.py +# env: +# DPF_SERVER_TYPE: GRPC +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Check sanity without gatebin LEGACYGRPC" +# shell: bash +# run: | +# cd .ci +# python run_non_regression_examples.py +# env: +# DPF_SERVER_TYPE: LEGACYGRPC +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() + +# retro: +# name: "Retro-compatibility" +# runs-on: ${{ matrix.os }} +# strategy: +# fail-fast: false +# matrix: +# python-version: ["3.8"] +# os: ["windows-latest", "ubuntu-latest"] +# ANSYS_VERSION: ["222", "221"] +# steps: +# - uses: actions/checkout@v3 +# +# - name: "Build Package" +# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 +# with: +# python-version: ${{ matrix.python-version }} +# ANSYS_VERSION: ${{matrix.ANSYS_VERSION}} +# PACKAGE_NAME: ${{env.PACKAGE_NAME}} +# MODULE: ${{env.MODULE}} +# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} +# install_extras: plotting +# wheel: false +# extra-pip-args: ${{ env.extra }} +# +# - name: "Install ansys-grpc-dpf==0.4.0" +# shell: pwsh +# run: | +# pip install ansys-grpc-dpf==0.4.0 +# if: matrix.ANSYS_VERSION == '221' +# +# - name: "Prepare Testing Environment" +# uses: pyansys/pydpf-actions/prepare_tests@v2.2 +# with: +# DEBUG: true +# +# - name: "Separate long Core tests" +# shell: pwsh +# run: | +# New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" +# New-Item -Path ".\" -Name 
"test_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" +# New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" +# New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" +# New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" +# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" +# Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" +# Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" +# Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" +# Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" +# Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" +# Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" +# Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" +# Remove-Item -Path "tests\test_server.py" +# Remove-Item -Path "tests\test_launcher.py" +# Remove-Item -Path "tests\test_local_server.py" +# Remove-Item -Path "tests\test_multi_server.py" +# Remove-Item -Path "tests\test_workflow.py" +# Remove-Item -Path "tests\test_remote_workflow.py" +# Remove-Item -Path "tests\test_remote_operator.py" +# +# - name: "Test API" +# shell: bash +# working-directory: tests +# run: | +# pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_launcher" +# shell: bash +# working-directory: test_launcher +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_server" +# shell: bash +# working-directory: test_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_local_server" +# shell: bash +# working-directory: test_local_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . 
+# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_multi_server" +# shell: bash +# working-directory: test_multi_server +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_remote_workflow" +# shell: bash +# working-directory: test_remote_workflow +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_remote_operator" +# shell: bash +# working-directory: test_remote_operator +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Test API test_workflow" +# shell: bash +# working-directory: test_workflow +# run: | +# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . +# if: always() +# +# - name: "Kill all servers" +# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 +# if: always() +# +# - name: "Upload Test Results" +# uses: actions/upload-artifact@v2 +# with: +# name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ matrix.ANSYS_VERSION }} +# path: tests/junit/test-results.xml +# if: always() +# +# - name: "Upload coverage to Codecov" +# uses: codecov/codecov-action@v3 draft_release: name: "Draft Release" From ce9d569cddbf7e9165778042875677c10ba108c0 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 13:15:11 +0200 Subject: [PATCH 13/39] Comment-out other jobs --- .github/workflows/ci.yml | 52 ++++++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4cf532ad84..403bd6a5fa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -562,32 +562,32 @@ jobs: # - name: "Upload coverage to Codecov" # uses: codecov/codecov-action@v3 - draft_release: - name: "Draft Release" - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') - needs: [style, build_test, docs, run_examples, retro] - runs-on: ubuntu-latest - steps: - - name: "Set up Python" - uses: actions/setup-python@v4.1.0 - with: - python-version: 3.9 - - - name: "Download artifacts" - uses: actions/download-artifact@v3 - - - name: "Display downloaded files" - run: ls -R - - - name: "Create draft release" - uses: softprops/action-gh-release@v1 - with: - files: | - ./**/*.whl - ./**/*.tar.gz - ./**/*.zip - draft: true - generate_release_notes: true +# draft_release: +# name: "Draft Release" +# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') +# needs: [style, build_test, docs, run_examples, retro] +# runs-on: ubuntu-latest +# steps: +# - name: "Set up Python" +# uses: actions/setup-python@v4.1.0 +# with: +# python-version: 3.9 +# +# - name: "Download 
artifacts" +# uses: actions/download-artifact@v3 +# +# - name: "Display downloaded files" +# run: ls -R +# +# - name: "Create draft release" +# uses: softprops/action-gh-release@v1 +# with: +# files: | +# ./**/*.whl +# ./**/*.tar.gz +# ./**/*.zip +# draft: true +# generate_release_notes: true # - name: "Upload to Test PyPi" # Change TOKEN # run: | From f8afb5ff95bfbf6ab7bca7605567457ec0f1b248 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 18:34:57 +0200 Subject: [PATCH 14/39] Add -P as argument to sphinx build, which runs the Python debugger pdb if an unhandled exception occurs while building. (https://www.sphinx-doc.org/en/master/man/sphinx-build.html#cmdoption-sphinx-build-P) --- .ci/build_doc.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ci/build_doc.bat b/.ci/build_doc.bat index 59503c0146..3fa73eaf3e 100644 --- a/.ci/build_doc.bat +++ b/.ci/build_doc.bat @@ -9,5 +9,5 @@ call sphinx-apidoc -o ../docs/source/api ../ansys ../ansys/dpf/core/log.py ^ pushd . cd ../docs/ call make clean -call make html -v -v -v +call make html -v -v -v -P popd From 54e788a372f42eb333cb0711d2c798488fe69d14 Mon Sep 17 00:00:00 2001 From: PProfizi Date: Fri, 7 Oct 2022 18:48:14 +0200 Subject: [PATCH 15/39] Add a reset function after each example equivalent to the kill-dpf-servers action. --- docs/source/conf.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index aedbacb5fe..158f18827f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -100,6 +100,22 @@ # -- Sphinx Gallery Options from sphinx_gallery.sorting import FileNameSortKey + +def reset_servers(gallery_conf, fname): + import psutil + proc_name = "Ans.Dpf.Grpc" + nb_procs = 0 + for proc in psutil.process_iter(): + try: + # check whether the process name matches + if proc_name in proc.name(): + proc.kill() + nb_procs += 1 + except psutil.NoSuchProcess: + pass + print(f"Killed {nb_procs} {proc_name} processes.") + + sphinx_gallery_conf = { # convert rst to md for ipynb "pypandoc": True, @@ -119,6 +135,8 @@ # 'first_notebook_cell': ("%matplotlib inline\n" # "from pyvista import set_plot_theme\n" # "set_plot_theme('document')"), + "reset_modules_order": 'after', + "reset_modules": (reset_servers,), } autodoc_member_order = "bysource" From 483a25c4752e7f57005621a4f3f377c30089c7b1 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 10:26:59 +0200 Subject: [PATCH 16/39] try shutting down servers manually. --- examples/00-basic/11-server_types.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/examples/00-basic/11-server_types.py b/examples/00-basic/11-server_types.py index 8e49f4cc83..a090e8534c 100644 --- a/examples/00-basic/11-server_types.py +++ b/examples/00-basic/11-server_types.py @@ -87,3 +87,10 @@ # Go back to default config: dpf.SERVER_CONFIGURATION = initial_config + +in_process_server.shutdown() +in_process_server = None +grpc_server.shutdown() +grpc_server = None +legacy_grpc_server.shutdown() +legacy_grpc_server = None From c24c430d69bc919b231330630425b632ec292c5d Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 10:37:25 +0200 Subject: [PATCH 17/39] Reset as it did not work. 
--- examples/00-basic/11-server_types.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/examples/00-basic/11-server_types.py b/examples/00-basic/11-server_types.py index a090e8534c..8e49f4cc83 100644 --- a/examples/00-basic/11-server_types.py +++ b/examples/00-basic/11-server_types.py @@ -87,10 +87,3 @@ # Go back to default config: dpf.SERVER_CONFIGURATION = initial_config - -in_process_server.shutdown() -in_process_server = None -grpc_server.shutdown() -grpc_server = None -legacy_grpc_server.shutdown() -legacy_grpc_server = None From 946f91c1ff09168aab59adc251f04254a58c21c6 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 10:37:40 +0200 Subject: [PATCH 18/39] Count servers both before and after each example --- docs/source/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 158f18827f..d698e73aa2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -109,11 +109,11 @@ def reset_servers(gallery_conf, fname): try: # check whether the process name matches if proc_name in proc.name(): - proc.kill() + # proc.kill() nb_procs += 1 except psutil.NoSuchProcess: pass - print(f"Killed {nb_procs} {proc_name} processes.") + print(f"Counted {nb_procs} {proc_name} processes.") sphinx_gallery_conf = { @@ -135,7 +135,7 @@ def reset_servers(gallery_conf, fname): # 'first_notebook_cell': ("%matplotlib inline\n" # "from pyvista import set_plot_theme\n" # "set_plot_theme('document')"), - "reset_modules_order": 'after', + "reset_modules_order": 'both', "reset_modules": (reset_servers,), } From ee1d2f506db5035732ffb7fe4b6c03eff5948b9b Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 11:10:21 +0200 Subject: [PATCH 19/39] Count servers both before and after each example --- docs/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index d698e73aa2..c51b66a409 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -101,7 +101,7 @@ from sphinx_gallery.sorting import FileNameSortKey -def reset_servers(gallery_conf, fname): +def reset_servers(gallery_conf, fname, when): import psutil proc_name = "Ans.Dpf.Grpc" nb_procs = 0 @@ -113,7 +113,7 @@ def reset_servers(gallery_conf, fname): nb_procs += 1 except psutil.NoSuchProcess: pass - print(f"Counted {nb_procs} {proc_name} processes.") + print(f"Counted {nb_procs} {proc_name} processes {when} the example.") sphinx_gallery_conf = { From 23c5ece9adef222aedadafee71f7075bc956f236 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 16:21:05 +0200 Subject: [PATCH 20/39] Comment 03-exchange_data_between_servers.py --- .../03-exchange_data_between_servers.py | 148 +++++++++--------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/examples/03-advanced/03-exchange_data_between_servers.py b/examples/03-advanced/03-exchange_data_between_servers.py index 0cb9ce4335..1eac78225a 100644 --- a/examples/03-advanced/03-exchange_data_between_servers.py +++ b/examples/03-advanced/03-exchange_data_between_servers.py @@ -13,77 +13,77 @@ from ansys.dpf.core import examples from ansys.dpf.core import operators as ops -############################################################################### -# Create two servers -# ~~~~~~~~~~~~~~~~~~ -# Use the ``start_local_server()`` method to start two servers on your local -# machine. If you have another server, you can use the ``connect_to_server()`` -# method to connect to any DPF server on your network. 
- -# The ``as_global`` attributes allows you to choose whether a server is stored -# by the module and used by default. This example sets the first server as the default. -server1 = dpf.start_local_server(as_global=True, config=dpf.AvailableServerConfigs.GrpcServer) -server2 = dpf.start_local_server(as_global=False, config=dpf.AvailableServerConfigs.GrpcServer) - -# Check that the two servers are listening on different ports. -print(server1.port if hasattr(server1, "port") else "", - server2.port if hasattr(server2, "port") else "") - -############################################################################### -# Send the result file -# ~~~~~~~~~~~~~~~~~~~~ -# The result file is sent to the temporary directory of the first server. -# This file upload is useless in this case because the two servers are local -# machines. -file = examples.complex_rst -file_path_in_tmp = dpf.upload_file_in_tmp_folder(file) - -############################################################################### -# Create a workflow on the first server -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Create the model -model = dpf.Model(file_path_in_tmp) - -# Read displacement -disp = model.results.displacement() -disp.inputs.time_scoping(len(model.metadata.time_freq_support.time_frequencies)) - -############################################################################### -# Create a workflow on the second server -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -# Change the Cartesian coordinates to cylindrical coordinates cs -coordinates = ops.geo.rotate_in_cylindrical_cs_fc(server=server2) - -# Create the Cartesian coordinate cs -cs = dpf.fields_factory.create_scalar_field(12, server=server2) -cs.data = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0] - -coordinates.inputs.coordinate_system(cs) - -# Choose the radial component to plot -comp = dpf.operators.logic.component_selector_fc(coordinates, 0, server=server2) - -############################################################################### -# Pass data from one server to another -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -fc_disp = disp.outputs.fields_container() -fc_copy = fc_disp.deep_copy(server=server2) - -mesh_copy = model.metadata.meshed_region.deep_copy(server=server2) -# give a mesh to the field -fc_copy[0].meshed_region = mesh_copy -fc_copy[1].meshed_region = mesh_copy - -coordinates.inputs.field(fc_copy) - -############################################################################### -# Plot the output -# ~~~~~~~~~~~~~~~ -out = comp.outputs.fields_container() - -# real part -mesh_copy.plot(out.get_field({"complex": 0})) - -# imaginary part -mesh_copy.plot(out.get_field({"complex": 1})) +# ############################################################################### +# # Create two servers +# # ~~~~~~~~~~~~~~~~~~ +# # Use the ``start_local_server()`` method to start two servers on your local +# # machine. If you have another server, you can use the ``connect_to_server()`` +# # method to connect to any DPF server on your network. +# +# # The ``as_global`` attributes allows you to choose whether a server is stored +# # by the module and used by default. This example sets the first server as the default. +# server1 = dpf.start_local_server(as_global=True, config=dpf.AvailableServerConfigs.GrpcServer) +# server2 = dpf.start_local_server(as_global=False, config=dpf.AvailableServerConfigs.GrpcServer) +# +# # Check that the two servers are listening on different ports. 
+# print(server1.port if hasattr(server1, "port") else "", +# server2.port if hasattr(server2, "port") else "") +# +# ############################################################################### +# # Send the result file +# # ~~~~~~~~~~~~~~~~~~~~ +# # The result file is sent to the temporary directory of the first server. +# # This file upload is useless in this case because the two servers are local +# # machines. +# file = examples.complex_rst +# file_path_in_tmp = dpf.upload_file_in_tmp_folder(file) +# +# ############################################################################### +# # Create a workflow on the first server +# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# # Create the model +# model = dpf.Model(file_path_in_tmp) +# +# # Read displacement +# disp = model.results.displacement() +# disp.inputs.time_scoping(len(model.metadata.time_freq_support.time_frequencies)) +# +# ############################################################################### +# # Create a workflow on the second server +# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# # Change the Cartesian coordinates to cylindrical coordinates cs +# coordinates = ops.geo.rotate_in_cylindrical_cs_fc(server=server2) +# +# # Create the Cartesian coordinate cs +# cs = dpf.fields_factory.create_scalar_field(12, server=server2) +# cs.data = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0] +# +# coordinates.inputs.coordinate_system(cs) +# +# # Choose the radial component to plot +# comp = dpf.operators.logic.component_selector_fc(coordinates, 0, server=server2) +# +# ############################################################################### +# # Pass data from one server to another +# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# fc_disp = disp.outputs.fields_container() +# fc_copy = fc_disp.deep_copy(server=server2) +# +# mesh_copy = model.metadata.meshed_region.deep_copy(server=server2) +# # give a mesh to the field +# fc_copy[0].meshed_region = mesh_copy +# fc_copy[1].meshed_region = mesh_copy +# +# coordinates.inputs.field(fc_copy) +# +# ############################################################################### +# # Plot the output +# # ~~~~~~~~~~~~~~~ +# out = comp.outputs.fields_container() +# +# # real part +# mesh_copy.plot(out.get_field({"complex": 0})) +# +# # imaginary part +# mesh_copy.plot(out.get_field({"complex": 1})) From 89d4f75933b67b3643d56888327d6dff8131d9d9 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 16:21:49 +0200 Subject: [PATCH 21/39] Comment 11-server_types.py --- examples/00-basic/11-server_types.py | 128 +++++++++++++-------------- 1 file changed, 64 insertions(+), 64 deletions(-) diff --git a/examples/00-basic/11-server_types.py b/examples/00-basic/11-server_types.py index 8e49f4cc83..2fe9d62eff 100644 --- a/examples/00-basic/11-server_types.py +++ b/examples/00-basic/11-server_types.py @@ -23,67 +23,67 @@ """ from ansys.dpf import core as dpf - -############################################################################### -# Start servers with custom server configuration -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -in_process_config = dpf.AvailableServerConfigs.InProcessServer -grpc_config = dpf.AvailableServerConfigs.GrpcServer -legacy_grpc_config = dpf.AvailableServerConfigs.LegacyGrpcServer - -in_process_server = dpf.start_local_server(config=in_process_config) -grpc_server = dpf.start_local_server(config=grpc_config) -legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config) - 
-############################################################################### -# Equivalent to: - -in_process_config = dpf.ServerConfig( - protocol=None, legacy=False -) -grpc_config = dpf.ServerConfig( - protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=False -) -legacy_grpc_config = dpf.ServerConfig( - protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=True -) - -in_process_server = dpf.start_local_server(config=in_process_config, as_global=False) -grpc_server = dpf.start_local_server(config=grpc_config, as_global=False) -legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config, as_global=False) - -############################################################################### -# Create data on different servers -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -in_process_field = dpf.fields_factory.create_scalar_field(2, server=in_process_server) -in_process_field.append([1.], 1) -in_process_field.append([2.], 2) -grpc_field = dpf.fields_factory.create_scalar_field(2, server=grpc_server) -grpc_field.append([1.], 1) -grpc_field.append([2.], 2) -legacy_grpc_field = dpf.fields_factory.create_scalar_field(2, server=legacy_grpc_server) -legacy_grpc_field.append([1.], 1) -legacy_grpc_field.append([2.], 2) - -print(in_process_field, type(in_process_field._server), in_process_field._server) -print(grpc_field, type(grpc_field._server), grpc_field._server) -print(legacy_grpc_field, type(legacy_grpc_field._server), legacy_grpc_field._server) - -############################################################################### -# Choose default configuration -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Once a default configuration is chosen, a server of the chosen type is -# automatically started when a DPF object is created: - -initial_config = dpf.SERVER_CONFIGURATION - -dpf.SERVER_CONFIGURATION = dpf.AvailableServerConfigs.GrpcServer -grpc_field = dpf.fields_factory.create_scalar_field(2) -grpc_field.append([1.], 1) -grpc_field.append([2.], 2) -print(grpc_field, type(grpc_field._server), grpc_field._server) - -# Go back to default config: -dpf.SERVER_CONFIGURATION = initial_config +# +# ############################################################################### +# # Start servers with custom server configuration +# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# in_process_config = dpf.AvailableServerConfigs.InProcessServer +# grpc_config = dpf.AvailableServerConfigs.GrpcServer +# legacy_grpc_config = dpf.AvailableServerConfigs.LegacyGrpcServer +# +# in_process_server = dpf.start_local_server(config=in_process_config) +# grpc_server = dpf.start_local_server(config=grpc_config) +# legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config) +# +# ############################################################################### +# # Equivalent to: +# +# in_process_config = dpf.ServerConfig( +# protocol=None, legacy=False +# ) +# grpc_config = dpf.ServerConfig( +# protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=False +# ) +# legacy_grpc_config = dpf.ServerConfig( +# protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=True +# ) +# +# in_process_server = dpf.start_local_server(config=in_process_config, as_global=False) +# grpc_server = dpf.start_local_server(config=grpc_config, as_global=False) +# legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config, as_global=False) +# +# ############################################################################### +# # Create data on different servers +# # 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# in_process_field = dpf.fields_factory.create_scalar_field(2, server=in_process_server) +# in_process_field.append([1.], 1) +# in_process_field.append([2.], 2) +# grpc_field = dpf.fields_factory.create_scalar_field(2, server=grpc_server) +# grpc_field.append([1.], 1) +# grpc_field.append([2.], 2) +# legacy_grpc_field = dpf.fields_factory.create_scalar_field(2, server=legacy_grpc_server) +# legacy_grpc_field.append([1.], 1) +# legacy_grpc_field.append([2.], 2) +# +# print(in_process_field, type(in_process_field._server), in_process_field._server) +# print(grpc_field, type(grpc_field._server), grpc_field._server) +# print(legacy_grpc_field, type(legacy_grpc_field._server), legacy_grpc_field._server) +# +# ############################################################################### +# # Choose default configuration +# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# # Once a default configuration is chosen, a server of the chosen type is +# # automatically started when a DPF object is created: +# +# initial_config = dpf.SERVER_CONFIGURATION +# +# dpf.SERVER_CONFIGURATION = dpf.AvailableServerConfigs.GrpcServer +# grpc_field = dpf.fields_factory.create_scalar_field(2) +# grpc_field.append([1.], 1) +# grpc_field.append([2.], 2) +# print(grpc_field, type(grpc_field._server), grpc_field._server) +# +# # Go back to default config: +# dpf.SERVER_CONFIGURATION = initial_config From 7a8935b626e298db88fa030f35bb4ecc5c71f71d Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Mon, 10 Oct 2022 18:28:49 +0200 Subject: [PATCH 22/39] Add a waiting time for servers to close. --- docs/source/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index c51b66a409..3d18890f08 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -103,6 +103,8 @@ def reset_servers(gallery_conf, fname, when): import psutil + import time + time.sleep(5) proc_name = "Ans.Dpf.Grpc" nb_procs = 0 for proc in psutil.process_iter(): From 62999b4fed221e29c527c8b3895f7ef6b9a0341a Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 11:40:19 +0200 Subject: [PATCH 23/39] Add garbage collection and call shutdown_all_session_servers after each example. --- docs/source/conf.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 3d18890f08..a0c8e097ee 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -103,8 +103,13 @@ def reset_servers(gallery_conf, fname, when): import psutil - import time - time.sleep(5) + from ansys.dpf.core import server + import gc + + if when == "after": + gc.collect() + server.shutdown_all_session_servers() + proc_name = "Ans.Dpf.Grpc" nb_procs = 0 for proc in psutil.process_iter(): From 0c5a2e1e79ea197a54b5faf04454da6b570aee1e Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 11:40:44 +0200 Subject: [PATCH 24/39] Revert "Comment 11-server_types.py" This reverts commit 89d4f75933b67b3643d56888327d6dff8131d9d9. 
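Note: restoring 11-server_types.py also restores the only gallery coverage of
the three local server configurations. The remote counterpart mentioned in the
03-exchange example further below, ``connect_to_server()``, is not shown in any
hunk here; a minimal sketch of its use, with the host and port assumed:

    from ansys.dpf import core as dpf

    # Hypothetical endpoint: attach to an already-running DPF gRPC server
    # instead of starting a new local one.
    server = dpf.connect_to_server(ip="127.0.0.1", port=50054, as_global=False)
    field = dpf.fields_factory.create_scalar_field(2, server=server)
    field.append([1.], 1)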
--- examples/00-basic/11-server_types.py | 128 +++++++++++++-------------- 1 file changed, 64 insertions(+), 64 deletions(-) diff --git a/examples/00-basic/11-server_types.py b/examples/00-basic/11-server_types.py index 2fe9d62eff..8e49f4cc83 100644 --- a/examples/00-basic/11-server_types.py +++ b/examples/00-basic/11-server_types.py @@ -23,67 +23,67 @@ """ from ansys.dpf import core as dpf -# -# ############################################################################### -# # Start servers with custom server configuration -# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# -# in_process_config = dpf.AvailableServerConfigs.InProcessServer -# grpc_config = dpf.AvailableServerConfigs.GrpcServer -# legacy_grpc_config = dpf.AvailableServerConfigs.LegacyGrpcServer -# -# in_process_server = dpf.start_local_server(config=in_process_config) -# grpc_server = dpf.start_local_server(config=grpc_config) -# legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config) -# -# ############################################################################### -# # Equivalent to: -# -# in_process_config = dpf.ServerConfig( -# protocol=None, legacy=False -# ) -# grpc_config = dpf.ServerConfig( -# protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=False -# ) -# legacy_grpc_config = dpf.ServerConfig( -# protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=True -# ) -# -# in_process_server = dpf.start_local_server(config=in_process_config, as_global=False) -# grpc_server = dpf.start_local_server(config=grpc_config, as_global=False) -# legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config, as_global=False) -# -# ############################################################################### -# # Create data on different servers -# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# -# in_process_field = dpf.fields_factory.create_scalar_field(2, server=in_process_server) -# in_process_field.append([1.], 1) -# in_process_field.append([2.], 2) -# grpc_field = dpf.fields_factory.create_scalar_field(2, server=grpc_server) -# grpc_field.append([1.], 1) -# grpc_field.append([2.], 2) -# legacy_grpc_field = dpf.fields_factory.create_scalar_field(2, server=legacy_grpc_server) -# legacy_grpc_field.append([1.], 1) -# legacy_grpc_field.append([2.], 2) -# -# print(in_process_field, type(in_process_field._server), in_process_field._server) -# print(grpc_field, type(grpc_field._server), grpc_field._server) -# print(legacy_grpc_field, type(legacy_grpc_field._server), legacy_grpc_field._server) -# -# ############################################################################### -# # Choose default configuration -# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# # Once a default configuration is chosen, a server of the chosen type is -# # automatically started when a DPF object is created: -# -# initial_config = dpf.SERVER_CONFIGURATION -# -# dpf.SERVER_CONFIGURATION = dpf.AvailableServerConfigs.GrpcServer -# grpc_field = dpf.fields_factory.create_scalar_field(2) -# grpc_field.append([1.], 1) -# grpc_field.append([2.], 2) -# print(grpc_field, type(grpc_field._server), grpc_field._server) -# -# # Go back to default config: -# dpf.SERVER_CONFIGURATION = initial_config + +############################################################################### +# Start servers with custom server configuration +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +in_process_config = dpf.AvailableServerConfigs.InProcessServer +grpc_config = dpf.AvailableServerConfigs.GrpcServer +legacy_grpc_config = 
dpf.AvailableServerConfigs.LegacyGrpcServer + +in_process_server = dpf.start_local_server(config=in_process_config) +grpc_server = dpf.start_local_server(config=grpc_config) +legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config) + +############################################################################### +# Equivalent to: + +in_process_config = dpf.ServerConfig( + protocol=None, legacy=False +) +grpc_config = dpf.ServerConfig( + protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=False +) +legacy_grpc_config = dpf.ServerConfig( + protocol=dpf.server_factory.CommunicationProtocols.gRPC, legacy=True +) + +in_process_server = dpf.start_local_server(config=in_process_config, as_global=False) +grpc_server = dpf.start_local_server(config=grpc_config, as_global=False) +legacy_grpc_server = dpf.start_local_server(config=legacy_grpc_config, as_global=False) + +############################################################################### +# Create data on different servers +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +in_process_field = dpf.fields_factory.create_scalar_field(2, server=in_process_server) +in_process_field.append([1.], 1) +in_process_field.append([2.], 2) +grpc_field = dpf.fields_factory.create_scalar_field(2, server=grpc_server) +grpc_field.append([1.], 1) +grpc_field.append([2.], 2) +legacy_grpc_field = dpf.fields_factory.create_scalar_field(2, server=legacy_grpc_server) +legacy_grpc_field.append([1.], 1) +legacy_grpc_field.append([2.], 2) + +print(in_process_field, type(in_process_field._server), in_process_field._server) +print(grpc_field, type(grpc_field._server), grpc_field._server) +print(legacy_grpc_field, type(legacy_grpc_field._server), legacy_grpc_field._server) + +############################################################################### +# Choose default configuration +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Once a default configuration is chosen, a server of the chosen type is +# automatically started when a DPF object is created: + +initial_config = dpf.SERVER_CONFIGURATION + +dpf.SERVER_CONFIGURATION = dpf.AvailableServerConfigs.GrpcServer +grpc_field = dpf.fields_factory.create_scalar_field(2) +grpc_field.append([1.], 1) +grpc_field.append([2.], 2) +print(grpc_field, type(grpc_field._server), grpc_field._server) + +# Go back to default config: +dpf.SERVER_CONFIGURATION = initial_config From 4d7e2bacf052b67df010a39849c6d68c42c3b59c Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 11:40:49 +0200 Subject: [PATCH 25/39] Revert "Comment 03-exchange_data_between_servers.py" This reverts commit 23c5ece9adef222aedadafee71f7075bc956f236. --- .../03-exchange_data_between_servers.py | 148 +++++++++--------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/examples/03-advanced/03-exchange_data_between_servers.py b/examples/03-advanced/03-exchange_data_between_servers.py index 1eac78225a..0cb9ce4335 100644 --- a/examples/03-advanced/03-exchange_data_between_servers.py +++ b/examples/03-advanced/03-exchange_data_between_servers.py @@ -13,77 +13,77 @@ from ansys.dpf.core import examples from ansys.dpf.core import operators as ops -# ############################################################################### -# # Create two servers -# # ~~~~~~~~~~~~~~~~~~ -# # Use the ``start_local_server()`` method to start two servers on your local -# # machine. If you have another server, you can use the ``connect_to_server()`` -# # method to connect to any DPF server on your network. 
-
-
-# # The ``as_global`` attributes allows you to choose whether a server is stored
-# # by the module and used by default. This example sets the first server as the default.
-# server1 = dpf.start_local_server(as_global=True, config=dpf.AvailableServerConfigs.GrpcServer)
-# server2 = dpf.start_local_server(as_global=False, config=dpf.AvailableServerConfigs.GrpcServer)
-#
-# # Check that the two servers are listening on different ports.
-# print(server1.port if hasattr(server1, "port") else "",
-#       server2.port if hasattr(server2, "port") else "")
-#
-# ###############################################################################
-# # Send the result file
-# # ~~~~~~~~~~~~~~~~~~~~
-# # The result file is sent to the temporary directory of the first server.
-# # This file upload is useless in this case because the two servers are local
-# # machines.
-# file = examples.complex_rst
-# file_path_in_tmp = dpf.upload_file_in_tmp_folder(file)
-#
-# ###############################################################################
-# # Create a workflow on the first server
-# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# # Create the model
-# model = dpf.Model(file_path_in_tmp)
-#
-# # Read displacement
-# disp = model.results.displacement()
-# disp.inputs.time_scoping(len(model.metadata.time_freq_support.time_frequencies))
-#
-# ###############################################################################
-# # Create a workflow on the second server
-# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-#
-# # Change the Cartesian coordinates to cylindrical coordinates cs
-# coordinates = ops.geo.rotate_in_cylindrical_cs_fc(server=server2)
-#
-# # Create the Cartesian coordinate cs
-# cs = dpf.fields_factory.create_scalar_field(12, server=server2)
-# cs.data = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]
-#
-# coordinates.inputs.coordinate_system(cs)
-#
-# # Choose the radial component to plot
-# comp = dpf.operators.logic.component_selector_fc(coordinates, 0, server=server2)
-#
-# ###############################################################################
-# # Pass data from one server to another
-# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# fc_disp = disp.outputs.fields_container()
-# fc_copy = fc_disp.deep_copy(server=server2)
-#
-# mesh_copy = model.metadata.meshed_region.deep_copy(server=server2)
-# # give a mesh to the field
-# fc_copy[0].meshed_region = mesh_copy
-# fc_copy[1].meshed_region = mesh_copy
-#
-# coordinates.inputs.field(fc_copy)
-#
-# ###############################################################################
-# # Plot the output
-# # ~~~~~~~~~~~~~~~
-# out = comp.outputs.fields_container()
-#
-# # real part
-# mesh_copy.plot(out.get_field({"complex": 0}))
-#
-# # imaginary part
-# mesh_copy.plot(out.get_field({"complex": 1}))
+###############################################################################
+# Create two servers
+# ~~~~~~~~~~~~~~~~~~
+# Use the ``start_local_server()`` method to start two servers on your local
+# machine. If you have another server, you can use the ``connect_to_server()``
+# method to connect to any DPF server on your network.
+
+# The ``as_global`` attribute allows you to choose whether a server is stored
+# by the module and used by default. This example sets the first server as the default.
+server1 = dpf.start_local_server(as_global=True, config=dpf.AvailableServerConfigs.GrpcServer)
+server2 = dpf.start_local_server(as_global=False, config=dpf.AvailableServerConfigs.GrpcServer)
+
+# Check that the two servers are listening on different ports.
+print(server1.port if hasattr(server1, "port") else "",
+      server2.port if hasattr(server2, "port") else "")
+
+###############################################################################
+# Send the result file
+# ~~~~~~~~~~~~~~~~~~~~
+# The result file is sent to the temporary directory of the first server.
+# This file upload is unnecessary in this case because both servers run on
+# the same local machine.
+file = examples.complex_rst
+file_path_in_tmp = dpf.upload_file_in_tmp_folder(file)
+
+###############################################################################
+# Create a workflow on the first server
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Create the model
+model = dpf.Model(file_path_in_tmp)
+
+# Read displacement
+disp = model.results.displacement()
+disp.inputs.time_scoping(len(model.metadata.time_freq_support.time_frequencies))
+
+###############################################################################
+# Create a workflow on the second server
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+# Change the Cartesian coordinates to cylindrical coordinates cs
+coordinates = ops.geo.rotate_in_cylindrical_cs_fc(server=server2)
+
+# Create the Cartesian coordinate cs
+cs = dpf.fields_factory.create_scalar_field(12, server=server2)
+cs.data = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]
+
+coordinates.inputs.coordinate_system(cs)
+
+# Choose the radial component to plot
+comp = dpf.operators.logic.component_selector_fc(coordinates, 0, server=server2)
+
+###############################################################################
+# Pass data from one server to another
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+fc_disp = disp.outputs.fields_container()
+fc_copy = fc_disp.deep_copy(server=server2)
+
+mesh_copy = model.metadata.meshed_region.deep_copy(server=server2)
+# give a mesh to the field
+fc_copy[0].meshed_region = mesh_copy
+fc_copy[1].meshed_region = mesh_copy
+
+coordinates.inputs.field(fc_copy)
+
+###############################################################################
+# Plot the output
+# ~~~~~~~~~~~~~~~
+out = comp.outputs.fields_container()
+
+# real part
+mesh_copy.plot(out.get_field({"complex": 0}))
+
+# imaginary part
+mesh_copy.plot(out.get_field({"complex": 1}))
From 78b20ba37aeabf073edebd391dbf51321dada553 Mon Sep 17 00:00:00 2001
From: "paul.profizi"
Date: Wed, 12 Oct 2022 12:36:04 +0200
Subject: [PATCH 26/39] Shut servers down and collect garbage before the examples too.

---
 docs/source/conf.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index a0c8e097ee..1b452cccc6 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -106,9 +106,8 @@ def reset_servers(gallery_conf, fname, when):
     from ansys.dpf.core import server
     import gc

-    if when == "after":
-        gc.collect()
-        server.shutdown_all_session_servers()
+    gc.collect()
+    server.shutdown_all_session_servers()

     proc_name = "Ans.Dpf.Grpc"
     nb_procs = 0
From c037168dd20f36b912e8c750f156d0f970d9a376 Mon Sep 17 00:00:00 2001
From: "paul.profizi"
Date: Wed, 12 Oct 2022 13:13:39 +0200
Subject: [PATCH 27/39] Revert "Comment-out other jobs"

This reverts commit ce9d569cddbf7e9165778042875677c10ba108c0.
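After this change the Sphinx-Gallery reset hook runs the same cleanup both
before and after every example. Assembled from the conf.py hunks above, the
hook now looks roughly like this (a sketch: the body of the process loop is
not visible in the hunks, so the kill logic below is an assumption):

    def reset_servers(gallery_conf, fname, when):
        import gc
        import psutil
        from ansys.dpf.core import server

        # Drop client-side references, then stop every server started by
        # the surrounding example; "when" is either "before" or "after".
        gc.collect()
        server.shutdown_all_session_servers()

        # Sweep for leftover Ans.Dpf.Grpc processes (assumed loop body).
        proc_name = "Ans.Dpf.Grpc"
        nb_procs = 0
        for proc in psutil.process_iter():
            try:
                if proc_name in proc.name():
                    proc.kill()
                    nb_procs += 1
            except psutil.NoSuchProcess:
                pass

Shutting servers down through the API first makes the psutil sweep a safety
net rather than the primary cleanup path.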
--- .github/workflows/ci.yml | 52 ++++++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 403bd6a5fa..4cf532ad84 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -562,32 +562,32 @@ jobs: # - name: "Upload coverage to Codecov" # uses: codecov/codecov-action@v3 -# draft_release: -# name: "Draft Release" -# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') -# needs: [style, build_test, docs, run_examples, retro] -# runs-on: ubuntu-latest -# steps: -# - name: "Set up Python" -# uses: actions/setup-python@v4.1.0 -# with: -# python-version: 3.9 -# -# - name: "Download artifacts" -# uses: actions/download-artifact@v3 -# -# - name: "Display downloaded files" -# run: ls -R -# -# - name: "Create draft release" -# uses: softprops/action-gh-release@v1 -# with: -# files: | -# ./**/*.whl -# ./**/*.tar.gz -# ./**/*.zip -# draft: true -# generate_release_notes: true + draft_release: + name: "Draft Release" + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') + needs: [style, build_test, docs, run_examples, retro] + runs-on: ubuntu-latest + steps: + - name: "Set up Python" + uses: actions/setup-python@v4.1.0 + with: + python-version: 3.9 + + - name: "Download artifacts" + uses: actions/download-artifact@v3 + + - name: "Display downloaded files" + run: ls -R + + - name: "Create draft release" + uses: softprops/action-gh-release@v1 + with: + files: | + ./**/*.whl + ./**/*.tar.gz + ./**/*.zip + draft: true + generate_release_notes: true # - name: "Upload to Test PyPi" # Change TOKEN # run: | From 60874604feef1f2aa09dea490f540608baea9cc6 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 13:13:43 +0200 Subject: [PATCH 28/39] Revert "Comment-out other jobs" This reverts commit 6f5e080f0cd8e7af735ac5398e37c63f561d9021. 
--- .github/workflows/ci.yml | 834 +++++++++++++++++++-------------------- 1 file changed, 417 insertions(+), 417 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4cf532ad84..aafdda3971 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,170 +46,170 @@ jobs: run: | make flake8 -# build_test: -# name: "Build and Test" -# runs-on: ${{ matrix.os }} -# strategy: -# fail-fast: false -# matrix: -# python-version: ["3.7", "3.8", "3.9", "3.10"] -# os: ["windows-latest", "ubuntu-latest"] -# -# steps: -# - uses: actions/checkout@v3 -# -# - name: "Build Package" -# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 -# with: -# python-version: ${{ matrix.python-version }} -# ANSYS_VERSION: ${{env.ANSYS_VERSION}} -# PACKAGE_NAME: ${{env.PACKAGE_NAME}} -# MODULE: ${{env.MODULE}} -# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} -# install_extras: plotting -# wheelhouse: true -# extra-pip-args: ${{ env.extra }} -# -# - name: "Prepare Testing Environment" -# uses: pyansys/pydpf-actions/prepare_tests@v2.2.dev1 -# with: -# DEBUG: true -# -# - name: "Test Docstrings" -# uses: pyansys/pydpf-actions/test_docstrings@v2.2.dev1 -# if: matrix.python-version != '3.10' && matrix.python-version != '3.7' && matrix.os != 'Linux' -# with: -# MODULE: ${{env.MODULE}} -# PACKAGE_NAME: ${{env.PACKAGE_NAME}} -# -# - name: "Separate long Core tests" -# shell: pwsh -# run: | -# New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" -# New-Item -Path ".\" -Name "test_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" -# New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" -# New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" -# Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" -# Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" -# Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" -# Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" -# Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" -# Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" -# Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" -# Remove-Item -Path "tests\test_server.py" -# Remove-Item -Path "tests\test_launcher.py" -# Remove-Item -Path "tests\test_local_server.py" -# Remove-Item -Path "tests\test_multi_server.py" -# Remove-Item -Path "tests\test_workflow.py" -# Remove-Item -Path "tests\test_remote_workflow.py" -# Remove-Item -Path "tests\test_remote_operator.py" -# -# - name: "Test API" -# shell: bash -# working-directory: tests -# run: | -# pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml 
--reruns 2 . -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_launcher" -# shell: bash -# working-directory: test_launcher -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_server" -# shell: bash -# working-directory: test_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_local_server" -# shell: bash -# working-directory: test_local_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_multi_server" -# shell: bash -# working-directory: test_multi_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_remote_workflow" -# shell: bash -# working-directory: test_remote_workflow -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_remote_operator" -# shell: bash -# working-directory: test_remote_operator -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_workflow" -# shell: bash -# working-directory: test_workflow -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . 
-# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Upload Test Results" -# uses: actions/upload-artifact@v2 -# with: -# name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ env.ANSYS_VERSION }} -# path: tests/junit/test-results.xml -# if: always() -# -# - name: "Upload coverage to Codecov" -# uses: codecov/codecov-action@v3 + build_test: + name: "Build and Test" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + os: ["windows-latest", "ubuntu-latest"] + + steps: + - uses: actions/checkout@v3 + + - name: "Build Package" + uses: pyansys/pydpf-actions/build_package@v2.2.dev1 + with: + python-version: ${{ matrix.python-version }} + ANSYS_VERSION: ${{env.ANSYS_VERSION}} + PACKAGE_NAME: ${{env.PACKAGE_NAME}} + MODULE: ${{env.MODULE}} + dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} + install_extras: plotting + wheelhouse: true + extra-pip-args: ${{ env.extra }} + + - name: "Prepare Testing Environment" + uses: pyansys/pydpf-actions/prepare_tests@v2.2.dev1 + with: + DEBUG: true + + - name: "Test Docstrings" + uses: pyansys/pydpf-actions/test_docstrings@v2.2.dev1 + if: matrix.python-version != '3.10' && matrix.python-version != '3.7' && matrix.os != 'Linux' + with: + MODULE: ${{env.MODULE}} + PACKAGE_NAME: ${{env.PACKAGE_NAME}} + + - name: "Separate long Core tests" + shell: pwsh + run: | + New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" + New-Item -Path ".\" -Name "test_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" + New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" + New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" + Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" + Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" + Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" + Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" + Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" + Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" + Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" + Remove-Item -Path "tests\test_server.py" + Remove-Item -Path "tests\test_launcher.py" + Remove-Item -Path "tests\test_local_server.py" + Remove-Item -Path "tests\test_multi_server.py" + Remove-Item -Path "tests\test_workflow.py" + Remove-Item -Path "tests\test_remote_workflow.py" + Remove-Item -Path "tests\test_remote_operator.py" + + - name: "Test API" + shell: bash + working-directory: tests + run: | + pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html 
--log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_launcher" + shell: bash + working-directory: test_launcher + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_server" + shell: bash + working-directory: test_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_local_server" + shell: bash + working-directory: test_local_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_multi_server" + shell: bash + working-directory: test_multi_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_remote_workflow" + shell: bash + working-directory: test_remote_workflow + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_remote_operator" + shell: bash + working-directory: test_remote_operator + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_workflow" + shell: bash + working-directory: test_workflow + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . 
+ if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Upload Test Results" + uses: actions/upload-artifact@v2 + with: + name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ env.ANSYS_VERSION }} + path: tests/junit/test-results.xml + if: always() + + - name: "Upload coverage to Codecov" + uses: codecov/codecov-action@v3 docs: name: "Documentation" @@ -308,259 +308,259 @@ jobs: name: HTML-doc-${{env.PACKAGE_NAME}} path: HTML-doc-${{env.PACKAGE_NAME}}.zip -# run_examples: -# name: "Run Examples with/without bin" -# runs-on: ${{ matrix.os }} -# strategy: -# fail-fast: false -# matrix: -# python-version: ["3.8"] -# os: ["windows-latest"] -# -# steps: -# - uses: actions/checkout@v3 -# -# - name: Setup Python -# uses: actions/setup-python@v4.2.0 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: "Build Package" -# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 -# with: -# python-version: ${{ matrix.python-version }} -# ANSYS_VERSION: ${{env.ANSYS_VERSION}} -# PACKAGE_NAME: ${{env.PACKAGE_NAME}} -# MODULE: ${{env.MODULE}} -# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} -# install_extras: plotting -# wheelhouse: false -# wheel: false -# extra-pip-args: ${{ env.extra }} -# -# - name: "Setup headless display" -# uses: pyvista/setup-headless-display-action@v1 -# -# - name: "Check examples with gatebin" -# shell: bash -# run: | -# echo on -# cd .ci -# ls . -# python run_examples.py -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Uninstall gatebin" -# shell: bash -# run: | -# pip uninstall -y ansys-dpf-gatebin -# if: always() -# -# - name: "Check sanity without gatebin INPROCESS" -# shell: bash -# run: | -# cd .ci -# python run_non_regression_examples.py -# env: -# DPF_SERVER_TYPE: INPROCESS -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Check sanity without gatebin GRPC" -# shell: bash -# run: | -# cd .ci -# python run_non_regression_examples.py -# env: -# DPF_SERVER_TYPE: GRPC -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Check sanity without gatebin LEGACYGRPC" -# shell: bash -# run: | -# cd .ci -# python run_non_regression_examples.py -# env: -# DPF_SERVER_TYPE: LEGACYGRPC -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() - -# retro: -# name: "Retro-compatibility" -# runs-on: ${{ matrix.os }} -# strategy: -# fail-fast: false -# matrix: -# python-version: ["3.8"] -# os: ["windows-latest", "ubuntu-latest"] -# ANSYS_VERSION: ["222", "221"] -# steps: -# - uses: actions/checkout@v3 -# -# - name: "Build Package" -# uses: pyansys/pydpf-actions/build_package@v2.2.dev1 -# with: -# python-version: ${{ matrix.python-version }} -# ANSYS_VERSION: ${{matrix.ANSYS_VERSION}} -# PACKAGE_NAME: ${{env.PACKAGE_NAME}} -# MODULE: ${{env.MODULE}} -# dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} -# install_extras: plotting -# wheel: false -# extra-pip-args: ${{ env.extra }} -# -# - name: "Install ansys-grpc-dpf==0.4.0" -# shell: pwsh -# run: | -# pip install ansys-grpc-dpf==0.4.0 -# if: matrix.ANSYS_VERSION == '221' -# -# - name: "Prepare Testing Environment" -# uses: pyansys/pydpf-actions/prepare_tests@v2.2 -# with: -# DEBUG: true -# -# - name: "Separate long 
Core tests" -# shell: pwsh -# run: | -# New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" -# New-Item -Path ".\" -Name "test_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" -# New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" -# New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" -# New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" -# Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" -# Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" -# Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" -# Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" -# Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" -# Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" -# Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" -# Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" -# Remove-Item -Path "tests\test_server.py" -# Remove-Item -Path "tests\test_launcher.py" -# Remove-Item -Path "tests\test_local_server.py" -# Remove-Item -Path "tests\test_multi_server.py" -# Remove-Item -Path "tests\test_workflow.py" -# Remove-Item -Path "tests\test_remote_workflow.py" -# Remove-Item -Path "tests\test_remote_operator.py" -# -# - name: "Test API" -# shell: bash -# working-directory: tests -# run: | -# pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_launcher" -# shell: bash -# working-directory: test_launcher -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_server" -# shell: bash -# working-directory: test_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_local_server" -# shell: bash -# working-directory: test_local_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . 
-# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_multi_server" -# shell: bash -# working-directory: test_multi_server -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_remote_workflow" -# shell: bash -# working-directory: test_remote_workflow -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_remote_operator" -# shell: bash -# working-directory: test_remote_operator -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Test API test_workflow" -# shell: bash -# working-directory: test_workflow -# run: | -# pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . -# if: always() -# -# - name: "Kill all servers" -# uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 -# if: always() -# -# - name: "Upload Test Results" -# uses: actions/upload-artifact@v2 -# with: -# name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ matrix.ANSYS_VERSION }} -# path: tests/junit/test-results.xml -# if: always() -# -# - name: "Upload coverage to Codecov" -# uses: codecov/codecov-action@v3 + run_examples: + name: "Run Examples with/without bin" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.8"] + os: ["windows-latest"] + + steps: + - uses: actions/checkout@v3 + + - name: Setup Python + uses: actions/setup-python@v4.2.0 + with: + python-version: ${{ matrix.python-version }} + + - name: "Build Package" + uses: pyansys/pydpf-actions/build_package@v2.2.dev1 + with: + python-version: ${{ matrix.python-version }} + ANSYS_VERSION: ${{env.ANSYS_VERSION}} + PACKAGE_NAME: ${{env.PACKAGE_NAME}} + MODULE: ${{env.MODULE}} + dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} + install_extras: plotting + wheelhouse: false + wheel: false + extra-pip-args: ${{ env.extra }} + + - name: "Setup headless display" + uses: pyvista/setup-headless-display-action@v1 + + - name: "Check examples with gatebin" + shell: bash + run: | + echo on + cd .ci + ls . 
+ python run_examples.py + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Uninstall gatebin" + shell: bash + run: | + pip uninstall -y ansys-dpf-gatebin + if: always() + + - name: "Check sanity without gatebin INPROCESS" + shell: bash + run: | + cd .ci + python run_non_regression_examples.py + env: + DPF_SERVER_TYPE: INPROCESS + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Check sanity without gatebin GRPC" + shell: bash + run: | + cd .ci + python run_non_regression_examples.py + env: + DPF_SERVER_TYPE: GRPC + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Check sanity without gatebin LEGACYGRPC" + shell: bash + run: | + cd .ci + python run_non_regression_examples.py + env: + DPF_SERVER_TYPE: LEGACYGRPC + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + retro: + name: "Retro-compatibility" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.8"] + os: ["windows-latest", "ubuntu-latest"] + ANSYS_VERSION: ["222", "221"] + steps: + - uses: actions/checkout@v3 + + - name: "Build Package" + uses: pyansys/pydpf-actions/build_package@v2.2.dev1 + with: + python-version: ${{ matrix.python-version }} + ANSYS_VERSION: ${{matrix.ANSYS_VERSION}} + PACKAGE_NAME: ${{env.PACKAGE_NAME}} + MODULE: ${{env.MODULE}} + dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} + install_extras: plotting + wheel: false + extra-pip-args: ${{ env.extra }} + + - name: "Install ansys-grpc-dpf==0.4.0" + shell: pwsh + run: | + pip install ansys-grpc-dpf==0.4.0 + if: matrix.ANSYS_VERSION == '221' + + - name: "Prepare Testing Environment" + uses: pyansys/pydpf-actions/prepare_tests@v2.2 + with: + DEBUG: true + + - name: "Separate long Core tests" + shell: pwsh + run: | + New-Item -Path ".\" -Name "test_launcher" -ItemType "directory" + New-Item -Path ".\" -Name "test_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_local_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_multi_server" -ItemType "directory" + New-Item -Path ".\" -Name "test_workflow" -ItemType "directory" + New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory" + New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_multi_server\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_workflow\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\" + Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\" + Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\" + Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\" + Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\" + Copy-Item -Path "tests\test_multi_server.py" -Destination ".\test_multi_server\" + Copy-Item -Path "tests\test_workflow.py" -Destination ".\test_workflow\" + Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_workflow\" + Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\" 
+ Remove-Item -Path "tests\test_server.py" + Remove-Item -Path "tests\test_launcher.py" + Remove-Item -Path "tests\test_local_server.py" + Remove-Item -Path "tests\test_multi_server.py" + Remove-Item -Path "tests\test_workflow.py" + Remove-Item -Path "tests\test_remote_workflow.py" + Remove-Item -Path "tests\test_remote_operator.py" + + - name: "Test API" + shell: bash + working-directory: tests + run: | + pytest $DEBUG --cov=ansys.dpf.${{env.MODULE}} --cov-report=xml --cov-report=html --log-level=ERROR --junitxml=junit/test-results.xml --reruns 2 . + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_launcher" + shell: bash + working-directory: test_launcher + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_server" + shell: bash + working-directory: test_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_local_server" + shell: bash + working-directory: test_local_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_multi_server" + shell: bash + working-directory: test_multi_server + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_remote_workflow" + shell: bash + working-directory: test_remote_workflow + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_remote_operator" + shell: bash + working-directory: test_remote_operator + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . + if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Test API test_workflow" + shell: bash + working-directory: test_workflow + run: | + pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . 
+ if: always() + + - name: "Kill all servers" + uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() + + - name: "Upload Test Results" + uses: actions/upload-artifact@v2 + with: + name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ matrix.ANSYS_VERSION }} + path: tests/junit/test-results.xml + if: always() + + - name: "Upload coverage to Codecov" + uses: codecov/codecov-action@v3 draft_release: name: "Draft Release" From 76c1c242e2d6bda2712efe5fddccf25f4b872eae Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 14:16:55 +0200 Subject: [PATCH 29/39] Debug upload_file_in_tmp_folder --- ansys/dpf/core/core.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index 72bcec9ba7..ba157e46d2 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -104,7 +104,11 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): :class:`ansys.dpf.core.server_types.InProcessServer`. """ base = BaseService(server, load_operators=False) - return base.upload_file_in_tmp_folder(file_path, new_file_name) + path = base.upload_file_in_tmp_folder(file_path, new_file_name) + warnings.warn(f"Uploaded file to {path}") + if path is None or path == '': + raise ValueError(f"Upload failed! Path returned is:{path}") + return def upload_files_in_folder( From ace582904a8b17bd42f4a64bd47b9b11129090c1 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 14:31:15 +0200 Subject: [PATCH 30/39] Debug upload_file_in_tmp_folder --- ansys/dpf/core/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index ba157e46d2..9c514fd0ce 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -108,7 +108,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): warnings.warn(f"Uploaded file to {path}") if path is None or path == '': raise ValueError(f"Upload failed! Path returned is:{path}") - return + return path def upload_files_in_folder( From 9af3ce9ec376e0dadc9b1b6c9355548c6bc145a7 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:08:05 +0200 Subject: [PATCH 31/39] Remove debug warning --- ansys/dpf/core/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index 9c514fd0ce..11153d262d 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -105,7 +105,6 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): """ base = BaseService(server, load_operators=False) path = base.upload_file_in_tmp_folder(file_path, new_file_name) - warnings.warn(f"Uploaded file to {path}") if path is None or path == '': raise ValueError(f"Upload failed! Path returned is:{path}") return path From c9d13ab191c64ab4cf072d72b7fc4de2b9214c69 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:26:27 +0200 Subject: [PATCH 32/39] Use tmp_path pytest fixture instead of deprecated tmpdir in test_service.py. 
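The net effect of the three core.py patches above is that
``upload_file_in_tmp_folder`` now validates the path returned by the server
before handing it back to the caller. A sketch of the resulting body,
assembled from those hunks (docstring omitted):

    def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None):
        base = BaseService(server, load_operators=False)
        # Fail loudly instead of silently returning an unusable path.
        path = base.upload_file_in_tmp_folder(file_path, new_file_name)
        if path is None or path == '':
            raise ValueError(f"Upload failed! Path returned is:{path}")
        return path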
--- tests/test_service.py | 120 +++++++++++++++++++++--------------------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/tests/test_service.py b/tests/test_service.py index afe8d8783d..24c67375f7 100644 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -58,11 +58,11 @@ def transfer_to_local_path(path): and (platform.python_version().startswith("3.8") or platform.python_version().startswith("3.7")), reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") -def test_upload_download(allkindofcomplexity, tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) +def test_upload_download(allkindofcomplexity, tmp_path, server_type_remote_process): + tmp_path = str(tmp_path) file = dpf.core.upload_file_in_tmp_folder( transfer_to_local_path(allkindofcomplexity), - server = server_type_remote_process + server=server_type_remote_process ) dataSource = dpf.core.DataSources(file, server=server_type_remote_process) op = dpf.core.Operator("S", server=server_type_remote_process) @@ -79,16 +79,16 @@ def test_upload_download(allkindofcomplexity, tmpdir, server_type_remote_process server=server_type_remote_process) vtk.run() - dpf.core.download_file(vtk_path, os.path.join(tmpdir, "file.vtk"), + dpf.core.download_file(vtk_path, os.path.join(tmp_path, "file.vtk"), server=server_type_remote_process) - assert os.path.exists(os.path.join(tmpdir, "file.vtk")) + assert os.path.exists(os.path.join(tmp_path, "file.vtk")) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def test_download_folder( - allkindofcomplexity, plate_msup, multishells, tmpdir, server_type_remote_process + allkindofcomplexity, plate_msup, multishells, tmp_path, server_type_remote_process ): - tmpdir = str(tmpdir) + tmp_path = str(tmp_path) file = dpf.core.upload_file_in_tmp_folder( allkindofcomplexity, server=server_type_remote_process ) @@ -100,18 +100,18 @@ def test_download_folder( ) parent_path = os.path.dirname(file) dpf.core.download_files_in_folder( - parent_path, tmpdir, server=server_type_remote_process + parent_path, tmp_path, server=server_type_remote_process ) import ntpath - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(allkindofcomplexity))) - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(plate_msup))) - assert os.path.exists(os.path.join(tmpdir, ntpath.basename(multishells))) + assert os.path.exists(os.path.join(tmp_path, ntpath.basename(allkindofcomplexity))) + assert os.path.exists(os.path.join(tmp_path, ntpath.basename(plate_msup))) + assert os.path.exists(os.path.join(tmp_path, ntpath.basename(multishells))) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") -def test_download_with_subdir(multishells, tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) +def test_download_with_subdir(multishells, tmp_path, server_type_remote_process): + tmp_path = str(tmp_path) file = dpf.core.upload_file_in_tmp_folder( multishells, server=server_type_remote_process ) @@ -130,108 +130,108 @@ def test_download_with_subdir(multishells, tmpdir, server_type_remote_process): folder = parent_path out = dpf.core.download_files_in_folder( - folder, tmpdir, server=server_type_remote_process + folder, tmp_path, server=server_type_remote_process ) - p1 = os.path.join(tmpdir, filename) - p2 = os.path.join(tmpdir, "subdir", filename) - # p1 = tmpdir + "/" + filename - # p2 = tmpdir + "/subdir/" + filename + p1 = os.path.join(tmp_path, filename) + p2 = os.path.join(tmp_path, "subdir", filename) + # p1 = tmp_path + "/" + 
filename + # p2 = tmp_path + "/subdir/" + filename assert os.path.exists(p1) assert os.path.exists(p2) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def test_downloadinfolder_uploadinfolder( - multishells, tmpdir, server_type_remote_process + multishells, tmp_path, server_type_remote_process ): - tmpdir = str(tmpdir) + tmp_path = str(tmp_path) base = dpf.core.BaseService(server=server_type_remote_process) - # create in tmpdir some architecture with subfolder in subfolder - path1 = os.path.join(tmpdir, os.path.basename(multishells)) - path2 = os.path.join(tmpdir, "subdirA", os.path.basename(multishells)) - path4 = os.path.join(tmpdir, "subdirB", os.path.basename(multishells)) + # create in tmp_path some architecture with subfolder in subfolder + path1 = os.path.join(tmp_path, os.path.basename(multishells)) + path2 = os.path.join(tmp_path, "subdirA", os.path.basename(multishells)) + path4 = os.path.join(tmp_path, "subdirB", os.path.basename(multishells)) from shutil import copyfile copyfile(multishells, path1) - os.mkdir(os.path.join(tmpdir, "subdirA")) + os.mkdir(os.path.join(tmp_path, "subdirA")) copyfile(multishells, path2) - os.mkdir(os.path.join(tmpdir, "subdirB")) + os.mkdir(os.path.join(tmp_path, "subdirB")) copyfile(multishells, path4) # upload it TARGET_PATH = base.make_tmp_dir_server() dpf.core.upload_files_in_folder( to_server_folder_path=TARGET_PATH, - client_folder_path=tmpdir, + client_folder_path=tmp_path, specific_extension="rst", server=server_type_remote_process, ) # download it - new_tmpdir = os.path.join(tmpdir, "my_tmp_dir") - os.mkdir(new_tmpdir) + new_tmp_path = os.path.join(tmp_path, "my_tmp_dir") + os.mkdir(new_tmp_path) out = dpf.core.download_files_in_folder( - TARGET_PATH, new_tmpdir, server=server_type_remote_process + TARGET_PATH, new_tmp_path, server=server_type_remote_process ) # check if the architecture of the download is ok - path1_check = os.path.join(new_tmpdir, os.path.basename(multishells)) - path2_check = os.path.join(new_tmpdir, "subdirA", os.path.basename(multishells)) - path4_check = os.path.join(new_tmpdir, "subdirB", os.path.basename(multishells)) + path1_check = os.path.join(new_tmp_path, os.path.basename(multishells)) + path2_check = os.path.join(new_tmp_path, "subdirA", os.path.basename(multishells)) + path4_check = os.path.join(new_tmp_path, "subdirB", os.path.basename(multishells)) assert os.path.exists(path1_check) assert os.path.exists(path2_check) assert os.path.exists(path4_check) # clean - # os.remove(os.path.join(tmpdir, "tmpdir")) - # os.remove(os.path.join(tmpdir, "subdirA")) - # os.remove(os.path.join(tmpdir, "subdirB")) + # os.remove(os.path.join(tmp_path, "tmp_path")) + # os.remove(os.path.join(tmp_path, "subdirA")) + # os.remove(os.path.join(tmp_path, "subdirB")) -# def test_downloadinfolder_uploadinfolder_subsubdir(multishells, tmpdir): +# def test_downloadinfolder_uploadinfolder_subsubdir(multishells, tmp_path): # base = dpf.core.BaseService() -# # create in tmpdir some architecture with subfolder in subfolder -# path1 = os.path.join(tmpdir, os.path.basename(multishells)) -# path2 = os.path.join(tmpdir, "subdirA", os.path.basename(multishells)) -# path3 = os.path.join(tmpdir, "subdirA", "subdir1", os.path.basename(multishells)) -# path4 = os.path.join(tmpdir, "subdirB", os.path.basename(multishells)) +# # create in tmp_path some architecture with subfolder in subfolder +# path1 = os.path.join(tmp_path, os.path.basename(multishells)) +# path2 = os.path.join(tmp_path, "subdirA", 
os.path.basename(multishells)) +# path3 = os.path.join(tmp_path, "subdirA", "subdir1", os.path.basename(multishells)) +# path4 = os.path.join(tmp_path, "subdirB", os.path.basename(multishells)) # from shutil import copyfile # copyfile(multishells, path1) -# os.mkdir(os.path.join(tmpdir, "subdirA")) +# os.mkdir(os.path.join(tmp_path, "subdirA")) # copyfile(multishells, path2) -# os.mkdir(os.path.join(tmpdir, "subdirA", "subdir1")) +# os.mkdir(os.path.join(tmp_path, "subdirA", "subdir1")) # copyfile(multishells, path3) -# os.mkdir(os.path.join(tmpdir, "subdirB")) +# os.mkdir(os.path.join(tmp_path, "subdirB")) # copyfile(multishells, path4) # # upload it # TARGET_PATH = base.make_tmp_dir_server() # base.upload_files_in_folder( # to_server_folder_path = TARGET_PATH, -# client_folder_path = tmpdir, +# client_folder_path = tmp_path, # specific_extension = "rst" # ) # # download it -# new_tmpdir = os.path.join(tmpdir, "tmpdir") -# os.mkdir(new_tmpdir) -# out = dpf.core.download_files_in_folder(TARGET_PATH, new_tmpdir) +# new_tmp_path = os.path.join(tmp_path, "tmp_path") +# os.mkdir(new_tmp_path) +# out = dpf.core.download_files_in_folder(TARGET_PATH, new_tmp_path) # # check if the architecture of the download is ok -# path1_check = os.path.join(new_tmpdir, os.path.basename(multishells)) -# path2_check = os.path.join(new_tmpdir, "subdirA", os.path.basename(multishells)) -# path3_check = os.path.join(new_tmpdir, "subdirA", "subdir1", os.path.basename(multishells)) -# path4_check = os.path.join(new_tmpdir, "subdirB", os.path.basename(multishells)) +# path1_check = os.path.join(new_tmp_path, os.path.basename(multishells)) +# path2_check = os.path.join(new_tmp_path, "subdirA", os.path.basename(multishells)) +# path3_check = os.path.join(new_tmp_path, "subdirA", "subdir1", os.path.basename(multishells)) +# path4_check = os.path.join(new_tmp_path, "subdirB", os.path.basename(multishells)) # assert os.path.exists(path1_check) # assert os.path.exists(path2_check) # assert os.path.exists(path3_check) # assert os.path.exists(path4_check) # # clean -# # os.remove(os.path.join(tmpdir, "tmpdir")) -# # os.remove(os.path.join(tmpdir, "subdirA")) -# # os.remove(os.path.join(tmpdir, "subdirA", "subdir1")) -# # os.remove(os.path.join(tmpdir, "subdirB")) +# # os.remove(os.path.join(tmp_path, "tmp_path")) +# # os.remove(os.path.join(tmp_path, "subdirA")) +# # os.remove(os.path.join(tmp_path, "subdirA", "subdir1")) +# # os.remove(os.path.join(tmp_path, "subdirB")) -def test_uploadinfolder_emptyfolder(tmpdir, server_type_remote_process): - tmpdir = str(tmpdir) +def test_uploadinfolder_emptyfolder(tmp_path, server_type_remote_process): + tmp_path = str(tmp_path) base = dpf.core.BaseService(server=server_type_remote_process) TARGET_PATH = base.make_tmp_dir_server() path = base.upload_files_in_folder( - to_server_folder_path=TARGET_PATH, client_folder_path=tmpdir + to_server_folder_path=TARGET_PATH, client_folder_path=tmp_path ) assert len(path) == 0 From a589153e8352b4ef06734d6ae45c2b68d7f726d3 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:27:00 +0200 Subject: [PATCH 33/39] Unskip test_upload_download on Windows for python 3.7 and 3.8 --- tests/test_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_service.py b/tests/test_service.py index 24c67375f7..beacea4fa9 100644 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -54,10 +54,10 @@ def transfer_to_local_path(path): ) -@pytest.mark.skipif(platform.system() == "Windows" - and 
(platform.python_version().startswith("3.8") - or platform.python_version().startswith("3.7")), - reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") +# @pytest.mark.skipif(platform.system() == "Windows" +# and (platform.python_version().startswith("3.8") +# or platform.python_version().startswith("3.7")), +# reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") def test_upload_download(allkindofcomplexity, tmp_path, server_type_remote_process): tmp_path = str(tmp_path) file = dpf.core.upload_file_in_tmp_folder( From 73902fb4049af7fcd6cfcd53428e8ed3dd8c98c2 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:45:19 +0200 Subject: [PATCH 34/39] Revert "Unskip test_upload_download on Windows for python 3.7 and 3.8" This reverts commit a589153e8352b4ef06734d6ae45c2b68d7f726d3. --- tests/test_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_service.py b/tests/test_service.py index beacea4fa9..24c67375f7 100644 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -54,10 +54,10 @@ def transfer_to_local_path(path): ) -# @pytest.mark.skipif(platform.system() == "Windows" -# and (platform.python_version().startswith("3.8") -# or platform.python_version().startswith("3.7")), -# reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") +@pytest.mark.skipif(platform.system() == "Windows" + and (platform.python_version().startswith("3.8") + or platform.python_version().startswith("3.7")), + reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") def test_upload_download(allkindofcomplexity, tmp_path, server_type_remote_process): tmp_path = str(tmp_path) file = dpf.core.upload_file_in_tmp_folder( From d39f567a285b39d07befe3ab3dec5a3088d5ec5a Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:45:20 +0200 Subject: [PATCH 35/39] Revert "Use tmp_path pytest fixture instead of deprecated tmpdir in test_service.py." This reverts commit c9d13ab191c64ab4cf072d72b7fc4de2b9214c69. 
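For context on the fixture swap being reverted here: pytest's `tmpdir` and `tmp_path` fixtures both inject a unique per-test temporary directory and differ mainly in the type they return — `tmpdir` yields a legacy `py.path.local`, while `tmp_path` yields a standard `pathlib.Path`. A minimal sketch of the two (illustrative only, assuming a stock pytest install; not taken from this test suite):

    import os

    def test_with_tmpdir(tmpdir):
        # Legacy fixture: a py.path.local object with its own join/write API.
        p = tmpdir.join("file.txt")
        p.write("content")
        assert os.path.exists(str(p))

    def test_with_tmp_path(tmp_path):
        # Modern fixture: a pathlib.Path object.
        p = tmp_path / "file.txt"
        p.write_text("content")
        assert p.exists()

In both variants, the tests in this file immediately call `str(...)` on the fixture, so the rest of each test keeps building paths with `os.path.join` regardless of which fixture is in play.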
--- tests/test_service.py | 120 +++++++++++++++++++++--------------------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/tests/test_service.py b/tests/test_service.py index 24c67375f7..afe8d8783d 100644 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -58,11 +58,11 @@ def transfer_to_local_path(path): and (platform.python_version().startswith("3.8") or platform.python_version().startswith("3.7")), reason="Random SEGFAULT in the GitHub pipeline for 3.7-8 on Windows") -def test_upload_download(allkindofcomplexity, tmp_path, server_type_remote_process): - tmp_path = str(tmp_path) +def test_upload_download(allkindofcomplexity, tmpdir, server_type_remote_process): + tmpdir = str(tmpdir) file = dpf.core.upload_file_in_tmp_folder( transfer_to_local_path(allkindofcomplexity), - server=server_type_remote_process + server = server_type_remote_process ) dataSource = dpf.core.DataSources(file, server=server_type_remote_process) op = dpf.core.Operator("S", server=server_type_remote_process) @@ -79,16 +79,16 @@ def test_upload_download(allkindofcomplexity, tmp_path, server_type_remote_proce server=server_type_remote_process) vtk.run() - dpf.core.download_file(vtk_path, os.path.join(tmp_path, "file.vtk"), + dpf.core.download_file(vtk_path, os.path.join(tmpdir, "file.vtk"), server=server_type_remote_process) - assert os.path.exists(os.path.join(tmp_path, "file.vtk")) + assert os.path.exists(os.path.join(tmpdir, "file.vtk")) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def test_download_folder( - allkindofcomplexity, plate_msup, multishells, tmp_path, server_type_remote_process + allkindofcomplexity, plate_msup, multishells, tmpdir, server_type_remote_process ): - tmp_path = str(tmp_path) + tmpdir = str(tmpdir) file = dpf.core.upload_file_in_tmp_folder( allkindofcomplexity, server=server_type_remote_process ) @@ -100,18 +100,18 @@ def test_download_folder( ) parent_path = os.path.dirname(file) dpf.core.download_files_in_folder( - parent_path, tmp_path, server=server_type_remote_process + parent_path, tmpdir, server=server_type_remote_process ) import ntpath - assert os.path.exists(os.path.join(tmp_path, ntpath.basename(allkindofcomplexity))) - assert os.path.exists(os.path.join(tmp_path, ntpath.basename(plate_msup))) - assert os.path.exists(os.path.join(tmp_path, ntpath.basename(multishells))) + assert os.path.exists(os.path.join(tmpdir, ntpath.basename(allkindofcomplexity))) + assert os.path.exists(os.path.join(tmpdir, ntpath.basename(plate_msup))) + assert os.path.exists(os.path.join(tmpdir, ntpath.basename(multishells))) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") -def test_download_with_subdir(multishells, tmp_path, server_type_remote_process): - tmp_path = str(tmp_path) +def test_download_with_subdir(multishells, tmpdir, server_type_remote_process): + tmpdir = str(tmpdir) file = dpf.core.upload_file_in_tmp_folder( multishells, server=server_type_remote_process ) @@ -130,108 +130,108 @@ def test_download_with_subdir(multishells, tmp_path, server_type_remote_process) folder = parent_path out = dpf.core.download_files_in_folder( - folder, tmp_path, server=server_type_remote_process + folder, tmpdir, server=server_type_remote_process ) - p1 = os.path.join(tmp_path, filename) - p2 = os.path.join(tmp_path, "subdir", filename) - # p1 = tmp_path + "/" + filename - # p2 = tmp_path + "/subdir/" + filename + p1 = os.path.join(tmpdir, filename) + p2 = os.path.join(tmpdir, "subdir", filename) + # p1 = tmpdir + "/" 
+ filename + # p2 = tmpdir + "/subdir/" + filename assert os.path.exists(p1) assert os.path.exists(p2) @pytest.mark.skipif(running_docker, reason="Path hidden within docker container") def test_downloadinfolder_uploadinfolder( - multishells, tmp_path, server_type_remote_process + multishells, tmpdir, server_type_remote_process ): - tmp_path = str(tmp_path) + tmpdir = str(tmpdir) base = dpf.core.BaseService(server=server_type_remote_process) - # create in tmp_path some architecture with subfolder in subfolder - path1 = os.path.join(tmp_path, os.path.basename(multishells)) - path2 = os.path.join(tmp_path, "subdirA", os.path.basename(multishells)) - path4 = os.path.join(tmp_path, "subdirB", os.path.basename(multishells)) + # create in tmpdir some architecture with subfolder in subfolder + path1 = os.path.join(tmpdir, os.path.basename(multishells)) + path2 = os.path.join(tmpdir, "subdirA", os.path.basename(multishells)) + path4 = os.path.join(tmpdir, "subdirB", os.path.basename(multishells)) from shutil import copyfile copyfile(multishells, path1) - os.mkdir(os.path.join(tmp_path, "subdirA")) + os.mkdir(os.path.join(tmpdir, "subdirA")) copyfile(multishells, path2) - os.mkdir(os.path.join(tmp_path, "subdirB")) + os.mkdir(os.path.join(tmpdir, "subdirB")) copyfile(multishells, path4) # upload it TARGET_PATH = base.make_tmp_dir_server() dpf.core.upload_files_in_folder( to_server_folder_path=TARGET_PATH, - client_folder_path=tmp_path, + client_folder_path=tmpdir, specific_extension="rst", server=server_type_remote_process, ) # download it - new_tmp_path = os.path.join(tmp_path, "my_tmp_dir") - os.mkdir(new_tmp_path) + new_tmpdir = os.path.join(tmpdir, "my_tmp_dir") + os.mkdir(new_tmpdir) out = dpf.core.download_files_in_folder( - TARGET_PATH, new_tmp_path, server=server_type_remote_process + TARGET_PATH, new_tmpdir, server=server_type_remote_process ) # check if the architecture of the download is ok - path1_check = os.path.join(new_tmp_path, os.path.basename(multishells)) - path2_check = os.path.join(new_tmp_path, "subdirA", os.path.basename(multishells)) - path4_check = os.path.join(new_tmp_path, "subdirB", os.path.basename(multishells)) + path1_check = os.path.join(new_tmpdir, os.path.basename(multishells)) + path2_check = os.path.join(new_tmpdir, "subdirA", os.path.basename(multishells)) + path4_check = os.path.join(new_tmpdir, "subdirB", os.path.basename(multishells)) assert os.path.exists(path1_check) assert os.path.exists(path2_check) assert os.path.exists(path4_check) # clean - # os.remove(os.path.join(tmp_path, "tmp_path")) - # os.remove(os.path.join(tmp_path, "subdirA")) - # os.remove(os.path.join(tmp_path, "subdirB")) + # os.remove(os.path.join(tmpdir, "tmpdir")) + # os.remove(os.path.join(tmpdir, "subdirA")) + # os.remove(os.path.join(tmpdir, "subdirB")) -# def test_downloadinfolder_uploadinfolder_subsubdir(multishells, tmp_path): +# def test_downloadinfolder_uploadinfolder_subsubdir(multishells, tmpdir): # base = dpf.core.BaseService() -# # create in tmp_path some architecture with subfolder in subfolder -# path1 = os.path.join(tmp_path, os.path.basename(multishells)) -# path2 = os.path.join(tmp_path, "subdirA", os.path.basename(multishells)) -# path3 = os.path.join(tmp_path, "subdirA", "subdir1", os.path.basename(multishells)) -# path4 = os.path.join(tmp_path, "subdirB", os.path.basename(multishells)) +# # create in tmpdir some architecture with subfolder in subfolder +# path1 = os.path.join(tmpdir, os.path.basename(multishells)) +# path2 = os.path.join(tmpdir, "subdirA", 
os.path.basename(multishells)) +# path3 = os.path.join(tmpdir, "subdirA", "subdir1", os.path.basename(multishells)) +# path4 = os.path.join(tmpdir, "subdirB", os.path.basename(multishells)) # from shutil import copyfile # copyfile(multishells, path1) -# os.mkdir(os.path.join(tmp_path, "subdirA")) +# os.mkdir(os.path.join(tmpdir, "subdirA")) # copyfile(multishells, path2) -# os.mkdir(os.path.join(tmp_path, "subdirA", "subdir1")) +# os.mkdir(os.path.join(tmpdir, "subdirA", "subdir1")) # copyfile(multishells, path3) -# os.mkdir(os.path.join(tmp_path, "subdirB")) +# os.mkdir(os.path.join(tmpdir, "subdirB")) # copyfile(multishells, path4) # # upload it # TARGET_PATH = base.make_tmp_dir_server() # base.upload_files_in_folder( # to_server_folder_path = TARGET_PATH, -# client_folder_path = tmp_path, +# client_folder_path = tmpdir, # specific_extension = "rst" # ) # # download it -# new_tmp_path = os.path.join(tmp_path, "tmp_path") -# os.mkdir(new_tmp_path) -# out = dpf.core.download_files_in_folder(TARGET_PATH, new_tmp_path) +# new_tmpdir = os.path.join(tmpdir, "tmpdir") +# os.mkdir(new_tmpdir) +# out = dpf.core.download_files_in_folder(TARGET_PATH, new_tmpdir) # # check if the architecture of the download is ok -# path1_check = os.path.join(new_tmp_path, os.path.basename(multishells)) -# path2_check = os.path.join(new_tmp_path, "subdirA", os.path.basename(multishells)) -# path3_check = os.path.join(new_tmp_path, "subdirA", "subdir1", os.path.basename(multishells)) -# path4_check = os.path.join(new_tmp_path, "subdirB", os.path.basename(multishells)) +# path1_check = os.path.join(new_tmpdir, os.path.basename(multishells)) +# path2_check = os.path.join(new_tmpdir, "subdirA", os.path.basename(multishells)) +# path3_check = os.path.join(new_tmpdir, "subdirA", "subdir1", os.path.basename(multishells)) +# path4_check = os.path.join(new_tmpdir, "subdirB", os.path.basename(multishells)) # assert os.path.exists(path1_check) # assert os.path.exists(path2_check) # assert os.path.exists(path3_check) # assert os.path.exists(path4_check) # # clean -# # os.remove(os.path.join(tmp_path, "tmp_path")) -# # os.remove(os.path.join(tmp_path, "subdirA")) -# # os.remove(os.path.join(tmp_path, "subdirA", "subdir1")) -# # os.remove(os.path.join(tmp_path, "subdirB")) +# # os.remove(os.path.join(tmpdir, "tmpdir")) +# # os.remove(os.path.join(tmpdir, "subdirA")) +# # os.remove(os.path.join(tmpdir, "subdirA", "subdir1")) +# # os.remove(os.path.join(tmpdir, "subdirB")) -def test_uploadinfolder_emptyfolder(tmp_path, server_type_remote_process): - tmp_path = str(tmp_path) +def test_uploadinfolder_emptyfolder(tmpdir, server_type_remote_process): + tmpdir = str(tmpdir) base = dpf.core.BaseService(server=server_type_remote_process) TARGET_PATH = base.make_tmp_dir_server() path = base.upload_files_in_folder( - to_server_folder_path=TARGET_PATH, client_folder_path=tmp_path + to_server_folder_path=TARGET_PATH, client_folder_path=tmpdir ) assert len(path) == 0 From 1882a8af0a29a0f8e9eb7b71e878802cf9dadd90 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:45:21 +0200 Subject: [PATCH 36/39] Revert "Remove debug warning" This reverts commit 9af3ce9ec376e0dadc9b1b6c9355548c6bc145a7. 
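This revert and the two that follow unwind temporary debug instrumentation from `upload_file_in_tmp_folder` in `ansys/dpf/core/core.py`: a `warnings.warn` trace of the uploaded path, a dropped return value, and an explicit empty-path check. Applied in order, the three diffs below leave the helper in its original form:

    def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None):
        # Restored body, per the final hunk of the third revert:
        # delegate to BaseService and return the server-side path directly.
        base = BaseService(server, load_operators=False)
        return base.upload_file_in_tmp_folder(file_path, new_file_name)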
--- ansys/dpf/core/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index 11153d262d..9c514fd0ce 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -105,6 +105,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): """ base = BaseService(server, load_operators=False) path = base.upload_file_in_tmp_folder(file_path, new_file_name) + warnings.warn(f"Uploaded file to {path}") if path is None or path == '': raise ValueError(f"Upload failed! Path returned is:{path}") return path From f04aecb7a991ae05a783e9b774f0ceb255c00e3c Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:45:22 +0200 Subject: [PATCH 37/39] Revert "Debug upload_file_in_tmp_folder" This reverts commit ace582904a8b17bd42f4a64bd47b9b11129090c1. --- ansys/dpf/core/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index 9c514fd0ce..ba157e46d2 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -108,7 +108,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): warnings.warn(f"Uploaded file to {path}") if path is None or path == '': raise ValueError(f"Upload failed! Path returned is:{path}") - return path + return def upload_files_in_folder( From 9ee4f12fafaad0021cc63f459aff4efd2c7f33f4 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 15:45:23 +0200 Subject: [PATCH 38/39] Revert "Debug upload_file_in_tmp_folder" This reverts commit 76c1c242e2d6bda2712efe5fddccf25f4b872eae. --- ansys/dpf/core/core.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index ba157e46d2..72bcec9ba7 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -104,11 +104,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): :class:`ansys.dpf.core.server_types.InProcessServer`. """ base = BaseService(server, load_operators=False) - path = base.upload_file_in_tmp_folder(file_path, new_file_name) - warnings.warn(f"Uploaded file to {path}") - if path is None or path == '': - raise ValueError(f"Upload failed! Path returned is:{path}") - return + return base.upload_file_in_tmp_folder(file_path, new_file_name) def upload_files_in_folder( From 6768e5f75e1f747b21cce17986338e64ebb1e244 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 12 Oct 2022 16:52:19 +0200 Subject: [PATCH 39/39] Abort the build at first example error --- docs/source/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index 1b452cccc6..6daa7f2970 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -127,6 +127,8 @@ def reset_servers(gallery_conf, fname, when): "pypandoc": True, # path to your examples scripts "examples_dirs": ["../../examples"], + # abort build at first example error + 'abort_on_example_error': True, # path where to save gallery generated examples "gallery_dirs": ["examples"], # Patter to search for example files
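The `abort_on_example_error` flag changes how sphinx-gallery reacts to a failing example script: instead of continuing through the gallery and reporting accumulated failures at the end of the build, it raises at the first error, which shortens the feedback loop in CI. A condensed sketch of the relevant part of `sphinx_gallery_conf` (all other keys elided; see `docs/source/conf.py` for the full dictionary):

    sphinx_gallery_conf = {
        # where the example scripts are read from
        "examples_dirs": ["../../examples"],
        # where the generated gallery pages are written
        "gallery_dirs": ["examples"],
        # fail fast: raise at the first failing example
        "abort_on_example_error": True,
    }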