Skip to content

Commit

Permalink
Merge branch 'master' into ci/testing_pipelines
Browse files Browse the repository at this point in the history
# Conflicts:
#	.github/workflows/ci.yml
  • Loading branch information
PProfizi committed Oct 20, 2022
2 parents 3261bec + 17a4359 commit 3a68864
Show file tree
Hide file tree
Showing 29 changed files with 226 additions and 133 deletions.
5 changes: 3 additions & 2 deletions .ci/build_doc.bat
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@ call sphinx-apidoc -o ../docs/source/api ../ansys ../ansys/dpf/core/log.py ^
../ansys/dpf/core/help.py ../ansys/dpf/core/mapping_types.py ../ansys/dpf/core/ipconfig.py ^
../ansys/dpf/core/field_base.py ../ansys/dpf/core/cache.py ../ansys/dpf/core/misc.py ^
../ansys/dpf/core/check_version.py ../ansys/dpf/core/operators/build.py ../ansys/dpf/core/operators/specification.py ^
../ansys/dpf/core/vtk_helper.py ../ansys/dpf/core/label_space.py ^
../ansys/dpf/core/vtk_helper.py ../ansys/dpf/core/label_space.py ../ansys/dpf/core/examples/python_plugins/* ^
../ansys/dpf/core/examples/examples.py ^
-f --implicit-namespaces --separate --no-headings
pushd .
cd ../docs/
call make clean
call make html
call make html -v -v -v -P
popd
2 changes: 1 addition & 1 deletion CODE_OF_CONDUCT.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ further defined and clarified by project maintainers.
## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at info@pyvista.org. All
reported by contacting the project team at pyansys.maintainers@ansys.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Expand Down
4 changes: 3 additions & 1 deletion ansys/dpf/core/_version.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""Version for ansys-dpf-core"""
# major, minor, patch
version_info = 0, 6, '1dev0'
version_info = 0, 6, 2, 'dev0'

# Nice string for the version
__version__ = ".".join(map(str, version_info))
Expand All @@ -12,6 +12,7 @@
"3.0": ">=0.4.0",
"4.0": ">=0.5.0",
"5.0": ">=0.6.0",
"6.0": ">=0.7.*",
}

server_to_ansys_version = {
Expand All @@ -20,4 +21,5 @@
"3.0": "2022R1",
"4.0": "2022R2",
"5.0": "2023R1",
"6.0": "2023R2",
}
12 changes: 7 additions & 5 deletions ansys/dpf/core/fields_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,6 +536,11 @@ def animate(self, save_as=None, deform_by=None, scale_factor=1.0, **kwargs):
deform = False
if deform_by and not isinstance(deform_by, dpf.core.FieldsContainer):
deform_by = deform_by.eval()
if len(deform_by) != len(self):
raise ValueError("'deform_by' argument must result in a FieldsContainer "
"of same length as the animated one "
f"(len(deform_by.eval())={len(deform_by)} "
f"!= len(self)={len(self)}).")
else:
deform = False
if deform:
Expand All @@ -561,15 +566,12 @@ def animate(self, save_as=None, deform_by=None, scale_factor=1.0, **kwargs):
# Addition to the scaled deformation field
add_op = dpf.core.operators.math.add(divide_op.outputs.field,
get_coordinates_op.outputs.coordinates_as_field)
wf.set_output_name("deform_by", add_op.outputs.field)
add_op.progress_bar = False
else:
scale_factor = None
scale_factor_fc = dpf.core.animator.scale_factor_to_fc(1.0, self)
extract_scale_factor_op = dpf.core.operators.utility.extract_field(scale_factor_fc)
add_op = dpf.core.operators.utility.forward_field(extract_scale_factor_op)
wf.set_output_name("deform_by", add_op.outputs.field)
wf.set_output_name("to_render", extract_field_op.outputs.field)
wf.progress_bar = False
add_op.progress_bar = False

loop_over = self.get_time_scoping()
frequencies = self.time_freq_support.time_frequencies
Expand Down
16 changes: 16 additions & 0 deletions ansys/dpf/core/meshed_region.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,21 @@
from ansys.dpf.gate import meshed_region_capi, meshed_region_grpcapi


def update_grid(func):
    """Decorator for ``MeshedRegion`` setters that keeps the cached pyvista grid in sync.

    If the mesh has already built its ``_full_grid``, the relevant part of the
    VTK grid is updated before delegating to the wrapped setter.  Each setter is
    treated separately so only the minimum required data is refreshed.

    Parameters
    ----------
    func : callable
        The setter method being decorated; ``args[0]`` is the ``MeshedRegion``.
    """
    from functools import wraps

    @wraps(func)  # preserve the setter's name/docstring on the wrapper
    def wrapper(*args, **kwargs):
        mesh = args[0]
        if mesh._full_grid is not None:
            # Treat each setter separately to improve performance by updating
            # the minimum required.
            # NOTE(review): this relies on func.__name__ surviving any other
            # stacked decorators (e.g. version_requires) — confirm they use wraps.
            if func.__name__ == 'set_coordinates_field':
                # When setting node coordinates, push them into the cached grid.
                from ansys.dpf.core.vtk_helper import vtk_update_coordinates
                vtk_update_coordinates(vtk_grid=mesh._full_grid,
                                       coordinates_array=args[1].data)

        return func(*args, **kwargs)
    return wrapper


@class_handling_cache
class MeshedRegion:
"""
Expand Down Expand Up @@ -275,6 +290,7 @@ def set_property_field(self, property_name, value):
else:
self._api.meshed_region_set_property_field(self, property_name, value)

@update_grid
@version_requires("3.0")
def set_coordinates_field(self, coordinates_field):
"""
Expand Down
3 changes: 3 additions & 0 deletions ansys/dpf/core/plotter.py
Original file line number Diff line number Diff line change
Expand Up @@ -391,6 +391,8 @@ def add_mesh(self, meshed_region, deform_by=None, scale_factor=1.0, **kwargs):
>>> pl.add_mesh(mesh)
"""
if meshed_region.grid is not None:
meshed_region.grid.clear_data()
self._internal_plotter.add_mesh(meshed_region=meshed_region,
deform_by=deform_by,
scale_factor=scale_factor,
Expand Down Expand Up @@ -767,6 +769,7 @@ def plot_contour(
self._internal_plotter.add_scale_factor_legend(scale_factor, **kwargs)
else:
grid = mesh.grid
grid.clear_data()
self._internal_plotter._plotter.add_mesh(grid, scalars=overall_data, **kwargs_in)

background = kwargs.pop("background", None)
Expand Down
24 changes: 21 additions & 3 deletions ansys/dpf/core/server_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,15 +282,21 @@ def check_ansys_grpc_dpf_version(server, timeout):
raise TimeoutError(
f"Failed to connect to {server._input_ip}:{server._input_port} in {timeout} seconds"
)

compatibility_link = (f"https://dpf.docs.pyansys.com/getting_started/"
f"index.html#client-server-compatibility")
LOG.debug("Established connection to DPF gRPC")
grpc_module_version = ansys.grpc.dpf.__version__
server_version = server.version
right_grpc_module_version = server_to_ansys_grpc_dpf_version.get(server_version, None)
if right_grpc_module_version is None: # pragma: no cover
# warnings.warn(f"No requirement specified on ansys-grpc-dpf for server version "
# f"{server_version}. Continuing with the ansys-grpc-dpf version "
# f"installed ({grpc_module_version}). In case of unexpected instability, "
# f"please refer to the compatibility guidelines given in "
# f"{compatibility_link}.")
return
if not _compare_ansys_grpc_dpf_version(right_grpc_module_version, grpc_module_version):
ansys_version_to_use = server_to_ansys_version.get(server_version, 'Unknown')
compatibility_link = (f"https://dpfdocs.pyansys.com/getting_started/"
f"index.html#client-server-compatibility")
ansys_versions = core._version.server_to_ansys_version
latest_ansys = ansys_versions[max(ansys_versions.keys())]
raise ImportWarning(f"An incompatibility has been detected between the DPF server version "
Expand Down Expand Up @@ -532,6 +538,7 @@ def __init__(self,
launch_server=True,
docker_name=None,
use_pypim=True,
num_connection_tryouts=3,
):
# Load DPFClientAPI
from ansys.dpf.core.misc import is_pypim_configured
Expand Down Expand Up @@ -563,8 +570,19 @@ def __init__(self,
self._input_port = port
self.live = True
self._create_shutdown_funcs()
self._check_first_call(num_connection_tryouts)
self.set_as_global(as_global=as_global)

def _check_first_call(self, num_connection_tryouts):
    """Verify the freshly launched server answers its first request.

    Accessing ``self.version`` forces a round-trip to the server.  Transient
    gRPC failures ("GOAWAY" or "unavailable", raised while the server is still
    starting up) are retried up to ``num_connection_tryouts`` times; any other
    error — or the last failed attempt — is re-raised.

    Parameters
    ----------
    num_connection_tryouts : int
        Maximum number of connection attempts before giving up.
    """
    for attempt in range(num_connection_tryouts):
        try:
            self.version
            break
        except errors.DPFServerException as e:
            transient = "GOAWAY" in str(e.args) or "unavailable" in str(e.args)
            if not transient or attempt == num_connection_tryouts - 1:
                # Bare raise preserves the original traceback.
                raise

@property
def version(self):
from ansys.dpf.gate import data_processing_capi, integral_types
Expand Down
5 changes: 5 additions & 0 deletions ansys/dpf/core/vtk_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,3 +226,8 @@ def compute_offset():
offset = compute_offset()

return pv.UnstructuredGrid(offset, cells, vtk_cell_type, nodes)


def vtk_update_coordinates(vtk_grid, coordinates_array):
    """Assign a shallow copy of ``coordinates_array`` to the grid's ``points``.

    A copy is made so later mutation of the source array does not silently
    alter the grid's node coordinates.
    """
    import copy
    vtk_grid.points = copy.copy(coordinates_array)
25 changes: 25 additions & 0 deletions docs/make.bat
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ set SOURCEDIR=source
set BUILDDIR=build

if "%1" == "" goto help
if "%1" == "clean" goto clean

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
Expand All @@ -28,6 +29,30 @@ if errorlevel 9009 (
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:clean
REM Remove all generated documentation outputs, but preserve the example
REM plugins folder by parking it in source\_temp across the deletion.
echo.Cleaning files from previous build...
IF EXIST "build" (
	rmdir "build" /s /q
)
IF EXIST "source\images\auto-generated" (
	rmdir "source\images\auto-generated" /s /q
)
REM Save the plugins folder before wiping the generated examples.
IF EXIST "source\examples\07-python-operators\plugins" (
	robocopy "source\examples\07-python-operators\plugins" "source\_temp\plugins" /E >nul 2>&1
)
IF EXIST "source\examples" (
	rmdir "source\examples" /s /q
)
REM Restore the plugins folder, then drop the temporary copy.
IF EXIST "source\_temp\plugins" (
	robocopy "source\_temp\plugins" "source\examples\07-python-operators\plugins" /E >nul 2>&1
)
IF EXIST "source\_temp" (
	rmdir "source\_temp" /s /q
)

echo.Done.
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

Expand Down
37 changes: 34 additions & 3 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@

autosummary_generate = True

autodoc_mock_imports = ["ansys.dpf.core.examples.python_plugins"]

# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
Expand All @@ -85,7 +86,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
language = "en"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
Expand All @@ -99,11 +100,35 @@
# -- Sphinx Gallery Options
from sphinx_gallery.sorting import FileNameSortKey


def reset_servers(gallery_conf, fname, when):
    """Sphinx-Gallery reset hook: shut down DPF servers and report leftovers.

    Collects garbage, shuts down all session servers, then counts any
    remaining ``Ans.Dpf.Grpc`` processes and prints the tally.
    """
    import gc

    import psutil
    from ansys.dpf.core import server

    gc.collect()
    server.shutdown_all_session_servers()

    target = "Ans.Dpf.Grpc"
    count = 0
    for process in psutil.process_iter():
        try:
            # Check whether the process name matches.
            matches = target in process.name()
        except psutil.NoSuchProcess:
            # Process exited between iteration and the name lookup; skip it.
            continue
        if matches:
            # Counting only — killing (process.kill()) is deliberately disabled.
            count += 1
    print(f"Counted {count} {target} processes {when} the example.")


sphinx_gallery_conf = {
# convert rst to md for ipynb
"pypandoc": True,
# path to your examples scripts
"examples_dirs": ["../../examples"],
# abort build at first example error
'abort_on_example_error': True,
# path where to save gallery generated examples
"gallery_dirs": ["examples"],
# Pattern to search for example files
Expand All @@ -118,18 +143,24 @@
# 'first_notebook_cell': ("%matplotlib inline\n"
# "from pyvista import set_plot_theme\n"
# "set_plot_theme('document')"),
"reset_modules_order": 'both',
"reset_modules": (reset_servers,),
}

autodoc_member_order = "bysource"


# -- Options for HTML output -------------------------------------------------
html_short_title = html_title = "PyDPF-Core"
html_theme = "ansys_sphinx_theme"
html_logo = pyansys_logo_black
html_theme_options = {
"github_url": "https://github.com/pyansys/DPF-Core",
"github_url": "https://github.com/pyansys/pydpf-core",
"show_prev_next": False,
"logo_link": "https://dpfdocs.pyansys.com/" # navigate to the main page
"show_breadcrumbs": True,
"additional_breadcrumbs": [
("PyAnsys", "https://docs.pyansys.com/"),
],
}


Expand Down
2 changes: 1 addition & 1 deletion docs/source/user_guide/custom_operators.rst
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ For a plugin that is a single script, the second argument should be ``py_`` plus
"py_custom_plugin", #if the load_operators function is defined in path/to/plugins/custom_plugin.py
"load_operators")
For a plug-in package, the second argument should be ``_py`` plus any name:
For a plug-in package, the second argument should be ``py_`` plus any name:

.. code::
Expand Down
4 changes: 2 additions & 2 deletions docs/source/user_guide/operators.rst
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ You can create data sources in two ways:


Because several other examples use the ``Model`` class, this example uses the
``DataSources``class:
``DataSources`` class:

.. code-block:: python
Expand Down Expand Up @@ -168,7 +168,7 @@ can also be connected to work on a temporal subset:
Evaluate operators
~~~~~~~~~~~~~~~~~~
With all the required inputs assigned, you can output the :class:`ansys.dpf.core.fields_container`_
With all the required inputs assigned, you can output the :class:`ansys.dpf.core.fields_container`
class from the operator:

.. code-block:: python
Expand Down
11 changes: 6 additions & 5 deletions examples/00-basic/03-create_entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def search_sequence_numpy(arr, seq):

###############################################################################
# Add nodes:
id = 1
n_id = 1
for i, x in enumerate(
[
float(i) * length / float(num_nodes_in_length)
Expand All @@ -58,8 +58,8 @@ def search_sequence_numpy(arr, seq):
for i in range(0, num_nodes_in_depth)
]
):
mesh.nodes.add_node(id, [x, y, z])
id += 1
mesh.nodes.add_node(n_id, [x, y, z])
n_id += 1

###############################################################################
# Get the nodes' coordinates field:
Expand All @@ -75,7 +75,7 @@ def search_sequence_numpy(arr, seq):

###############################################################################
# Add solid elements (linear hexa with eight nodes):
id = 1
e_id = 1
for i, x in enumerate(
[
float(i) * length / float(num_nodes_in_length)
Expand Down Expand Up @@ -111,7 +111,8 @@ def search_sequence_numpy(arr, seq):
tmp = connectivity[6]
connectivity[6] = connectivity[7]
connectivity[7] = tmp
mesh.elements.add_solid_element(id, connectivity)
mesh.elements.add_solid_element(e_id, connectivity)
e_id += 1
mesh.plot()

###############################################################################
Expand Down
2 changes: 1 addition & 1 deletion examples/03-advanced/04-extrapolation_stress_3d.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@
###############################################################################
# Nodal stress result of integration points
###############################################################################
# The MAPLD command ``ERESX,NO``is used to copy directly the
# The MAPLD command ``ERESX,NO`` is used to copy directly the
# Gaussian (integration) points results to the nodes, instead of the
# results at nodes or elements (which are interpolation of results at a
# few gauss points).
Expand Down
2 changes: 1 addition & 1 deletion examples/03-advanced/05-extrapolation_strain_2d.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
###############################################################################
# Extrapolate from integration points for elastic strain result
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example uses the ``gauss_to_node_fc``operator to compute nodal component
# This example uses the ``gauss_to_node_fc`` operator to compute nodal component
# elastic strain results from the elastic strain at the integration points.

# Create elastic strain operator to get strain result of integration points
Expand Down
2 changes: 1 addition & 1 deletion examples/03-advanced/10-asme_secviii_divtwo.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
#
# - ``alfasl`` = input("Introduce ``alfasl`` parameter from ASME\n")
# - ``alfasl`` = float(alfasl)
# -``m2`` = input("Introduce ``m2`` parameter from ASME\n")
# - ``m2`` = input("Introduce ``m2`` parameter from ASME\n")
# - ``m2`` = float(m2)
#
# For this exercise, ``alfasl`` = 2.2 and ``m2`` = .288, which is the same
Expand Down
Loading

0 comments on commit 3a68864

Please sign in to comment.