diff --git a/doc/source/_static/custom.css b/doc/source/_static/custom.css
index c621c7b6474..f2239c4e7f3 100644
--- a/doc/source/_static/custom.css
+++ b/doc/source/_static/custom.css
@@ -2,3 +2,16 @@
 .wy-table-responsive table td, .wy-table-responsive table th {
     white-space: normal;
 }
+/* Check https://www.w3schools.com/cssref/css_colors.php for colors */
+/* Ansys gold for MAPDL with black text */
+.sd-bg-mapdl{background-color: #FFB71B}
+.sd-bg-text-mapdl{color: Black}
+/* Ansys orange accent color for LS-DYNA with black text */
+.sd-bg-lsdyna{background-color: #FB471F}
+.sd-bg-text-lsdyna{color: Black}
+/* Ansys blue accent color #0081D0 for Fluent with black text */
+.sd-bg-fluent{background-color: #0081D0}
+.sd-bg-text-fluent{color: Black}
+.sd-bg-cfx{background-color: LightSeaGreen}
+.sd-bg-text-cfx{color: Black}
+.sd-hide-link-text{height: 0}
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 5589e4a33a8..3293be97668 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -19,6 +19,7 @@
 # Make sphinx_utilities modules importable
 sys.path.append(os.path.join(os.path.dirname(__file__), "../sphinx_utilities"))
+from version_filtering import get_tutorial_version_requirements
 
 # Manage errors
 pyvista.set_error_output_file("errors.txt")
@@ -59,13 +60,12 @@
 )
 server_version = server_instance.version
 server.shutdown_all_session_servers()
-print(f"DPF version: {server_version}")
-print(f"DPF install: {server_instance.ansys_path}")
+print("".rjust(40, '*'))
+print(f"Doc built for DPF server version {server_version} at:\n{server_instance.ansys_path}")
+print("".rjust(40, '*'))
 
 # Build ignore pattern
 ignored_pattern = r"(ignore"
-header_flag = "\"\"\""
-note_flag = r".. note::"
 for example in sorted(glob(r"../../examples/**/*.py")):
     minimum_version_str = get_example_required_minimum_dpf_version(example)
     if float(server_version) - float(minimum_version_str) < -0.05:
@@ -76,6 +76,15 @@
 ignored_pattern += "|06-distributed_stress_averaging.py"
 ignored_pattern += r")"
 
+exclude_patterns = []
+for tutorial_file in glob(str(Path("user_guide")/"tutorials"/"**"/"*.rst")):
+    if Path(tutorial_file).name == "index.rst":
+        continue
+    minimum_version_str = get_tutorial_version_requirements(tutorial_file)
+    if float(server_version) - float(minimum_version_str) < -0.05:
+        print(f"Tutorial {Path(tutorial_file).name} skipped as it requires DPF {minimum_version_str}.")
+        exclude_patterns.append(tutorial_file.replace("\\", "/"))
+
 # Autoapi ignore pattern
 autoapi_ignore_list = [
     "*/log.py",
@@ -118,6 +127,7 @@
     "sphinx_design",
     "sphinx_jinja",
     'sphinx_reredirects',
+    "jupyter_sphinx",
 ]
 
 redirects = {
@@ -137,6 +147,7 @@
 
 autosummary_generate = False
 
+autodoc_mock_imports = ["ansys.dpf.core.examples.python_plugins"]
 
 # Add any paths that contain templates here, relative to this directory.
 # templates_path = ['_templates']
@@ -160,7 +171,14 @@
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = []
+exclude_patterns.extend(["links_and_refs.rst"])
+
+# Make rst_epilog a variable, so you can add other epilog parts to it
+rst_epilog = ""
+
+# Read links and targets from file
+with open("links_and_refs.rst") as f:
+    rst_epilog += f.read()
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = None
@@ -358,6 +376,20 @@
 # A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"] +# Define custom docutils roles for solver badges +from sphinx_design.badges_buttons import BadgeRole + +def setup(app): + badge_roles = { + "bdg-mapdl": "mapdl", + "bdg-cfx": "cfx", + "bdg-fluent": "fluent", + "bdg-lsdyna": "lsdyna" + } + + for role_name, color in badge_roles.items(): + app.add_role(name=role_name, role=BadgeRole(color=color)) + # Common content for every RST file such us links rst_epilog = "" links_filepath = Path(__file__).parent.absolute() / "links.rst" @@ -387,3 +419,5 @@ BUILD_EXAMPLES = True if os.environ.get("BUILD_EXAMPLES", "true") == "true" else False if BUILD_EXAMPLES: extensions.extend(["sphinx_gallery.gen_gallery"]) + +print(f"{extensions=}") diff --git a/doc/source/getting_started/contribute/developer.rst b/doc/source/getting_started/contribute/developer.rst index fb1a7f3b2dc..dc22c3b6afb 100644 --- a/doc/source/getting_started/contribute/developer.rst +++ b/doc/source/getting_started/contribute/developer.rst @@ -1,27 +1,14 @@ +.. _contributing_as_a_developer: + Contributing as a developer ########################### -.. grid:: 1 2 3 3 - :padding: 2 2 2 2 - - .. grid-item-card:: :fa:`download` Clone the repository - :link: clone-the-repository - :link-type: ref - - Download your own copy in your local machine. - - .. grid-item-card:: :fa:`download` Install for developers - :link: install-for-developers - :link-type: ref - - Install the project in editable mode. - - .. grid-item-card:: :fa:`vial-circle-check` Run the tests - :link: run-tests - :link-type: ref - - Verify your changes by testing the project. +You can contribute to PyDPF-Core by fixing bugs, adding new features, and improving the codebase. +To do so, you must set up the repository on your local machine by following the steps below: +- :ref:`clone-the-repository` +- :ref:`install-for-developers` +- :ref:`run-tests` .. _clone-the-repository: @@ -44,9 +31,13 @@ Installing PyDPF-Core in development mode allows you to perform changes to the c and see the changes reflected in your environment without having to reinstall the library every time you make a change. +To do so, follow the steps below. + Virtual environment ------------------- +First, set up a new virtual environment. + Start by navigating to the project's root directory by running: .. code-block:: bash @@ -117,7 +108,9 @@ Install Tox Once the project is installed, you can install `Tox`_. This is a cross-platform automation tool. The main advantage of Tox is that it eases routine tasks like project testing, documentation generation, and wheel building in separate and isolated Python -virtual environments. To install Tox, run: +virtual environments. + +To install Tox, run: .. code-block:: text @@ -154,10 +147,14 @@ Run the tests ============= Once you have made your changes, you can run the tests to verify that your -modifications did not break the project. PyDPF-Core tests are organized into groups and require additional steps +modifications did not break the project. + +PyDPF-Core tests are organized into groups and require additional steps during execution to ensure tests run as expected without errors, therefore, PyDPF-Core tox configuration supports different markers to account for this. These markers are associated with a -dedicated `Tox`_ environment. To also allow flexibity required during development, different DPF Server installation +dedicated `Tox`_ environment. 
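+
+As a minimal illustration (the ``test-api`` environment name is an assumed example of such a
+marker-specific environment, not an exhaustive reference), running a single test group looks like:
+
+.. code-block:: text
+
+    python -m tox -e test-api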
+
+To also allow flexibility required during development, a specific DPF Server installation
 can also be used as explained in the subsections that follow.
 
 Unified DPF Server installation or specific DPF Server installation using ANSYS_DPF_PATH environment variable
diff --git a/doc/source/getting_started/contribute/documentarian.rst b/doc/source/getting_started/contribute/documentarian.rst
index 402c8c23366..384c54b5f23 100644
--- a/doc/source/getting_started/contribute/documentarian.rst
+++ b/doc/source/getting_started/contribute/documentarian.rst
@@ -1,20 +1,35 @@
-Contributing as a documentarian
-###############################
+.. _contributing_documentation:
+
+Contributing to the documentation
+#################################
+
+.. note::
+
+    Overall guidance on contributing to the documentation of a PyAnsys repository appears in
+    `Documenting`_ in the *PyAnsys Developer's Guide*.
+
+    You must also follow the `Documentation style`_ guide to
+    ensure that all the documentation looks the same across the project.
+
+To contribute to the documentation, you must start by setting up the PyDPF-Core repository
+by following the steps in the :ref:`contributing_as_a_developer` section.
+
+This page explains how to:
 
 .. grid:: 1 2 3 3
    :padding: 2 2 2 2
 
-   .. grid-item-card:: :fa:`pencil` Write documentation
-      :link: write-documentation
+   .. grid-item-card:: :fa:`th` Structure the documentation
+      :link: structure-documentation
       :link-type: ref
 
-      Explain how to get started, use, and contribute to the project.
+      How the documentation is structured and where to locate files.
 
-   .. grid-item-card:: :fa:`laptop-code` Add a new example
-      :link: write-examples
+   .. grid-item-card:: :fa:`pencil` Write documentation
+      :link: write-product-use-documentation
       :link-type: ref
 
-      Showcase the capabilities of PyDPF-Core by adding a new example.
+      Explains and showcases the use of PyDPF-Core.
 
    .. grid-item-card:: :fa:`book` Build the documentation
      :link: build-documentation
@@ -22,16 +37,13 @@
 
      Render the documentation to see your changes reflected.
 
-.. _write-documentation:
+.. _structure-documentation:
 
-Write documentation
-===================
+Structure the documentation
+===========================
 
 The documentation generator used in PyDPF-Core is `Sphinx`_. Most of the documents
-are written in `reStructuredText`_. Some parts of the documentation, like the
-:ref:`examples `, use a mix of `reStructuredText`_ and Python, thanks to `Sphinx-Gallery`_.
-If you are interested in writing examples, see the :ref:`writing examples `
-section.
+are written in `reStructuredText`_.
 
 The documentation is located in the ``doc/source`` directory. The landing page is
 declared in the ``doc/source/index.rst`` file. The rest of the files contain
@@ -42,7 +54,7 @@
 files. The layout of the ``doc/source`` directory is reflected in the slug of the online
 documentation. For example, the
 ``doc/source/getting_started/contribute/documentarian.rst`` renders as
-``https://dpf.docs.pyansys.com/getting_started/contribute/documentarian.html``.
+``https://dpf.docs.pyansys.com/getting_started/contribute/documentarian.html``. 
 
 Thus, if you create a new file, it important to follow these rules:
 
@@ -70,38 +82,75 @@
 A table of contents can be declared using a directive like this:
 
 The path to the file is relative to the directory where the table of contents is
 declared.
 
-.. _write-examples:
+.. 
_write-product-use-documentation:
 
-Write a new example
+Write documentation
 ===================
 
-The :ref:`examples ` section of the documentation showcases different
-capabilities of PyDPF-Core. Each example (grouped into folders of related examples)
-is a standalone Python script. Despite being ``*.py`` files, they are written in a mix
-of `reStructuredText`_ and Python. This is possible thanks to the `Sphinx-Gallery`_
-Sphinx extension.
+Our documentation follows a structuring principle that distinguishes four different functions of the documentation.
+Each of them fulfills a different need for people working with our tool at different times, in different circumstances.
+
+Here is an overview of how our documentation is organized to help you know where you should include your contributions.
+Each section has its own guidelines that must be followed when creating new content.
+To check these specific guidelines, click on the corresponding card below.
+
+.. grid:: 1 1 2 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
 
-Documentarians writing new examples are encouraged to familiarize themselves with
-`structuring Python scripts for Sphinx-Gallery `_.
-Once the ``.py`` file for a new example is properly set up, Sphinx-Gallery automatically
-generates `Sphinx`_ `reStructuredText`_ files from it. The rendering of the resulting reST will provide
-users with ``.ipynb`` (Jupyter notebook) and ``.py`` files of each example, which users can download.
+    .. grid-item-card:: **TUTORIALS**
+        :link: ref_guidelines_tutorials
+        :link-type: ref
+        :class-title: sd-text-center sd-bg-light
+        :class-header: sd-text-center
 
-Finally, here are some tips for writing examples:
+        Learning oriented
+        ^^^^^^^^^^^^^^^^^
 
-- Start the example with an explanation of the main topic. Try to use as many relevant
-  keywords as possible in this section to optimize for Search Engine Optimization.
+        **Function:** Teach how to get started and use PyDPF-Core step by step
 
-- Include an explanation with each code cell. The explanations should
-  be included before, not after, the corresponding code.
+        Teach how to perform a task and showcase the underlying concepts,
+        providing detailed explanations at each stage. A tutorial is centered around a given feature.
 
-- The examples are built with the documentation. As part of the build process,
-  screenshots of rendered graphics are inserted in the document. You do not need
-  to include the screenshots yourself.
+    .. grid-item-card:: **EXAMPLES**
+        :link: ref_guidelines_examples
+        :link-type: ref
+        :class-title: sd-text-center sd-bg-light
+        :class-header: sd-text-center
 
-- When creating a new folder where more than one related example will be included, ensure
-  a ``README.txt`` file is also included. This file should contain reST to be used as the header
-  for the index page corresponding to the subsection for these examples in the generated documentation.
+        Use-case oriented
+        ^^^^^^^^^^^^^^^^^
+
+        **Function:** Show how to solve specific key problems
+
+        Showcase a specific key problem or use-case with a complete PyDPF script. They are more advanced than
+        tutorials as they present end-to-end engineering workflows and assume basic knowledge of PyDPF-Core.
+
+    .. 
grid-item-card:: **CONCEPTS**
+        :class-title: sd-text-center sd-bg-light
+        :class-header: sd-text-center
+
+        Understanding oriented
+        ^^^^^^^^^^^^^^^^^^^^^^
+
+        **Function:** Provide useful theoretical explanations for PyDPF-Core
+
+        Discuss and explain key DPF principles and concepts, so that the reader understands the spirit of the underlying tool.
+
+
+    .. grid-item-card:: **API REFERENCE**
+        :class-title: sd-text-center sd-bg-light
+        :class-header: sd-text-center
+
+        Informing oriented
+        ^^^^^^^^^^^^^^^^^^
+
+        **Function:** Describe PyDPF-Core APIs
+
+        Provides technical reference on how PyDPF-Core works and how to use it, but assumes basic
+        understanding of key DPF concepts. It is generated automatically alongside the documentation and
+        is based on the source code.
 
 .. _build-documentation:
 
@@ -162,3 +211,10 @@
 are modified.
 
 .. code-block:: text
 
     python -m tox -e doc-html -x testenv:doc-html.setenv+="BUILD_API=false" -x testenv:doc-html.setenv+="BUILD_EXAMPLES=false"
+
+.. toctree::
+    :hidden:
+    :maxdepth: 3
+
+    guidelines_tutorials
+    guidelines_examples
\ No newline at end of file
diff --git a/doc/source/getting_started/contribute/guidelines_examples.rst b/doc/source/getting_started/contribute/guidelines_examples.rst
new file mode 100644
index 00000000000..8d8fc4005b7
--- /dev/null
+++ b/doc/source/getting_started/contribute/guidelines_examples.rst
@@ -0,0 +1,37 @@
+.. _ref_guidelines_examples:
+
+=================
+Writing examples
+=================
+
+The documentation generator used for PyDPF-Core is `Sphinx`_. Most of the documents
+are written in `reStructuredText`_. Some parts of the documentation, like the
+:ref:`examples `, use a mix of `reStructuredText`_ and Python, thanks to `Sphinx-Gallery`_.
+
+The :ref:`examples ` section of the documentation showcases different
+capabilities of PyDPF-Core. Each example (grouped into folders of related examples)
+is a standalone Python script. Despite being ``*.py`` files, they are written in a mix
+of `reStructuredText`_ and Python. This is possible thanks to the `Sphinx-Gallery`_
+Sphinx extension.
+
+Documentarians writing new examples are encouraged to familiarize themselves with
+`structuring Python scripts for Sphinx-Gallery `_.
+Once the ``.py`` file for a new example is properly set up, Sphinx-Gallery automatically
+generates `Sphinx`_ `reStructuredText`_ files from it. The rendering of the resulting reST will provide
+users with ``.ipynb`` (Jupyter notebook) and ``.py`` files for each example, which users can download.
+
+Finally, here are some tips for writing examples:
+
+- Start the example with an explanation of the main topic. Try to use as many relevant
+  keywords as possible in this section to optimize for Search Engine Optimization.
+
+- Include an explanation with each code cell. The explanations should
+  be included before, not after, the corresponding code.
+
+- The examples are built with the documentation. As part of the build process,
+  screenshots of rendered graphics are inserted in the document. You do not need
+  to include the screenshots yourself.
+
+- When creating a new example folder, ensure
+  a ``README.txt`` file is also included. This file should contain reST to be used as the header
+  for the index page corresponding to the subsection for these examples in the generated documentation,
+  as sketched below.
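+
+As an illustration, a minimal ``README.txt`` for a hypothetical folder of new
+dynamic-analysis examples could contain a reST header like this (the title and
+description are placeholders):
+
+.. code-block:: rst
+
+    Dynamic analysis examples
+    =========================
+
+    These examples show how to post-process dynamic analyses with PyDPF-Core.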
\ No newline at end of file
diff --git a/doc/source/getting_started/contribute/guidelines_tutorials.rst b/doc/source/getting_started/contribute/guidelines_tutorials.rst
new file mode 100644
index 00000000000..e24617bc268
--- /dev/null
+++ b/doc/source/getting_started/contribute/guidelines_tutorials.rst
@@ -0,0 +1,699 @@
+.. _ref_guidelines_tutorials:
+
+=================
+Writing tutorials
+=================
+
+.. include:: ../../links_and_refs.rst
+
+You can improve the PyDPF-Core documentation by adding a:
+
+- :ref:`New tutorials section <ref_guidelines_add_new_tutorial_section>`;
+- :ref:`New tutorial <ref_guidelines_add_new_tutorial>`.
+
+To do so, you must follow the guidelines presented here.
+
+You also need to understand the structure of the ``doc`` directory of the PyDPF-Core library:
+
+.. code-block::
+
+    .
+    ├── doc
+    │   ├── source
+    │   │   ├── api
+    │   │   ├── examples
+    │   │   ├── getting_started
+    │   │   ├── images
+    │   │   ├── user_guide
+    │   │   ├── conf.py
+    │   │   ├── index.rst
+    │   ├── styles
+    │   ├── make.bat
+
+
+Tutorials are located in the ``doc/source/user_guide`` directory.
+
+----
+
+.. _ref_guidelines_add_new_tutorial_section:
+
+=============================
+Adding a new tutorial section
+=============================
+
+:download:`Download the new tutorial section template <tutorial_section_template.rst>`
+
+.. note::
+
+    Avoid creating new folders unless absolutely necessary.
+    When in doubt, mention the location of the new section in the pull request for approval.
+    If you must create a new folder, make sure to add an ``index.rst`` file with a reference, a title, and a description of the section.
+    The documentation ignores folders lacking this file.
+
+Location and naming
+-------------------
+
+The new tutorial section must reside in a new folder such as ``doc/source/user_guide/tutorials/new_section``.
+
+.. code-block::
+
+    .
+    ├── doc
+    │   ├── source
+    │   │   ├── user_guide
+    │   │   │   ├── tutorials
+    │   │   │   │   ├── new_section
+
+Structure
+---------
+
+The section folder must contain an ``index.rst`` file with:
+
+- a reference tag for referencing this section in other parts of the documentation,
+- a title for the tutorial section,
+- a general description of the topics covered in the tutorials in this section,
+- cards with links to the tutorials, including titles, descriptions, and applicable solvers,
+- a toctree so that the tutorials in the section appear in the navigation pane.
+
+.. literalinclude:: tutorial_section_template.rst
+
+You must reference the new section ``index.rst`` file in the main user guide page toctree
+for it to appear in the sidebar of the user guide main page. You can find this toctree
+at the end of the ``doc/source/user_guide/index.rst`` file.
+For example:
+
+.. code-block::
+
+    .. toctree::
+        :maxdepth: 2
+        :hidden:
+        :caption: Tutorials
+
+        tutorials/section_x/index.rst
+        tutorials/section_y/index.rst
+        tutorials/section_z/index.rst
+        tutorials/new_section/index.rst
+
+----
+
+.. _ref_guidelines_add_new_tutorial:
+
+=====================
+Adding a new tutorial
+=====================
+
+:download:`Download the tutorial card template <tutorial_card_template.rst>`
+:download:`Download the tutorial structure template <tutorial_structure_template.rst>`
+:download:`Download the tutorial content formatting template <tutorial_content_template.rst>`
+
+Location and naming
+-------------------
+
+New tutorials correspond to new ``.rst`` files in tutorial section folders,
+for example: ``doc/source/user_guide/tutorials/section/new_tutorial.rst``
+
+.. code-block::
+
+    .
+    ├── doc
+    │   ├── source
+    │   │   ├── user_guide
+    │   │   │   ├── tutorials
+    │   │   │   │   ├── section
+    │   │   │   │   │   ├── new_tutorial.rst
+
+You must also add a new card in the ``index.rst`` file for the tutorial section as well as modify
+its toctree. The card must include:
+
+- a tutorial title,
+- a short description,
+- badges for the applicable solvers,
+- a link (in this case, the reference tag) to the tutorial file.
+
+.. topic:: Card example
+
+    .. card:: Tutorial title
+        :text-align: center
+        :width: 25%
+
+        Short description of the tutorial
+
+        +++
+        :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+Structure
+---------
+
+The tutorial is divided into two main parts:
+
+- :ref:`Header <ref_guidelines_tutorial_header>`
+- :ref:`Content <ref_guidelines_tutorial_content>`
+
+.. _ref_guidelines_tutorial_header:
+
+Header
+^^^^^^
+
+This first part is essential for the clarity, organization, and usability of the tutorial. It establishes the purpose
+of the tutorial, making it easier to understand what is going to be explained and to reference it within the other parts of
+the documentation.
+
+The header must have:
+
+- a reference tag,
+- a tutorial title,
+- any substitution text for references to the PyDPF-Core library used in the tutorial,
+- a short description (same as for the tutorial card in the tutorial section),
+- an introduction,
+- download buttons for Python script and Jupyter notebook versions of the tutorial.
+
+.. literalinclude:: tutorial_structure_template.rst
+    :end-before: First Step
+
+The main PyDPF-Core library references are available in the ``doc/source/links_and_refs.rst`` file.
+To add a reference, use the substitution text as usual:
+
+.. code-block::
+
+    .. _ref_tutorial_template:
+
+
+    ==============
+    Tutorial title
+    ==============
+
+    Here is some text. Here we use the |MeshedRegion| substitution text
+
+For more information about the predefined references, see the
+:download:`links and references file <../../links_and_refs.rst>`.
+
+.. _ref_guidelines_tutorial_content:
+
+Content
+^^^^^^^
+
+The goal of a tutorial is to present a feature or explain how to perform a common task step by step while explaining a behavior or underlying concepts.
+Thus, its structure must prioritize clarity, simplicity, and logical flow.
+
+Sections
+~~~~~~~~
+
+A well-organized tutorial breaks down complex tasks into manageable steps, presenting information incrementally
+to avoid overwhelming the user. It combines concise explanations with actionable instructions, ensuring users
+can follow along easily while building their understanding.
+
+Thus, the sections of the content are the steps themselves. These steps are generally similar to:
+
+#. A first step where you get some data and create DPF objects based on the data;
+#. One or more steps where you manipulate the data or the DPF objects;
+#. A final step where you reach the objective of the tutorial and obtain the expected result.
+
+For example:
+
+A tutorial explains how to plot a mesh using PyDPF-Core.
+The steps to achieve this task are:
+
+#. Import a result file;
+#. Extract the mesh;
+#. Plot the mesh.
+
+To create those sections, underline each section title with the appropriate headline characters (here: ``-``).
+
+.. code-block::
+
+    Import result file
+    ------------------
+
+    First, you ...
+
+
+    Extract the mesh
+    ----------------
+
+    Then, you extract ...
+
+
+    Plot the mesh
+    -------------
+
+    Finally, you plot ...
+
+Tabs
+~~~~
+
+You must use tabs when a step requires a solver-specific implementation.
+
+These tabs look like:
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        Explanation 1 ...
+
+        .. jupyter-execute::
+
+            # Code block 1
+
+    .. tab-item:: LSDYNA
+
+        Explanation 2 ...
+
+        .. jupyter-execute::
+
+            # Code block 2
+
+    .. tab-item:: Fluent
+
+        Explanation 3 ...
+
+        .. jupyter-execute::
+
+            # Code block 3
+
+    .. tab-item:: CFX
+
+        Explanation 4 ...
+
+        .. jupyter-execute::
+
+            # Code block 4
+
+
+You can also use tabs if you want to show different approaches to one step and having the code blocks
+in different tabs is clearer. You can see an example of this in the
+:ref:`ref_tutorials_animate_time` tutorial.
+
+
+Code blocks
+~~~~~~~~~~~
+
+The tutorials must include code blocks that show the actual implementation.
+In addition to the guidelines presented here, you must also follow the `Coding style <dev_guide_coding_style_>`_
+guide to ensure that all code looks the same across the project.
+
+- Use the `jupyter sphinx <jupyter_sphinx_ext_>`_ extension to show code blocks. It executes embedded code in
+  a Jupyter kernel and embeds outputs of that code in the document:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            .. jupyter-execute::
+
+                # This is an executable code block
+                from ansys.dpf import core as dpf
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            .. code-block::
+
+                # This is a simple code block
+                from ansys.dpf import core as dpf
+
+- Use comments within a code block to clarify the purpose of a line:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            # Define the model
+            model = dpf.Model()
+
+            # Get the stress results
+            stress_fc = model.results.stress.eval()
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            model = dpf.Model()
+            stress_fc = model.results.stress.eval()
+
+- Split your code into several parts to include longer explanations in text format or to force showing an intermediate code output:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        Explanation for a first code block and its output
+
+        .. code-block::
+
+            # Code comment 1
+            code1
+
+        Explanation for a second code block and its output
+
+        .. code-block::
+
+            # Code comment 2
+            code2
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        A single broad explanation for two steps with outputs mixed together
+
+        .. code-block::
+
+            # First explanation
+            # Code comment 1
+            code1
+
+            # Second explanation
+            # Code comment 2
+            code2
+
+- When using a PyDPF-Core object or method, you must name the arguments:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            # Get the stress results
+            stress_fc = model.results.stress(time_scoping=time_steps).eval()
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            # Get the stress results
+            stress_fc = model.results.stress(time_steps).eval()
+
+- When quoting APIs in the code comments, you must always use their scripting name. Mind the use of
+  a capital letter when naming the DPF objects:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            # Define the DataSources object
+            ds = dpf.DataSources()
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. 
code-block::
+
+            # Define the data sources object
+            ds = dpf.DataSources()
+
+        .. code-block::
+
+            # Define the Data Sources object
+            ds = dpf.DataSources()
+
+- Use blank lines between code lines for better clarity.
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            # Define the result file path
+            result_file_path_1 = '/tmp/file.rst'
+
+            # Define the DataSources object
+            ds_1 = dpf.DataSources(result_path=result_file_path_1)
+
+            # Create a Model
+            model_1 = dpf.Model(data_sources=ds_1)
+
+            # Get the stress results
+            stress_fc = model_1.results.stress.eval()
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            # Define the result file path
+            result_file_path_1 = '/tmp/file.rst'
+            # Define the DataSources object
+            ds_1 = dpf.DataSources(result_path=result_file_path_1)
+            # Create a Model
+            model_1 = dpf.Model(data_sources=ds_1)
+            # Get the stress results
+            stress_fc = model_1.results.stress.eval()
+
+- Avoid giving variables the same name as an argument or an API. You can get inspiration from the
+  tutorials available at :ref:`ref_tutorials`.
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            # Define the result file path
+            result_file_path = '/tmp/file.rst'
+
+            # Define the DataSources object
+            ds = dpf.DataSources(result_path=result_file_path)
+
+            # Create a Model
+            my_model = dpf.Model(data_sources=ds)
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            # Define the result file path
+            result_path = '/tmp/file.rst'
+
+            # Define the DataSources object
+            data_sources = dpf.DataSources(result_path=result_path)
+
+            # Create a Model
+            model = dpf.Model(data_sources=data_sources)
+
+Text formatting
+~~~~~~~~~~~~~~~
+
+In addition to the guidelines presented here, you must also follow the `Documentation style <dev_guide_doc_style_>`_
+guide to ensure that the tutorials follow a coherent writing style across the project.
+
+- When quoting APIs in the text, you must always use a reference that redirects to the API reference:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            Here we use the |MeshedRegion| substitution text
+
+        **Rendered text:**
+
+        Here is some text. Here we use the |MeshedRegion| substitution text
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            Here we do not use the MeshedRegion substitution text
+
+        **Rendered text:**
+
+        Here is some text. Here we do not use the MeshedRegion substitution text
+
+- Use bullet lists when enumerating items:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            This operator accepts as arguments:
+
+            - A Result
+            - An Operator
+            - A FieldsContainer
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            This operator accepts a Result, an Operator or a
+            FieldsContainer as arguments.
+
+- Use a numbered list for ordered items:
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            To extract the mesh you need to follow those steps:
+
+            #. Get the result file;
+            #. Create a Model;
+            #. Get the MeshedRegion.
+
+        The ``#.`` renders as a numbered list.
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            To extract the mesh you need to follow those steps:
+
+            - Get the result file;
+            - Create a Model;
+            - Get the MeshedRegion.
+
+- If you need to develop explanations for each item of the list, first, enumerate and reference them. Then,
+  explore each of them separately in subheadings.
+
+.. grid:: 2
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card::
+
+        :octicon:`check-circle-fill` **Correct**
+
+        .. code-block::
+
+            Section title
+            -------------
+
+            This section presents two items:
+
+            - :ref:`Item 1 <ref_tutorial_name_item_1>`
+            - :ref:`Item 2 <ref_tutorial_name_item_2>`
+
+
+            .. _ref_tutorial_name_item_1:
+
+            Item 1
+            ^^^^^^
+
+            Presentation of the first item...
+
+
+            .. _ref_tutorial_name_item_2:
+
+            Item 2
+            ^^^^^^
+
+            Presentation of the second item...
+
+    .. grid-item-card::
+
+        :octicon:`x-circle-fill` **Incorrect**
+
+        .. code-block::
+
+            Section title
+            -------------
+
+            This section presents two items:
+
+            - Item 1
+            - Item 2
+
+            Item 1
+            ^^^^^^
+            Presentation of the first item...
+
+            Item 2
+            ^^^^^^
+            Presentation of the second item...
+
+
+        .. code-block::
+
+            Section title
+            -------------
+
+            This section presents two items:
+
+            - Item 1
+              Presentation of the first item...
+
+
+            - Item 2
+              Presentation of the second item...
diff --git a/doc/source/getting_started/contribute/tutorial_card_template.rst b/doc/source/getting_started/contribute/tutorial_card_template.rst
new file mode 100644
index 00000000000..646a9e98c88
--- /dev/null
+++ b/doc/source/getting_started/contribute/tutorial_card_template.rst
@@ -0,0 +1,9 @@
+.. grid-item-card:: Tutorial title
+    :link: ref
+    :link-type: ref
+    :text-align: center
+
+    This tutorial ...
+
+    +++
+    :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
\ No newline at end of file
diff --git a/doc/source/getting_started/contribute/tutorial_content_template.rst b/doc/source/getting_started/contribute/tutorial_content_template.rst
new file mode 100644
index 00000000000..a9ab884794f
--- /dev/null
+++ b/doc/source/getting_started/contribute/tutorial_content_template.rst
@@ -0,0 +1,96 @@
+
+Tabs for different solvers
+--------------------------
+
+Showcase a different script for each supported solver:
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        Explanation ...
+
+        .. jupyter-execute::
+
+            # Code block
+
+    .. tab-item:: LSDYNA
+
+        Explanation ...
+
+        .. jupyter-execute::
+
+            # Code block
+
+    .. tab-item:: Fluent
+
+        Explanation ...
+
+        .. jupyter-execute::
+
+            # Code block
+
+    .. tab-item:: CFX
+
+        Explanation ...
+
+        .. jupyter-execute::
+
+            # Code block
+
+Bullet lists
+------------
+
+Enumerate something:
+
+- something 1;
+- something 2;
+- something 3.
+
+Enumerate something with a numbered list:
+
+#. something 1;
+#. something 2;
+#. something 3.
+
+Bullet lists with explanations between items
+--------------------------------------------
+
+Enumerate something and reference them to use each item as a subheading:
+
+- :ref:`Something 1 <ref_something_1>`;
+- :ref:`Something 2 <ref_something_2>`;
+- :ref:`Something 3 <ref_something_3>`.
+
+.. _ref_something_1:
+
+Something 1
+^^^^^^^^^^^
+
+Explanation 1
+
+.. jupyter-execute::
+
+    # Code block 1
+
+.. _ref_something_2:
+
+Something 2
+^^^^^^^^^^^
+
+Explanation 2
+
+.. jupyter-execute::
+
+    # Code block 2
+
+.. _ref_something_3:
+
+Something 3
+^^^^^^^^^^^
+
+Explanation 3
+
+.. 
jupyter-execute:: + + # Code block 3 diff --git a/doc/source/getting_started/contribute/tutorial_section_template.rst b/doc/source/getting_started/contribute/tutorial_section_template.rst new file mode 100644 index 00000000000..16ea42a6a32 --- /dev/null +++ b/doc/source/getting_started/contribute/tutorial_section_template.rst @@ -0,0 +1,28 @@ +.. _ref_tutorial_new_section_template: + +============= +Section title +============= + +These tutorials demonstrate how to ... + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Tutorial title + :link: ref + :link-type: ref + :text-align: center + + This tutorial ... + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + tutorial_file.rst \ No newline at end of file diff --git a/doc/source/getting_started/contribute/tutorial_structure_template.rst b/doc/source/getting_started/contribute/tutorial_structure_template.rst new file mode 100644 index 00000000000..8b7536c1cb1 --- /dev/null +++ b/doc/source/getting_started/contribute/tutorial_structure_template.rst @@ -0,0 +1,41 @@ +.. _ref_tutorial_template: + +============== +Tutorial title +============== + +.. |displacement_op| replace:: :class:`ansys.dpf.core.operators.result.displacement.displacement` + +A single sentence describing the goal of the tutorial, which must match the one on the tutorial card in the section page. + +Introduction to the tutorial. Here, you provide the necessary context or foundational information for understanding the tutorial. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +First Step +---------- + +First, you ... + +.. jupyter-execute:: + + # Code block 1 + +Second step +----------- + +Then, you ... + +.. jupyter-execute:: + + # Code block 2 + +Final Step +---------- + +Finally, you ... + +.. jupyter-execute:: + + # Code block 3 diff --git a/doc/source/getting_started/contribute/user.rst b/doc/source/getting_started/contribute/user.rst index 69c9dbb6088..6f85957fb1a 100644 --- a/doc/source/getting_started/contribute/user.rst +++ b/doc/source/getting_started/contribute/user.rst @@ -1,3 +1,5 @@ +.. _contributing_as_a_user: + Contributing as a user ###################### @@ -6,7 +8,9 @@ new features, testing in-development features, starting discussions, answering questions, and sharing their work with the community. .. grid:: 1 2 3 3 - :padding: 2 2 2 2 + :gutter: 2 + :padding: 2 + :margin: 2 .. grid-item-card:: :fa:`bug` Report bugs :link: report-bugs @@ -15,47 +19,43 @@ questions, and sharing their work with the community. Found a bug? Report it here. .. grid-item-card:: :fa:`lightbulb` Request a new feature - :padding: 2 2 2 2 :link: request-a-new-feature :link-type: ref Got an idea for a new feature? Share it! .. grid-item-card:: :fa:`vial-circle-check` Test a new feature - :padding: 2 2 2 2 :link: test-a-new-feature :link-type: ref Anxious to try out a new feature? Here's how you can do it. .. grid-item-card:: :fa:`comments` Start a discussion - :padding: 2 2 2 2 :link: start-a-discussion :link-type: ref Want to discuss something? Start a discussion here. .. grid-item-card:: :fa:`comment-dots` Answer questions - :padding: 2 2 2 2 :link: answer-questions :link-type: ref Help others by answering their questions. .. grid-item-card:: :fa:`bullhorn` Share your work - :padding: 2 2 2 2 :link: share-your-work :link-type: ref Share your work with the community. .. 
grid-item-card:: :fa:`book` View documentation
-      :padding: 2 2 2 2
       :link: view-documentation
       :link-type: ref
 
       View project documentation.
 
+----
+
 .. _report-bugs:
 
 Report bugs
 ===========
@@ -64,9 +64,9 @@
 If you encounter a bug or an issue while using the project, please report it.
 Your feedback helps to identify problems.
 
-- Search the `PyDPF-Core issues`_ to see if the issue has already been reported.
+- First, search the `PyDPF-Core issues`_ to see if the issue has already been reported.
 
-- Create a new issue if it hasn’t been reported.
+- If it hasn’t been reported, create a new issue in `PyDPF-Core issues`_ using the ``🐞 bug`` template:
 
   - Include a clear description of the problem.
   - Provide steps to reproduce the issue.
@@ -78,36 +78,41 @@
 Request a new feature
 =====================
 
-Do you have an idea for a new feature or an improvement? Your suggestions are
-welcome. You can request a new feature by creating an issue in the `PyDPF-Core issues`_
-board.
+Do you have an idea for a new feature or an improvement? Your suggestions are welcome.
 
-.. _test-a-new-feature:
+You can request a new feature by creating an issue in the `PyDPF-Core issues`_
+board using the ``💡 New feature`` template.
 
+.. _test-a-new-feature:
 
 Test a new feature
 ==================
 
-It is possible to test a new feature before it is officially released. To do
-so, you can install PyDPF-Core from the source code by following the steps below.
+It is possible to test a new feature before it is officially released.
+
+To do so, you can install PyDPF-Core from the source code by following the steps below.
 
 Clone the repository
 --------------------
 
-Clone and install the latest version of PyDPF-Core by running this code:
+Clone the latest version of PyDPF-Core by running this code:
 
 .. code-block:: bash
 
     git clone https://github.com/ansys/pydpf-core
 
-Install for users
------------------
+Install PyDPF-Core for users
+----------------------------
 
 Installing the latest version of PyDPF-Core allows you to test latest features
 as they are being developed without having to wait for releases.
 
+To do so, follow the steps below.
+
 Virtual environment
 ~~~~~~~~~~~~~~~~~~~
 
+First, set up a new virtual environment.
+
 Start by navigating to the project's root directory by running:
 
 .. code-block::
 
@@ -179,8 +184,9 @@
 Start a discussion
 ==================
 
 Complex topics may require a discussion. Whether you want to know how to use
 PyDPF-Core for solving your specific problem or you have a suggestion for a new
-feature, a discussion is a good place to start. You can open a new discussion
-in the `PyDPF-Core discussions`_ section.
+feature, a discussion is a good place to start.
+
+You can open a new discussion in the `PyDPF-Core discussions`_ section.
 
 .. _answer-questions:
 
 Answer questions
 ================
 
 Another great way to contribute is to help others by answering their questions.
-Maintain a positive and constructive attitude while answering questions. If you
-don't know the answer, you can still help by pointing the person in the right
+Maintain a positive and constructive attitude while answering questions.
+
+If you do not know the answer, you can still help by pointing the person in the right
 direction.
 
+To discover how you can help, see the `PyDPF-Core discussions`_.
+
 .. _share-your-work:
 
 Share your work
 ===============
 
 If you have used PyDPF-Core to create something interesting, share it with the rest
-of the community. You can share your work in the `PyDPF-Core discussions`_. 
Include
+of the community.
+
+You can share your work in the `PyDPF-Core discussions`_. Include
 a brief description of your work and any relevant links that others may find
 useful.
diff --git a/doc/source/getting_started/contributing.rst b/doc/source/getting_started/contributing.rst
index 19baac5e638..65d2041ab83 100644
--- a/doc/source/getting_started/contributing.rst
+++ b/doc/source/getting_started/contributing.rst
@@ -3,10 +3,14 @@
 Contributing
 ############
 
+.. include:: ../links_and_refs.rst
+
+There are several ways to contribute to PyDPF-Core.
+
 Overall guidance on contributing to a PyAnsys repository appears in
-`Contribute `_
-in the *PyAnsys Developer's Guide*. Ensure that you are thoroughly familiar
-with this guide before attempting to contribute to PyDPF-Core.
+`Contributing <dev_guide_contributing_>`_ in the *PyAnsys Developer's Guide*.
+Ensure that you are thoroughly familiar with this guide before attempting
+to contribute to PyDPF-Core.
 
 .. important::
 
@@ -19,28 +23,27 @@ The following contribution information is specific to PyDPF-Core. Start by selec
    :padding: 2 2 2 2
 
    .. grid-item-card:: :fa:`user` User
-      :link: contribute/user
-      :link-type: doc
+      :link: contributing_as_a_user
+      :link-type: ref
 
       Report bugs, suggesting features, and ask questions.
 
-   .. grid-item-card:: :fa:`book` Documentarian
-      :link: contribute/documentarian
-      :link-type: doc
-
-      Improve the documentation and write new guides.
-
    .. grid-item-card:: :fa:`laptop-code` Developer
-      :link: contribute/developer
-      :link-type: doc
+      :link: contributing_as_a_developer
+      :link-type: ref
 
      Fix bugs, add new features, and improve the codebase.
 
+   .. grid-item-card:: :fa:`book` Documentation
+      :link: contributing_documentation
+      :link-type: ref
+
+      Improve the documentation and write new guides.
+
 .. toctree::
    :hidden:
   :maxdepth: 3
-   :caption: Contribute
 
    User
-   Documentarian
   Developer
+   Documentation
diff --git a/doc/source/getting_started/install.rst b/doc/source/getting_started/install.rst
index 628ab2a8b73..6bb908dc7f6 100644
--- a/doc/source/getting_started/install.rst
+++ b/doc/source/getting_started/install.rst
@@ -4,10 +4,12 @@
 Installation
 ************
 
+.. include:: ../links_and_refs.rst
+
 Install using ``pip``
 ---------------------
 
-The standard package installer for Python is `pip `_.
+The standard package installer for Python is `pip <pip_pypi_page_>`_.
 
 To use PyDPF-Core with Ansys 2022 R2 or later, install the latest version
 with this command:
@@ -16,7 +18,7 @@
 
     pip install ansys-dpf-core
 
-PyDPF-Core plotting capabilities require you to have `PyVista `_ installed.
+PyDPF-Core plotting capabilities require you to have `PyVista <pyvista_docs_>`_ installed.
 To install PyDPF-Core with its optional plotting functionalities, run this command:
 
 .. code::
 
@@ -58,7 +60,7 @@
 Install without internet
 
 If you are unable to install PyDPF-Core on the host machine using ``pip`` due to
 network isolation, download the wheelhouse corresponding to your platform and Python interpreter version
-for the latest release of PyDPF-Core from the assets section of the `latest PyDPF-Core release on GitHub `_.
+for the latest release of PyDPF-Core from the assets section of the `latest PyDPF-Core release on GitHub <pydpfcore_latest_release_>`_.
 
 The wheelhouse is a ZIP file containing Python wheels for all the packages PyDPF-Core requires to run. 
 To install PyDPF-Core using the downloaded wheelhouse, unzip the wheelhouse to a local directory,
@@ -70,8 +72,8 @@
 then use the following command from within this local directory:
 
 Note that PyDPF-Core wheelhouses do not include the optional plotting dependencies.
 To use the plotting capabilities, also download the wheels corresponding to your platform and Python interpreter version
-for `PyVista `_ and
-`matplotlib `_. Then, place them in the same local directory and run the preceding command.
+for `PyVista <pyvista_download_files_>`_ and
+`matplotlib <matplotlib_download_files_>`_. Then, place them in the same local directory and run the preceding command.
 
 
 Install in development mode
diff --git a/doc/source/links.rst b/doc/source/links.rst
index 8f10b2ba9d3..f899fff05d3 100644
--- a/doc/source/links.rst
+++ b/doc/source/links.rst
@@ -11,6 +11,9 @@
 .. _Sphinx-Gallery: https://sphinx-gallery.github.io/stable/index.html
 .. _Sphinx: https://www.sphinx-doc.org/en/master/
 
+.. PyAnsys Developer Guide
+.. _Documenting: https://dev.docs.pyansys.com/how-to/contributing.html
+.. _Documentation style: https://dev.docs.pyansys.com/doc-style/index.html
 
 .. Other links
 .. _Contributor Covenant Code of Conduct: https://www.contributor-covenant.org/version/2/1/code_of_conduct/
\ No newline at end of file
diff --git a/doc/source/links_and_refs.rst b/doc/source/links_and_refs.rst
new file mode 100644
index 00000000000..530847288dd
--- /dev/null
+++ b/doc/source/links_and_refs.rst
@@ -0,0 +1,69 @@
+.. _ref_links_and_refs:
+
+.. LINKS
+
+.. PyDPF-Core
+.. _pydpfcore_issues: https://github.com/ansys/pydpf-core/issues
+.. _pydpfcore_discussions: https://github.com/ansys/pydpf-core/discussions
+.. _pydpfcore_latest_release: https://github.com/ansys/pydpf-core/releases/latest
+.. _pydpfcore_documentation: https://dpf.docs.pyansys.com/
+
+.. PyAnsys
+.. _pyansys: https://docs.pyansys.com/version/dev/
+
+.. PyAnsys Developer Guide
+.. _dev_guide_pyansys: https://dev.docs.pyansys.com
+.. _dev_guide_contributing: https://dev.docs.pyansys.com/how-to/contributing.html
+.. _dev_guide_coding_style: https://dev.docs.pyansys.com/coding-style/index.html
+.. _dev_guide_setup_your_environment: https://dev.docs.pyansys.com/how-to/setting-up.html
+.. _dev_guide_branch_names: https://dev.docs.pyansys.com/how-to/contributing.html#branch-naming-conventions
+.. _dev_guide_commit_names: https://dev.docs.pyansys.com/how-to/contributing.html#commit-naming-conventions
+.. _dev_guide_doc_style: https://dev.docs.pyansys.com/doc-style/index.html
+.. _dev_guide_documenting: https://dev.docs.pyansys.com/how-to/documenting.html
+
+.. Other libraries documentations
+.. _pyvista_docs: https://docs.pyvista.org/version/stable/
+.. _pyvista_doc_plot_method: https://docs.pyvista.org/api/plotting/_autosummary/pyvista.plot.html#pyvista.plot
+.. _pyvista_org: https://pyvista.org/
+.. _jupyter: https://jupyter.org/
+.. _numpy_org: https://numpy.org/
+.. _numpy_docs: https://numpy.org/doc/stable/
+.. _jupyter_sphinx_ext: https://jupyter-sphinx.readthedocs.io/en/latest/
+
+.. Other libraries repos
+.. _pyvista_github: https://github.com/pyvista/pyvista
+.. _matplotlib_github: https://github.com/matplotlib/matplotlib
+
+.. External links
+.. _sphinx: https://www.sphinx-doc.org/en/master/
+.. _sphinx_directives: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html
+.. _sphinx_basics: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+.. _vale: https://www.vale.sh
+.. _docutils_directives: https://docutils.sourceforge.io/docs/ref/rst/directives.html
+.. 
_numpy_sphinx_ext_doc: https://numpydoc.readthedocs.io/en/latest/
+.. _pip_pypi_page: https://pypi.org/project/pip/
+.. _pyvista_download_files: https://pypi.org/project/pyvista/#files
+.. _matplotlib_download_files: https://pypi.org/project/matplotlib/#files
+
+.. REFERENCES
+
+.. Main DPF objects
+.. |Examples| replace:: :mod:`Examples`
+.. |Field| replace:: :class:`Field`
+.. |FieldsContainer| replace:: :class:`FieldsContainer`
+.. |Model| replace:: :class:`Model <ansys.dpf.core.model.Model>`
+.. |DataSources| replace:: :class:`DataSources <ansys.dpf.core.data_sources.DataSources>`
+.. |Scoping| replace:: :class:`Scoping <ansys.dpf.core.scoping.Scoping>`
+.. |ScopingsContainer| replace:: :class:`ScopingsContainer <ansys.dpf.core.scopings_container.ScopingsContainer>`
+.. |MeshedRegion| replace:: :class:`MeshedRegion <ansys.dpf.core.meshed_region.MeshedRegion>`
+.. |MeshesContainer| replace:: :class:`MeshesContainer <ansys.dpf.core.meshes_container.MeshesContainer>`
+.. |MeshInfo| replace:: :class:`MeshInfo <ansys.dpf.core.mesh_info.MeshInfo>`
+.. |Nodes| replace:: :class:`Nodes <ansys.dpf.core.nodes.Nodes>`
+.. |Elements| replace:: :class:`Elements <ansys.dpf.core.elements.Elements>`
+.. |Faces| replace:: :class:`Faces <ansys.dpf.core.faces.Faces>`
+.. |DpfPlotter| replace:: :class:`DpfPlotter`
+.. |Result| replace:: :class:`Result <ansys.dpf.core.results.Result>`
+.. |Operator| replace:: :class:`Operator`
+.. |TimeFreqSupport| replace:: :class:`TimeFreqSupport <ansys.dpf.core.time_freq_support.TimeFreqSupport>`
+.. |PropertyField| replace:: :class:`PropertyField <ansys.dpf.core.property_field.PropertyField>`
+.. |StringField| replace:: :class:`StringField <ansys.dpf.core.string_field.StringField>`
\ No newline at end of file
diff --git a/doc/source/user_guide/custom_operator_example.py b/doc/source/user_guide/custom_operator_example.py
deleted file mode 100644
index 5a62c9875a7..00000000000
--- a/doc/source/user_guide/custom_operator_example.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from ansys.dpf import core as dpf
-from ansys.dpf.core.custom_operator import CustomOperatorBase, record_operator  # noqa: F401
-from ansys.dpf.core.operator_specification import CustomSpecification, SpecificationProperties, \
-    PinSpecification
-
-
-class CustomOperator(CustomOperatorBase):
-    @property
-    def name(self):
-        return "name_of_my_custom_operator"
-
-    @property
-    def specification(self) -> CustomSpecification:
-        spec = CustomSpecification()
-        spec.description = "What the Operator does."
-        spec.inputs = {
-            0: PinSpecification("name_of_pin_0", [dpf.Field, dpf.FieldsContainer],
-                                "Describe pin 0."),
-        }
-        spec.outputs = {
-            0: PinSpecification("name_of_pin_0", [dpf.Field], "Describe pin 0."),
-        }
-        spec.properties = SpecificationProperties(
-            user_name="user name",
-            category="category",
-            license="license",
-        )
-        return spec
-
-    def run(self):
-        field = self.get_input(0, dpf.Field)
-        if field is None:
-            field = self.get_input(0, dpf.FieldsContainer)[0]
-        # compute data
-        self.set_output(0, dpf.Field())
-        self.set_succeeded()
diff --git a/doc/source/user_guide/custom_operators.rst b/doc/source/user_guide/custom_operators.rst
deleted file mode 100644
index 72fb3e9d1c0..00000000000
--- a/doc/source/user_guide/custom_operators.rst
+++ /dev/null
@@ -1,253 +0,0 @@
-.. _user_guide_custom_operators:
-
-================
-Custom operators
-================
-
-In Ansys 2023 R1 and later, you can create custom operators in CPython. Creating custom operators
-consists of wrapping Python routines in a DPF-compliant way so that you can access them in the same way
-as you access the native operators in the :class:`ansys.dpf.core.dpf_operator.Operator` class in
-PyDPF-Core or in any supported client API.
-
-With support for custom operators, PyDPF-Core becomes a development tool offering:
-
-- **Accessibility:** A simple script can define a basic operator plugin.
-
-- **Componentization:** Operators with similar applications can be grouped in Python plugin packages.
- -- **Easy distribution:** Standard Python tools can be used to package, upload, and download custom operators. - -- **Dependency management:** Third-party Python modules can be added to the Python package. - -- **Reusability:** A documented and packaged operator can be reused in an infinite number of workflows. - -- **Remotable and parallel computing:** Native DPF capabilities are inherited by custom operators. - -The only prerequisite for creating custom operators is to be familiar with native operators. -For more information, see :ref:`ref_user_guide_operators`. - -Install module --------------- - -Once an Ansys unified installation is complete, you must install the ``ansys-dpf-core`` module in the Ansys -installer's Python interpreter. - -#. Download the script for your operating system: - - - For Windows, download this :download:`PowerShell script `. - - For Linux, download this :download:`Shell script ` - -#. Run the downloaded script for installing with optional arguments: - - - ``-awp_root``: Path to the Ansys root installation folder. For example, the 2023 R1 installation folder ends - with ``Ansys Inc/v231``, and the default environment variable is ``AWP_ROOT231``. - - ``-pip_args``: Optional arguments to add to the ``pip`` command. For example, ``--extra-index-url`` or - ``--trusted-host``. - -To uninstall the ``ansys-dpf-core`` module from the Ansys installation: - -#. Download the script for your operating system: - - - For Windows, download this :download:`PowerShell script `. - - For Linux, download this :download:`Shell script `. - -3. Run the downloaded script for uninstalling with the optional argument: - - - ``-awp_root``: Path to the Ansys root installation folder. For example, the 2023 R1 installation folder ends - with ``Ansys Inc/v231``, and the default environment variable is ``AWP_ROOT231``. - - -Create operators ----------------- -You can create a basic operator plugin or a plugin package with multiple operators. - -Basic operator plugin -~~~~~~~~~~~~~~~~~~~~~ -To create a basic operator plugin, write a simple Python script. An operator implementation -derives from the :class:`ansys.dpf.core.custom_operator.CustomOperatorBase` class and a call to -the :func:`ansys.dpf.core.custom_operator.record_operator` method. - -This example script shows how you create a basic operator plugin: - -.. literalinclude:: custom_operator_example.py - - -.. code-block:: - - def load_operators(*args): - record_operator(CustomOperator, *args) - - -In the various properties for the class, specify the following: - -- Name for the custom operator -- Description of what the operator does -- Dictionary for each input and output pin. This dictionary includes the name, a list of supported types, a description, - and whether it is optional and/or ellipsis (meaning that the specification is valid for pins going from pin - number *x* to infinity) -- List for operator properties, including name to use in the documentation and code generation and the - operator category. The optional ``license`` property lets you define a required license to check out - when running the operator. Set it equal to ``any_dpf_supported_increments`` to allow any license - currently accepted by DPF (see :ref:`here`) - -For comprehensive examples on writing operator plugins, see :ref:`python_operators`. - - -Plugin package with multiple operators -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To create a plugin package with multiple operators or with complex routines, write a -Python package. 
The benefits of writing packages rather than simple scripts are: - -- **Componentization:** You can split the code into several Python modules or files. -- **Distribution:** You can use standard Python tools to upload and download packages. -- **Documentation:** You can add README files, documentation, tests, and examples to the package. - -A plugin package with dependencies consists of a folder with the necessary files. Assume -that the name of your plugin package is ``custom_plugin``. A folder with this name would -contain four files: - -- ``__init__.py`` -- ``operators.py`` -- ``operators_loader.py`` -- ``common.py`` - -**__init__.py file** - -The ``__init__.py`` file contains this code:: - - from operators_loader import load_operators - - -**operators.py file** - -The ``operators.py`` file contains code like this: - -.. literalinclude:: custom_operator_example.py - - -**operators_loader.py file** - -The ``operators_loader.py`` file contains code like this:: - - from custom_plugin import operators - from ansys.dpf.core.custom_operator import record_operator - - - def load_operators(*args): - record_operator(operators.CustomOperator, *args) - - -**common.py file** - -The ``common.py`` file contains the Python routines as classes and functions:: - - #write needed python routines as classes and functions here. - -Third-party dependencies -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. include:: custom_operators_deps.rst - - -Assume once again that the name of your plugin package is ``custom_plugin``. -A folder with this name would contain these files: - -- ``__init__.py`` -- ``operators.py`` -- ``operators_loader.py`` -- ``common.py`` -- ``winx64.zip`` -- ``linx64.zip`` -- ``custom_plugin.xml`` - -**__init__.py file** - -The ``__init__.py`` file contains this code:: - - from operators_loader import load_operators - - -**operators.py file** - -The ``operators.py`` file contains code like this: - -.. literalinclude:: custom_operator_example.py - - -**operators_loader.py file** - -The ``operators_loader.py`` file contains code like this:: - - from custom_plugin import operators - from ansys.dpf.core.custom_operator import record_operator - - - def load_operators(*args): - record_operator(operators.CustomOperator, *args) - - - def load_operators(*args): - record_operator(operators.CustomOperator, *args) - -**common.py file** - -The ``common.py`` file contains the Python routines as classes and functions:: - - #write needed python routines as classes and functions here. - - -**requirements.txt file** - -The ``requirements.txt`` file contains code like this: - -.. literalinclude:: /examples/07-python-operators/plugins/gltf_plugin/requirements.txt - -The ZIP files for Windows and Linux are included as assets: - -- ``winx64.zip`` -- ``linx64.zip`` - - -**custom_plugin.xml file** - -The ``custom_plugin.xml`` file contains code like this: - -.. literalinclude:: custom_plugin.xml - :language: xml - - -Use custom operators --------------------- - -Once a custom operator is created, you can use the :func:`ansys.dpf.core.core.load_library` method to load it. -The first argument is the path to the directory with the plugin. The second argument is ``py_`` plus any name -identifying the plugin. The last argument is the function name for recording operators. - -For a plugin that is a single script, the second argument should be ``py_`` plus the name of the Python file: - -.. 
code:: - - dpf.load_library( - r"path/to/plugins", - "py_custom_plugin", #if the load_operators function is defined in path/to/plugins/custom_plugin.py - "load_operators") - -For a plugin package, the second argument should be ``py_`` plus any name: - -.. code:: - - dpf.load_library( - r"path/to/plugins/custom_plugin", - "py_my_custom_plugin", #if the load_operators function is defined in path/to/plugins/custom_plugin/__init__.py - "load_operators") - -Once the plugin is loaded, you can instantiate the custom operator: - -.. code:: - - new_operator = dpf.Operator("custom_operator") # if "custom_operator" is what is returned by the ``name`` property - -References ----------- -For more information, see :py:mod:`Custom Operator Base ` in the **API reference** -and :ref:`python_operators` in **Examples**. diff --git a/doc/source/user_guide/how_to.rst b/doc/source/user_guide/how_to.rst index f4359868841..fb51c3df752 100644 --- a/doc/source/user_guide/how_to.rst +++ b/doc/source/user_guide/how_to.rst @@ -15,15 +15,6 @@ How-tos .. image:: ../images/plotting/pontoon.png .. image:: ../images/plotting/pontoon_strain.png - - - .. grid-item-card:: Create custom operators - :link: user_guide_custom_operators - :link-type: ref - :text-align: center - - .. image:: ../images/drawings/plugin-logo.png - :width: 50% .. grid-item-card:: Use DPF Server package diff --git a/doc/source/user_guide/index.rst b/doc/source/user_guide/index.rst index 7d1dfab4aef..f841e1a952b 100644 --- a/doc/source/user_guide/index.rst +++ b/doc/source/user_guide/index.rst @@ -4,24 +4,24 @@ User guide ========== -PyDPF-Core is a Python client API for accessing DPF postprocessing -capabilities. The ``ansys.dpf.core`` package makes highly efficient -computation, customization, and remote postprocessing accessible in Python. +**DPF** provides numerical simulation users and engineers with a toolbox for accessing and +transforming data. -The goals of this section are to: +**PyDPF-Core** is a Python client API for accessing DPF +capabilities. The ``ansys.dpf.core`` package makes highly efficient +computation, customization, and remote data processing accessible in Python. - - Describe the most-used DPF entities and how they can help you to access and modify solver data. - - Provide simple how-tos for tackling the most common use cases. +The goals of this section are to: -.. include:: - concepts/index.rst + - Describe some DPF entities and how they can help you to access and modify solver data. + - Provide detailed tutorials to demonstrate PyDPF-Core functionalities. + - Explain how to resolve the most common issues encountered when using PyDPF-Core .. include:: - main_entities.rst + tutorials/index.rst .. include:: - how_to.rst - + concepts/index.rst Troubleshooting --------------- @@ -52,38 +52,30 @@ Troubleshooting :text-align: center -.. toctree:: - :maxdepth: 2 - :hidden: - :caption: Concepts - - concepts/concepts.rst - concepts/waysofusing.rst - concepts/stepbystep.rst - - .. toctree:: :maxdepth: 2 :hidden: - :caption: DPF most-used entities + :caption: Tutorials - model - operators - fields_container + tutorials/data_structures/index.rst + tutorials/post_processing_basics/index.rst + tutorials/import_data/index.rst + tutorials/mesh/index.rst + tutorials/plot/index.rst + tutorials/animate/index.rst + tutorials/mathematics/index.rst + tutorials/custom_operators_and_plugins/index.rst -.. 
toctree:: - :maxdepth: 2 - :hidden: - :caption: How-tos - plotting.rst - custom_operators.rst - dpf_server.rst - server_types.rst - server_context.rst - xmlfiles.rst +.. toctree:: + :maxdepth: 2 + :hidden: + :caption: Concepts + concepts/concepts.rst + concepts/waysofusing.rst + concepts/stepbystep.rst .. toctree:: :maxdepth: 3 diff --git a/doc/source/user_guide/operators.rst b/doc/source/user_guide/operators.rst index f2bd57ecbd0..9aa9a0d5b36 100644 --- a/doc/source/user_guide/operators.rst +++ b/doc/source/user_guide/operators.rst @@ -56,16 +56,19 @@ The library of DPF operators is large and includes file readers and mathematical geometrical, and logical transformations. For more information on this library, which is progressively enhanced, see :ref:`ref_dpf_operators_reference`. +Despite the large number of operators proposed by the standard DPF installation, you may want to +create your own operators and ship them as DPF plugins. +See the tutorials section on custom operators and plugins to learn more. -Create operators -~~~~~~~~~~~~~~~~ -Each operator is of type :py:mod:`Operator `. You can create an instance +Instantiate operators +~~~~~~~~~~~~~~~~~~~~~ +Each operator is of type :py:mod:`Operator `. You can instantiate an operator in Python with any of the derived classes available in the :py:mod:`ansys.dpf.core.operators` package or directly with the :py:mod:`Operator ` class using the internal name string that indicates the operator type. For more information, see :ref:`ref_dpf_operators_reference`. -This example shows how to create the displacement operator: +This example shows how to instantiate the displacement operator: .. code-block:: python diff --git a/doc/source/user_guide/tutorials/animate/animate_time.rst b/doc/source/user_guide/tutorials/animate/animate_time.rst new file mode 100644 index 00000000000..4033589ae5c --- /dev/null +++ b/doc/source/user_guide/tutorials/animate/animate_time.rst @@ -0,0 +1,358 @@ +.. _ref_tutorials_animate_time: + +====================== +Animate data over time +====================== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |Animator| replace:: :class:`Animator` +.. |animate| replace:: :func:`FieldsContainer.animate() ` +.. |Workflow| replace:: :class:`Workflow` +.. |Elemental| replace:: :class:`elemental` +.. |ElementalNodal| replace:: :class:`elemental_nodal` +.. |Nodal| replace:: :class:`nodal` +.. |Overall| replace:: :class:`overall` +.. |open_movie| replace:: :class:`pyvista.Plotter.open_movie` + +This tutorial demonstrates how to create 3D animations of data in time. + +:jupyter-download-script:`Download tutorial as Python script` :jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +To animate data across time, you must store the data in a |FieldsContainer| with a ``time`` label. + + +Get the result files +-------------------- + +First, import a results file. For this tutorial, you can use the one available in the |Examples| module. +For more information about how to import your own result file in DPF, see +the :ref:`ref_tutorials_import_data` tutorial section. + +.. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path = examples.find_msup_transient() + # Create the model + model = dpf.Model(data_sources=result_file_path) + +Define a time scoping +--------------------- + +To animate across time, you must define the time steps you are interested in. +This tutorial retrieves all the time steps available in |TimeFreqSupport|, but you can also filter them. +For more information on how to define a scoping, see the ``Narrow down data`` tutorial in the +:ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Get a scoping of all time steps available + time_steps = model.metadata.time_freq_support.time_frequencies + +Extract the results +------------------- + +Extract the results to animate. In this tutorial, you extract the displacement and stress results. + +.. note:: + + Only the |Elemental|, |Nodal|, or |Faces| locations are supported for animations. + |Overall| and |ElementalNodal| locations are not currently supported. + + +.. jupyter-execute:: + + # Get the displacement fields (already on nodes) at all time steps + disp_fc = model.results.displacement(time_scoping=time_steps).eval() + print(disp_fc) + +.. jupyter-execute:: + + # Get the stress fields on nodes at all time steps + # Request the stress on |Nodal| location as the default |ElementalNodal| location is not supported. + stress_fc = model.results.stress.on_location(location=dpf.locations.nodal).on_time_scoping(time_scoping=time_steps).eval() + print(stress_fc) + +Animate the results +------------------- + +Animate the results with the |animate| method. +You can animate them on a deformed mesh (animate the color map and the mesh) +or on a static mesh (animate the color map only). + +The default behavior of the |animate| method is to: + +- Display the norm of the data components; +- Display data at the top layer for shells; +- Display the deformed mesh when animating displacements; +- Display the static mesh for other types of results; +- Use a constant and uniform scale factor of 1.0 when deforming the mesh. + +You can animate any result on a deformed geometry by providing displacement results in the `deform_by` parameter. + +The geometry can be deformed by a |Result| object, an |Operator| (It must evaluate to a |FieldsContainer| +of same length as the one being animated), or a |FieldsContainer| (also of same length as the one being animated). + +.. note:: + + The behavior of the |animate| method is defined by a |Workflow| it creates and feeds to an |Animator|. + This |Workflow| loops over a |Field| of frame indices and for each frame generates a field of norm contours + to render, as well as a displacement field to deform the mesh if `deform_by` is provided. + For more information on plots on deformed meshes see: :ref:`ref_plotting_data_on_deformed_mesh`. + + +Animate the displacement results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Use |animate| with the displacement results. + +.. tab-set:: + + .. tab-item:: Deformed mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the displacement results in a deformed geometry + disp_fc.animate() + + .. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_1.gif") + + .. 
image:: animate_disp_1.gif + :scale: 50 % + :align: center + + .. tab-item:: Static mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the displacement results on a static mesh using ``deform_by=False`` + disp_fc.animate(deform_by=False) + + .. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_2.gif", + deform_by=False) + + .. image:: animate_disp_2.gif + :scale: 50 % + :align: center + +Animate the stress +^^^^^^^^^^^^^^^^^^ + +Use |animate| with the stress results. + +.. tab-set:: + + .. tab-item:: Deformed mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the stress results on a deformed mesh + # Use the ``deform_by`` argument and give the displacement results. + stress_fc.animate(deform_by=disp_fc) + + .. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_stress_1.gif", + deform_by=disp_fc) + + .. image:: animate_stress_1.gif + :scale: 50 % + :align: center + + .. tab-item:: Static mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the stress results in a static geometry + stress_fc.animate() + + .. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_stress_2.gif") + + .. image:: animate_stress_2.gif + :scale: 50 % + :align: center + +Change the scale factor +----------------------- + +You can change the scale factor using: + +- A single number for a uniform constant scaling; +- A list of numbers for a varying scaling (same length as the number of frames). + +Uniform constant scaling +^^^^^^^^^^^^^^^^^^^^^^^^ +.. jupyter-execute:: + :hide-output: + + # Define a uniform scale factor + uniform_scale_factor=10. + # Animate the displacements + disp_fc.animate(scale_factor=uniform_scale_factor) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_3.gif", + scale_factor=uniform_scale_factor, text="Uniform scale factor") + +.. image:: animate_disp_3.gif + :scale: 45 % + :align: center + +Varying scaling +^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + :hide-output: + + # Define a varying scale factor + varying_scale_factor = [float(i) for i in range(len(disp_fc))] + # Animate the displacements + disp_fc.animate(scale_factor=varying_scale_factor) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_4.gif", + scale_factor=varying_scale_factor, text="Varying scale factor") + +.. image:: animate_disp_4.gif + :scale: 45 % + :align: center + +Save the animation +------------------ + +You can save the animation using the ``save_as`` argument with a target file path with the desired format as the extension key. +Accepted extensions are: + +- ``.gif``; +- ``.avi``; +- ``.mp4`` + +For more information see |open_movie|. + +.. jupyter-execute:: + :hide-output: + + # Animate the stress results and save it + stress_fc.animate(deform_by=disp_fc, save_as="animate_stress.gif") + + +Control the camera +------------------ + +Control the camera with the ``cpos`` argument. + +A camera position is a combination of: + +- A position; +- A focal point (the target); +- A upwards vector. + +It results in a list of format: + +.. 
code-block:: python + + camera_position= [[pos_x, pos_y, pos_z], # position + [fp_x, fp_y, fp_z], # focal point + [up_x, up_y, up_z]] # upwards vector + +The |animate| method accepts a single camera position or a list of camera positions for each frame. + +.. note:: + A tip for defining a camera position is to do a first interactive plot of the data + with argument ``return_cpos=True``, position the camera as desired in the view, and retrieve + the output of the plotting command. + +Fixed camera +^^^^^^^^^^^^ + +.. jupyter-execute:: + :hide-output: + + # Define the camera position + cam_pos = [[0., 2.0, 0.6], [0.05, 0.005, 0.5], [0.0, 0.0, 1.0]] + # Animate the stress with a custom fixed camera position + stress_fc.animate(cpos=cam_pos) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(save_as="doc/source/user_guide/tutorials/animate/animate_disp_5.gif", + cpos=cam_pos, + off_screen=True) + +.. image:: animate_disp_5.gif + :scale: 50 % + :align: center + +Moving camera +^^^^^^^^^^^^^ + +.. jupyter-execute:: + :hide-output: + + import copy + # Define the list of camera positions + cpos_list = [cam_pos] + # Incrementally increase the x coordinate of the camera by 0.1 for each frame + for i in range(1, len(disp_fc)): + new_pos = copy.deepcopy(cpos_list[i-1]) + new_pos[0][0] += 0.1 + cpos_list.append(new_pos) + + # Animate the stress with a moving camera + stress_fc.animate(cpos=cpos_list) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(save_as="doc/source/user_guide/tutorials/animate/animate_disp_6.gif", + cpos=cpos_list, + off_screen=True) + +.. image:: animate_disp_6.gif + :scale: 50 % + :align: center + +Additional options +------------------ + +You can use additional PyVista arguments of |open_movie|), such as: + +- Show or hide the coordinate system axis with ``show_axes=True`` or ``show_axes=False``; +- Render off-screen for batch animation creation with ``off_screen=True``; +- Change the frame-rate with ``framerate``; +- Change the image quality with ``quality``. diff --git a/doc/source/user_guide/tutorials/animate/index.rst b/doc/source/user_guide/tutorials/animate/index.rst new file mode 100644 index 00000000000..6f360c42162 --- /dev/null +++ b/doc/source/user_guide/tutorials/animate/index.rst @@ -0,0 +1,27 @@ +.. _ref_tutorials_animate: + +======= +Animate +======= + +These tutorials demonstrate how to visualize the data as an animation. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Animate data over time + :link: ref_tutorials_animate_time + :link-type: ref + :text-align: center + + This tutorial shows how to animate your results data over time. + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. 
toctree::
   :maxdepth: 2
   :hidden:

   animate_time.rst
diff --git a/doc/source/user_guide/create_sites_for_python_operators.ps1 b/doc/source/user_guide/tutorials/custom_operators_and_plugins/create_sites_for_python_operators.ps1
similarity index 100%
rename from doc/source/user_guide/create_sites_for_python_operators.ps1
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/create_sites_for_python_operators.ps1
diff --git a/doc/source/user_guide/create_sites_for_python_operators.sh b/doc/source/user_guide/tutorials/custom_operators_and_plugins/create_sites_for_python_operators.sh
similarity index 100%
rename from doc/source/user_guide/create_sites_for_python_operators.sh
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/create_sites_for_python_operators.sh
diff --git a/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators.rst
new file mode 100644
index 00000000000..f7ef776dc2d
--- /dev/null
+++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators.rst
@@ -0,0 +1,173 @@
.. _tutorials_custom_operators_and_plugins_custom_operator:

================
Custom operators
================

.. note::

    This tutorial requires DPF 7.1 or above (2024 R1).

This tutorial shows the basics of creating a custom operator in Python and loading it onto a server for use.

.. note::

    You can create custom operators in CPython using PyDPF-Core for use with DPF in Ansys 2023 R1 and later.

It first presents how to :ref:`create a custom DPF operator <tutorials_custom_operators_and_plugins_custom_operator_create_custom_operator>`
in Python using PyDPF-Core.

It then shows how to :ref:`make a plugin <tutorials_custom_operators_and_plugins_custom_operator_create_custom_plugin>`
out of this single operator.

The next step is to :ref:`load the plugin on the server <tutorials_custom_operators_and_plugins_custom_operator_load_the_plugin>` to record its operators.

The final step is to instantiate the custom operator from the client API and :ref:`use it <tutorials_custom_operators_and_plugins_custom_operator_use_the_custom_operator>`.

.. note::

    In this tutorial, the DPF client API used is PyDPF-Core but, once recorded on the server,
    you can call the operators of the plugin using any of the DPF client APIs
    (C++, CPython, IronPython), as you would any other operator.


:jupyter-download-script:`Download tutorial as Python script`
:jupyter-download-notebook:`Download tutorial as Jupyter notebook`


.. _tutorials_custom_operators_and_plugins_custom_operator_create_custom_operator:

Create a custom operator
------------------------

To create a custom DPF operator using PyDPF-Core, define a custom operator class inheriting from
the :class:`CustomOperatorBase <ansys.dpf.core.custom_operator.CustomOperatorBase>` class in a dedicated Python file.

The following are sections of a file named ``custom_operator_example.py`` available under ``ansys.dpf.core.examples.python_plugins``.

First, declare the custom operator class, with the necessary imports and a first property to define the operator scripting name:

.. literalinclude:: /../../src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py
   :end-at: return "my_custom_operator"

Next, set the ``specification`` property of your operator with:

- a description of what the operator does
- a dictionary for each input and output pin. This dictionary includes the name, a list of supported types, a description,
  and whether it is optional and/or ellipsis (meaning that the specification is valid for pins going from pin
  number *x* to infinity)
- a list of operator properties, including the name to use in the documentation and code generation and the
  operator category. The optional ``license`` property lets you define a required license to check out
  when running the operator. Set it equal to ``any_dpf_supported_increments`` to allow any license
  currently accepted by DPF (see :ref:`here`)

.. literalinclude:: /../../src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py
   :start-after: return "my_custom_operator"
   :end-at: return spec

Next, implement the operator behavior in its ``run`` method:

.. literalinclude:: /../../src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py
   :start-after: return spec
   :end-at: self.set_succeeded()

The ``CustomOperator`` class is now ready for packaging into any DPF Python plugin.

.. _tutorials_custom_operators_and_plugins_custom_operator_create_custom_plugin:

Package as a plugin
-------------------

You must package your custom operator as a *plugin*,
which is what you can later load onto a running DPF server,
or configure your installation to load automatically when starting a DPF server.

A DPF plugin contains Python modules with declarations of custom Python operators such as the one above.
However, it also has to define an entry point for the DPF server to call,
which records the operators of the plugin into the server's registry of available operators.

This is done by defining a function (DPF looks for a function named ``load_operators`` by default)
somewhere in the plugin with signature ``*args`` and a call to the
:func:`record_operator() <ansys.dpf.core.custom_operator.record_operator>` method for each custom operator.

In this tutorial, the plugin is made of a single operator, in a single Python file.
You can transform this single Python file into a DPF Python plugin very easily by adding a
``load_operators(*args)`` function with a call to the
:func:`record_operator() <ansys.dpf.core.custom_operator.record_operator>` method at the end of the file.

.. literalinclude:: /../../src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py
   :start-at: def load_operators(*args):

.. note::

    You can declare several custom operator classes in the same file, with as many calls to
    :func:`record_operator() <ansys.dpf.core.custom_operator.record_operator>` as necessary.

.. _tutorials_custom_operators_and_plugins_custom_operator_load_the_plugin:

Load the plugin
---------------

First, start a server in gRPC mode, which is the only server type supported for custom Python plugins.

.. jupyter-execute::

    import ansys.dpf.core as dpf

    # Python plugins are not supported in process.
    server = dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer, as_global=False)

With the server and custom plugin ready, use the :func:`load_library() <ansys.dpf.core.core.load_library>` method in a PyDPF-Core script to load it.

- The first argument is the path to the directory with the plugin.
- The second argument is ``py_<plugin_name>``, where ``<plugin_name>`` is the name identifying the plugin (the name of the Python file for a single-file plugin).
- The third argument is the name of the function in the plugin which records operators (``load_operators`` by default).
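
In generic terms, and using a hypothetical single-file plugin stored at ``path/to/plugins/custom_plugin.py`` purely for illustration, the call takes this shape:

.. code-block:: python

    dpf.load_library(
        r"path/to/plugins",   # path to the directory containing the plugin
        "py_custom_plugin",   # 'py_' + the name of the Python file
        "load_operators",     # name of the recording entry point in the plugin
    )

The executable example below does the same for the example plugin shipped with PyDPF-Core.

.. 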
jupyter-execute:: + + # Get the path to the example plugin + from pathlib import Path + from ansys.dpf.core.examples.python_plugins import custom_operator_example + custom_operator_folder = Path(custom_operator_example.__file__).parent + + # Load it on the server + dpf.load_library( + filename=custom_operator_folder, # Path to the plugin directory + name="py_custom_operator_example", # Look for a Python file named 'custom_operator_example.py' + symbol="load_operators", # Look for the entry-point where operators are recorded + server=server, # Load the plugin on the server previously started + generate_operators=False, # Do not generate the Python module for this operator + ) + + # You can verify the operator is now in the list of available operators on the server + assert "my_custom_operator" in dpf.dpf_operator.available_operator_names(server=server) + +.. _tutorials_custom_operators_and_plugins_custom_operator_use_the_custom_operator: + +Use the custom operator +----------------------- + +Once the plugin is loaded, you can instantiate the custom operator based on its name. + +.. jupyter-execute:: + + my_custom_op = dpf.Operator(name="my_custom_operator", server=server) # as returned by the ``name`` property + print(my_custom_op) + +Finally, run it as any other operator. + +.. jupyter-execute:: + + # Create a bogus field to use as input + in_field = dpf.Field(server=server) + # Give it a name + in_field.name = "initial name" + print(in_field) + # Set it as input of the operator + my_custom_op.inputs.input_0.connect(in_field) + # Run the operator by requesting its output + out_field = my_custom_op.outputs.output_0() + print(out_field) + +References +---------- +For more information, see :ref:`ref_custom_operator` in the **API reference** +and :ref:`python_operators` in **Examples**. diff --git a/doc/source/user_guide/custom_operators_deps.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators_deps.rst similarity index 92% rename from doc/source/user_guide/custom_operators_deps.rst rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators_deps.rst index 8872d77fd64..2ec95a11787 100644 --- a/doc/source/user_guide/custom_operators_deps.rst +++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_operators_deps.rst @@ -23,8 +23,8 @@ For this approach, do the following: #. Download the script for your operating system: - - For Windows, download this :download:`PowerShell script `. - - For Linux, download this :download:`Shell script `. + - For Windows, download this :download:`PowerShell script `. + - For Linux, download this :download:`Shell script `. 3. Run the downloaded script with the mandatory arguments: diff --git a/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package.rst new file mode 100644 index 00000000000..8d39c2fc34a --- /dev/null +++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package.rst @@ -0,0 +1,90 @@ +.. 
_tutorials_others_custom_plug_ins_packages:

=======================================
Plug-in package with multiple operators
=======================================

This tutorial shows how to create, load, and use a custom plug-in package with multiple operators or with complex routines.

Create the plug-in package
--------------------------

To create a plug-in package with multiple operators or with complex routines, you write a
Python package. The benefits of writing packages rather than simple scripts are:

- **Componentization:** You can split the code into several Python modules or files.
- **Distribution:** You can use standard Python tools to upload and download packages.
- **Documentation:** You can add README files, documentation, tests, and examples to the package.

A plug-in package consists of a folder with the necessary files. Assume
that the name of your plug-in package is ``custom_plugin``. A folder with this name would
contain four files:

- ``__init__.py``
- ``operators.py``
- ``operators_loader.py``
- ``common.py``

**__init__.py file**

The ``__init__.py`` file contains this code::

    from operators_loader import load_operators


**operators.py file**

The ``operators.py`` file contains code like this:

.. literalinclude:: custom_operator_example.py


**operators_loader.py file**

The ``operators_loader.py`` file contains code like this::

    from custom_plugin import operators
    from ansys.dpf.core.custom_operator import record_operator


    def load_operators(*args):
        record_operator(operators.CustomOperator, *args)


**common.py file**

The ``common.py`` file contains the Python routines as classes and functions::

    # Write the needed Python routines as classes and functions here.


Load the plug-in package
------------------------

Use the :func:`load_library() <ansys.dpf.core.core.load_library>` method to load the plug-in package.

- The first argument is the path to the directory where the plug-in package is located.
- The second argument is ``py_<package_name>``, where ``<package_name>`` is the name identifying the plug-in package.
- The third argument is the name of the function exposed in the ``__init__.py`` file of the plug-in package that is used to record operators.

.. code::

    dpf.load_library(
        r"path/to/plugins/custom_plugin",
        "py_my_custom_plugin", # if the load_operators function is defined in path/to/plugins/custom_plugin/__init__.py
        "load_operators")


Use the custom operators
------------------------

Once the plugin is loaded, you can instantiate the custom operator:

.. code::

    new_operator = dpf.Operator("custom_operator") # if "custom_operator" is what is returned by the ``name`` property

References
----------
For more information, see :ref:`ref_custom_operator` in the **API reference**
and :ref:`python_operators` in **Examples**.
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package_third_deps.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package_third_deps.rst
new file mode 100644
index 00000000000..0af149070ca
--- /dev/null
+++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plug_in_package_third_deps.rst
@@ -0,0 +1,158 @@
.. 
_tutorials_others_custom_plug_ins_packages_third_deps:

=============================================
Plug-in package with third-party dependencies
=============================================

This tutorial shows how to create, load, and use a custom plug-in package with third-party dependencies.

Create the plug-in package
--------------------------

To create a plug-in package with multiple operators or with complex routines, you write a
Python package.

A plug-in package with dependencies consists of a folder with the necessary files. Assume
that the name of your plug-in package is ``custom_plugin``. A folder with this name would
contain four files:

- ``__init__.py``
- ``operators.py``
- ``operators_loader.py``
- ``common.py``

**__init__.py file**

The ``__init__.py`` file contains this code::

    from operators_loader import load_operators


**operators.py file**

The ``operators.py`` file contains code like this:

.. literalinclude:: custom_operator_example.py


**operators_loader.py file**

The ``operators_loader.py`` file contains code like this::

    from custom_plugin import operators
    from ansys.dpf.core.custom_operator import record_operator


    def load_operators(*args):
        record_operator(operators.CustomOperator, *args)


**common.py file**

The ``common.py`` file contains the Python routines as classes and functions::

    # Write the needed Python routines as classes and functions here.


Third-party dependencies
------------------------

.. include:: custom_operators_deps.rst


Assume once again that the name of your plug-in package is ``custom_plugin``.
A folder with this name would contain these files:

- ``__init__.py``
- ``operators.py``
- ``operators_loader.py``
- ``common.py``
- ``requirements.txt``
- ``winx64.zip``
- ``linx64.zip``
- ``custom_plugin.xml``

**__init__.py file**

The ``__init__.py`` file contains this code::

    from operators_loader import load_operators


**operators.py file**

The ``operators.py`` file contains code like this:

.. literalinclude:: custom_operator_example.py


**operators_loader.py file**

The ``operators_loader.py`` file contains code like this::

    from custom_plugin import operators
    from ansys.dpf.core.custom_operator import record_operator


    def load_operators(*args):
        record_operator(operators.CustomOperator, *args)

**common.py file**

The ``common.py`` file contains the Python routines as classes and functions::

    # Write the needed Python routines as classes and functions here.


**requirements.txt file**

The ``requirements.txt`` file contains code like this:

.. literalinclude:: /examples/07-python-operators/plugins/gltf_plugin/requirements.txt

The ZIP files for Windows and Linux are included as assets:

- ``winx64.zip``
- ``linx64.zip``


**custom_plugin.xml file**

The ``custom_plugin.xml`` file contains code like this:

.. literalinclude:: custom_plugin.xml
   :language: xml


Load the plug-in package
------------------------

Use the :func:`load_library() <ansys.dpf.core.core.load_library>` method to load the plug-in package.

- The first argument is the path to the directory where the plug-in package is located.
- The second argument is ``py_<package_name>``, where ``<package_name>`` is the name identifying the plug-in package.
- The third argument is the name of the function exposed in the ``__init__.py`` file of the plug-in package that is used to record operators.

.. 
code:: + + dpf.load_library( + r"path/to/plugins/custom_plugin", + "py_my_custom_plugin", #if the load_operators function is defined in path/to/plugins/custom_plugin/__init__.py + "load_operators") + +Use the custom operators +------------------------ + +Once the plugin is loaded, you can instantiate the custom operator: + +.. code:: + + new_operator = dpf.Operator("custom_operator") # if "custom_operator" is what is returned by the ``name`` property + +References +---------- +For more information, see :ref:`ref_custom_operator` in the **API reference** +and :ref:`python_operators` in **Examples**. \ No newline at end of file diff --git a/doc/source/user_guide/custom_plugin.xml b/doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plugin.xml similarity index 100% rename from doc/source/user_guide/custom_plugin.xml rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/custom_plugin.xml diff --git a/doc/source/user_guide/tutorials/custom_operators_and_plugins/index.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/index.rst new file mode 100644 index 00000000000..5ab2a4ecbc1 --- /dev/null +++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/index.rst @@ -0,0 +1,87 @@ +.. _ref_tutorials_custom_operators_and_plugins: + +Custom Operators and Plugins +---------------------------- +You can enhance and customize your DPF installation by creating new operators and libraries of operators, called 'plugins'. + +DPF offers multiple development APIs depending on your environment. + +With support for custom operators, PyDPF-Core becomes a development tool offering: + +- **Accessibility:** A simple script can define a basic operator plugin. + +- **Componentization:** Operators with similar applications can be grouped in Python plug-in packages. + +- **Easy distribution:** Standard Python tools can be used to package, upload, and download custom operators. + +- **Dependency management:** Third-party Python modules can be added to the Python package. + +- **Reusability:** A documented and packaged operator can be reused in an infinite number of workflows. + +- **Remotable and parallel computing:** Native DPF capabilities are inherited by custom operators. + +The only prerequisite for creating custom operators is to be familiar with native operators. +For more information, see :ref:`ref_user_guide_operators`. + +.. note:: + + You can create custom operators in CPython using PyDPF-Core for use with DPF in Ansys 2023 R1 and later. + +The following tutorials demonstrate how to develop such plugins using PyDPF-Core (CPython based) and how to use them. + +For comprehensive examples on writing operator plugins, see :ref:`python_operators`. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Create a DPF plugin with a single operator + :link: tutorials_custom_operators_and_plugins_custom_operator + :link-type: ref + :text-align: center + :class-header: sd-bg-light sd-text-dark + :class-footer: sd-bg-light sd-text-dark + + This tutorial shows how to create, load, and use a custom plugin containing a single custom operator. + + +++ + Requires DPF 7.1 or above (2024 R1). + + .. grid-item-card:: Create a DPF plugin with multiple operators + :text-align: center + :class-header: sd-bg-light sd-text-dark + :class-footer: sd-bg-light sd-text-dark + + This tutorial shows how to create, load, and use a custom plugin with multiple operators or with complex routines. + + +++ + Coming soon + + .. 
grid-item-card:: Create a custom DPF plugin with third-party dependencies using Python
      :text-align: center
      :class-header: sd-bg-light sd-text-dark
      :class-footer: sd-bg-light sd-text-dark

      This tutorial shows how to create a Python plug-in package with third-party dependencies.

      +++
      Coming soon

   .. grid-item-card:: Update PyDPF-Core in the DPF installation
      :text-align: center
      :class-card: sd-bg-light
      :class-header: sd-bg-light sd-text-dark
      :class-footer: sd-bg-light sd-text-dark

      This tutorial shows how to update PyDPF-Core in your DPF installation.

      +++
      Coming soon

.. toctree::
   :maxdepth: 2
   :hidden:

   custom_operators.rst

diff --git a/doc/source/user_guide/install_ansys_dpf_core_in_ansys.ps1 b/doc/source/user_guide/tutorials/custom_operators_and_plugins/install_ansys_dpf_core_in_ansys.ps1
similarity index 100%
rename from doc/source/user_guide/install_ansys_dpf_core_in_ansys.ps1
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/install_ansys_dpf_core_in_ansys.ps1
diff --git a/doc/source/user_guide/install_ansys_dpf_core_in_ansys.sh b/doc/source/user_guide/tutorials/custom_operators_and_plugins/install_ansys_dpf_core_in_ansys.sh
similarity index 100%
rename from doc/source/user_guide/install_ansys_dpf_core_in_ansys.sh
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/install_ansys_dpf_core_in_ansys.sh
diff --git a/doc/source/user_guide/uninstall_ansys_dpf_core_in_ansys.ps1 b/doc/source/user_guide/tutorials/custom_operators_and_plugins/uninstall_ansys_dpf_core_in_ansys.ps1
similarity index 100%
rename from doc/source/user_guide/uninstall_ansys_dpf_core_in_ansys.ps1
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/uninstall_ansys_dpf_core_in_ansys.ps1
diff --git a/doc/source/user_guide/uninstall_ansys_dpf_core_in_ansys.sh b/doc/source/user_guide/tutorials/custom_operators_and_plugins/uninstall_ansys_dpf_core_in_ansys.sh
similarity index 100%
rename from doc/source/user_guide/uninstall_ansys_dpf_core_in_ansys.sh
rename to doc/source/user_guide/tutorials/custom_operators_and_plugins/uninstall_ansys_dpf_core_in_ansys.sh
diff --git a/doc/source/user_guide/tutorials/custom_operators_and_plugins/update_pydpf_core.rst b/doc/source/user_guide/tutorials/custom_operators_and_plugins/update_pydpf_core.rst
new file mode 100644
index 00000000000..29f4e1e9273
--- /dev/null
+++ b/doc/source/user_guide/tutorials/custom_operators_and_plugins/update_pydpf_core.rst
@@ -0,0 +1,30 @@

Install module
--------------

Once an Ansys unified installation is complete, you must install the ``ansys-dpf-core`` module in the Ansys
installer's Python interpreter.

#. Download the script for your operating system:

   - For Windows, download this :download:`PowerShell script <install_ansys_dpf_core_in_ansys.ps1>`.
   - For Linux, download this :download:`Shell script <install_ansys_dpf_core_in_ansys.sh>`.

#. Run the downloaded script for installing with optional arguments:

   - ``-awp_root``: Path to the Ansys root installation folder. For example, the 2023 R1 installation folder ends
     with ``Ansys Inc/v231``, and the default environment variable is ``AWP_ROOT231``.
   - ``-pip_args``: Optional arguments to add to the ``pip`` command. For example, ``--extra-index-url`` or
     ``--trusted-host``.

To uninstall the ``ansys-dpf-core`` module from the Ansys installation:

#. Download the script for your operating system:

   - For Windows, download this :download:`PowerShell script <uninstall_ansys_dpf_core_in_ansys.ps1>`.
   - For Linux, download this :download:`Shell script <uninstall_ansys_dpf_core_in_ansys.sh>`.

#. 
Run the downloaded script for uninstalling with the optional argument: + + - ``-awp_root``: Path to the Ansys root installation folder. For example, the 2023 R1 installation folder ends + with ``Ansys Inc/v231``, and the default environment variable is ``AWP_ROOT231``. \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/data_structures/data_arrays.rst b/doc/source/user_guide/tutorials/data_structures/data_arrays.rst new file mode 100644 index 00000000000..0f4cdd134a3 --- /dev/null +++ b/doc/source/user_guide/tutorials/data_structures/data_arrays.rst @@ -0,0 +1,562 @@ +.. _ref_tutorials_data_arrays: + +=========== +Data Arrays +=========== + +.. |Field| replace:: :class:`Field ` +.. |MeshInfo| replace:: :class:`MeshInfo ` +.. |MeshedRegion| replace:: :class:`MeshedRegion ` +.. |PropertyField| replace:: :class:`PropertyField ` +.. |StringField| replace:: :class:`StringField ` +.. |CustomTypeField| replace:: :class:`CustomTypeField ` + +To process your data with DPF, you must format it according to the DPF data model. +You can achieve this either by using DPF data readers on result files, or by using +data to build DPF data storage containers. + +It is important to be aware of how the data is structured in those containers to understand how to create them and how operators process them. + +The data containers can be: + + - **Raw data storage structures**: data arrays (such as a ``Field``) or data maps (such as a ``DataTree``) + - **Collections**: homogeneous groups of labeled raw data storage structures (such as a ``FieldsContainer`` for a group of labeled fields) + +This tutorial presents how to define and manipulate DPF data arrays specifically. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Introduction +------------ + +A data array in DPF usually represents a mathematical field, hence the base name ``Field``. + +Different types of ``Field`` store different data types: + + - a |Field| stores float values + - a |StringField| stores string values + - a |PropertyField| stores integer values + - a |CustomTypeField| stores values of a custom type (among valid numpy.dtype) + +A ``Field`` is always associated to: + + - a ``location``, which defines the type entity the data applies to. + You can check the :class:`locations ` list to know what is available. + Locations related to mesh entities include: ``nodal``, ``elemental``, or ``elemental_nodal``, ``zone``, ``faces``. + Locations related to time, frequency, or mode are ``modal``, ``time_freq``, and ``time_freq_step``. + + - a ``scoping``, which is the list of entity IDs each data point in the ``Field`` relates to. + For example, the ``scoping`` of a ``nodal`` ``Field`` represents a list of node IDs. + It can represent a subset of the ``support`` of the field. + The data in a ``Field`` is ordered the same way as the IDs in its ``scoping``. + + - a ``support``, which is a data container holding information about the model for the type of entity the ``location`` targets. + If the ``location`` relates to mesh entities such as nodes or elements, the ``support`` of the ``Field`` is an object holding data + related to the mesh, called a ``MeshedRegion``. + + - a ``dimensionality``, which gives the structure of the data based on the number of components and dimensions. 
Indeed, a DPF ``Field`` can store data for a 3D vector field, a scalar field, or a matrix field,
   but also for a multi-component field (for example, a symmetrical matrix field for each
   component of the stress field).

 - a ``data`` array, which holds the actual data in a vector, accessed according to the ``dimensionality``.


Create fields based on result files
-----------------------------------

In this tutorial, we use the result file from a fluid analysis to showcase the
|Field|, the |PropertyField|, and the |StringField|.

The :class:`Model <ansys.dpf.core.model.Model>` class creates and evaluates common readers for the files it is given,
such as a mesh provider, a result info provider, and a streams provider.
It provides dynamically built methods to extract the results available in the files, as well as many shortcuts
to facilitate exploration of the available data.

.. jupyter-execute::

    # Import the ansys.dpf.core module as ``dpf``
    from ansys.dpf import core as dpf
    # Import the examples module
    from ansys.dpf.core import examples
    # Create a data source targeting the example file
    my_data_sources = dpf.DataSources(result_path=examples.download_fluent_axial_comp()["flprj"])
    # Create a model from the data source
    my_model = dpf.Model(data_sources=my_data_sources)
    # Print information available for the analysis
    print(my_model)

The |MeshInfo| class stores information relative to the |MeshedRegion| of the analysis.
It stores some of its data as fields of strings or fields of integers, which we extract next.

.. jupyter-execute::

    # Get the mesh metadata
    my_mesh_info = my_model.metadata.mesh_info
    print(my_mesh_info)

.. tab-set::

    .. tab-item:: Field

        You can obtain a |Field| from a model by requesting a result.

        .. jupyter-execute::

            # Request the collection of temperature result fields from the model and take the first one.
            my_temp_field = my_model.results.temperature.eval()[0]
            # Print the field
            print(my_temp_field)

        The field is located on nodes since it stores the temperature at each node.

    .. tab-item:: StringField

        You can obtain a |StringField| from a |MeshInfo| by requesting the names of the zones in the model.

        .. jupyter-execute::

            # Request the name of the face zones in the fluid analysis
            my_string_field = my_mesh_info.get_property(property_name="face_zone_names")
            # Print the field of strings
            print(my_string_field)

        The field is located on zones since it stores the name of each zone.

    .. tab-item:: PropertyField

        You can obtain a |PropertyField| from a |MeshInfo| by requesting the ``body_face_topology`` property of the mesh.

        .. jupyter-execute::

            # Get the body_face_topology property field
            my_property_field = my_mesh_info.get_property(property_name="body_face_topology")
            # Print the field of integers
            print(my_property_field)

        This field stores the IDs of the faces that make up each body.

Create fields from scratch
--------------------------

You can also create a |Field|, a |StringField|, or a |PropertyField| from scratch based on your data.

.. tab-set::

    .. tab-item:: Field

        First, create a 3D vector field defined for two nodes.

        .. 
jupyter-execute::

            # Create a 3D vector field ready to hold data for two entities
            # The constructor creates 3D vector fields by default
            my_field = dpf.Field(nentities=2)
            # Set the data values as a flat vector
            my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
            # Associate the data to nodes
            my_field.location = dpf.locations.nodal
            # Set the IDs of the nodes the data applies to
            my_field.scoping.ids = [1, 2]
            # Define the unit (only available for the Field type)
            my_field.unit = "m"
            # Print the field
            print(my_field)

        Now create a 3x3 symmetric matrix field defined for a single element.

        .. jupyter-execute::

            # Set the nature to symmatrix
            my_field = dpf.Field(nentities=1, nature=dpf.natures.symmatrix)
            # The symmatrix dimensions default to 3x3
            # Set the data values as a flat vector
            my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
            # Associate the data to elements
            my_field.location = dpf.locations.elemental
            # Set the IDs of the elements the data applies to
            my_field.scoping.ids = [1]
            # Define the unit (only available for the Field type)
            my_field.unit = "Pa"
            # Print the field
            print(my_field)

        Now create a 2x3 matrix field defined for a single fluid element face.

        .. jupyter-execute::

            # Set the nature to matrix
            my_field = dpf.Field(nentities=1, nature=dpf.natures.matrix)
            # Set the matrix dimensions to 2x3
            my_field.dimensionality = dpf.Dimensionality(dim_vec=[2, 3], nature=dpf.natures.matrix)
            # Set the data values as a flat vector
            my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
            # Associate the data to faces
            my_field.location = dpf.locations.faces
            # Set the ID of the face the data applies to
            my_field.scoping.ids = [1]
            # Define the unit (only available for the Field type)
            my_field.unit = "mm"
            # Print the field
            print(my_field)

    .. tab-item:: StringField

        .. jupyter-execute::

            # Create a string field with data for two elements
            my_string_field = dpf.StringField(nentities=2)
            # Set the string values
            my_string_field.data = ["string_1", "string_2"]
            # Set the location
            my_string_field.location = dpf.locations.elemental
            # Set the element IDs
            my_string_field.scoping.ids = [1, 2]
            # Print the string field
            print(my_string_field)

    .. tab-item:: PropertyField

        .. jupyter-execute::

            # Create a property field with data for two modes
            my_property_field = dpf.PropertyField(nentities=2)
            # Set the data values
            my_property_field.data = [12, 25]
            # Set the location
            my_property_field.location = dpf.locations.modal
            # Set the mode IDs
            my_property_field.scoping.ids = [1, 2]
            # Print the property field
            print(my_property_field)

Create a |Field| with the fields_factory
----------------------------------------

The :mod:`fields_factory <ansys.dpf.core.fields_factory>` module provides helpers to create a |Field|:

.. tab-set::

    .. tab-item:: Scalar Field

        Use :func:`create_scalar_field <ansys.dpf.core.fields_factory.create_scalar_field>` to create a scalar field:

        .. jupyter-execute::

            # Create a scalar field ready to hold data for two entities
            # The field is nodal by default
            my_field = dpf.fields_factory.create_scalar_field(num_entities=2)
            my_field.data = [1.0, 2.0]
            my_field.scoping.ids = [1, 2]
            # Print the field
            print(my_field)

    .. tab-item:: Generic Vector Field

        Use :func:`create_vector_field <ansys.dpf.core.fields_factory.create_vector_field>` to create a generic vector field:

        .. 
jupyter-execute:: + + # Create a 2D vector field ready to hold data for two entities + # The field is nodal by default + my_field = dpf.fields_factory.create_vector_field(num_entities=2, num_comp=2) + my_field.data = [1.0, 2.0, 3.0, 4.0] + my_field.scoping.ids = [1, 2] + # Print the field + print(my_field) + + .. tab-item:: 3D Vector Field + + + Use :func:`create_3d_vector_field ` to create a 3D vector field: + + .. jupyter-execute:: + + # Create a 3D vector field ready to hold data for two entities + # The field is nodal by default + my_field = dpf.fields_factory.create_3d_vector_field(num_entities=2) + my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + my_field.scoping.ids = [1, 2] + # Print the field + print(my_field) + + .. tab-item:: Generic Matrix Field + + Use :func:`create_matrix_field ` to create a generic matrix field: + + .. jupyter-execute:: + + # Create a 2x3 matrix field ready to hold data for two entities + # The field is nodal by default + my_field = dpf.fields_factory.create_matrix_field(num_entities=2, num_lines=2, num_col=3) + my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + my_field.scoping.ids = [1, 2] + # Print the field + print(my_field) + + .. tab-item:: 3x3 Matrix Field + + Use :func:`create_tensor_field ` to create a 3x3 matrix field: + + .. jupyter-execute:: + + # Create a 3x3 matrix field ready to hold data for two entities + # The field is nodal by default + my_field = dpf.fields_factory.create_tensor_field(num_entities=2) + my_field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] + my_field.scoping.ids = [1, 2] + # Print the field + print(my_field) + + .. tab-item:: Overall Field + + Use :func:`create_overall_field ` to create a field with a single value for the whole support: + + .. jupyter-execute:: + + # Create a field storing a value applied to every node in the support + my_field = dpf.fields_factory.create_overall_field(value=1.0) + # Print the field + print(my_field) + + .. tab-item:: Field from Array + + Use :func:`field_from_array ` to create a scalar, 3D vector, or symmetric matrix field directly from a numpy array or a Python list + + .. jupyter-execute:: + + # Create a scalar field from a 1D array or a list + arr = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + my_field = dpf.fields_factory.field_from_array(arr=arr) + # Print the field + print(my_field) + + .. jupyter-execute:: + + # Create a 3D vector field from an array or a list + arr = [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]] + my_field = dpf.fields_factory.field_from_array(arr=arr) + # Print the field + print(my_field) + + .. jupyter-execute:: + + # Create a symmetric matrix field from an array or a list + arr = [[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]] + my_field = dpf.fields_factory.field_from_array(arr=arr) + # Print the field + print(my_field) + + +Access the field metadata +------------------------- + +The metadata associated to a field includes its name, its location, its scoping, +the shape of the data stored, its number of components, and its unit. + +.. tab-set:: + + .. tab-item:: Field + + .. 
jupyter-execute:: + + # Location of the fields data + my_location = my_temp_field.location + print("location", '\n', my_location,'\n') + + # Fields scoping + my_scoping = my_temp_field.scoping # Location entities type and number + print("scoping", '\n',my_scoping, '\n') + + my_scoping_ids = my_temp_field.scoping.ids # Available ids of locations components + print("scoping.ids", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of the location entities (how many data vectors we have) + my_elementary_data_count = my_temp_field.elementary_data_count + print("elementary_data_count", '\n', my_elementary_data_count, '\n') + + # Components count + # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) + my_component_count = my_temp_field.component_count + print("components_count", '\n', my_component_count, '\n') + + # Size + # Length of the data entire vector (equal to the number of elementary data times the number of components.) + my_field_size = my_temp_field.size + print("size", '\n', my_field_size, '\n') + + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_temp_field.shape + print("shape", '\n', my_shape, '\n') + + # Units + my_unit = my_temp_field.unit + print("unit", '\n', my_unit, '\n') + + .. tab-item:: StringField + + .. jupyter-execute:: + + # Location of the fields data + my_location = my_string_field.location + print("location", '\n', my_location,'\n') + + # StringFields scoping + my_scoping = my_string_field.scoping # Location entities type and number + print("scoping", '\n',my_scoping, '\n') + + my_scoping_ids = my_string_field.scoping.ids # Available ids of locations components + print("scoping.ids", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of the location entities (how many data vectors we have) + my_elementary_data_count = my_string_field.elementary_data_count + print("elementary_data_count", '\n', my_elementary_data_count, '\n') + + # Components count + # Data dimension, here we expect one name by zone + my_component_count = my_string_field.component_count + print("components_count", '\n', my_component_count, '\n') + + # Size + # Length of the data entire array (equal to the number of elementary data times the number of components.) + my_field_size = my_string_field.size + print("size", '\n', my_field_size, '\n') + + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_string_field.shape + print("shape", '\n', my_shape, '\n') + + .. tab-item:: PropertyField + + .. 
jupyter-execute::

            # Location of the field's data
            my_location = my_property_field.location
            print("location", '\n', my_location, '\n')

            # Field's scoping
            my_scoping = my_property_field.scoping  # Location entities type and number
            print("scoping", '\n', my_scoping, '\n')

            my_scoping_ids = my_property_field.scoping.ids  # Available IDs of the location entities
            print("scoping.ids", '\n', my_scoping_ids, '\n')

            # Elementary data count
            # Number of location entities (how many data vectors we have)
            my_elementary_data_count = my_property_field.elementary_data_count
            print("elementary_data_count", '\n', my_elementary_data_count, '\n')

            # Components count
            # Data dimension; here we expect one ID per face belonging to a body
            my_component_count = my_property_field.component_count
            print("components_count", '\n', my_component_count, '\n')

            # Size
            # Length of the entire data array (equal to the number of elementary data times the number of components)
            my_field_size = my_property_field.size
            print("size", '\n', my_field_size, '\n')

            # Field's shape
            # Gives a tuple with the elementary data count and the components count
            my_shape = my_property_field.shape
            print("shape", '\n', my_shape, '\n')

Access the field data
---------------------

A |Field| object is a client-side representation of a field held server-side.
When a remote DPF server is used, the data of the field is also stored remotely.

To build efficient remote postprocessing workflows, the amount of data exchanged between the client and the remote server has to be minimal.

You achieve this with operators and a fully remote workflow, requesting only the initial data needed to build the workflow and the output of the workflow.

For example, when interacting with remote data, it is important to remember that any PyDPF request for
``Field.data`` downloads the whole array to your local machine.

This is particularly inefficient in scripts that handle large amounts of data and perform an action
locally that could have been performed remotely with a DPF operator.

For example, if you want to know the entity-wise maximum of the field, you should prefer the
``min_max.min_max_by_entity`` operator to the ``array.max()`` method from ``numpy``.


Get the complete array
^^^^^^^^^^^^^^^^^^^^^^

The field's ``data`` is ordered with respect to its ``scoping.ids`` (as shown above).
To access the entire data in the field as an array (a ``numpy`` array):

.. tab-set::

    .. tab-item:: Field

        .. jupyter-execute::

            my_data_array = my_temp_field.data
            print(my_data_array)

        Note that this array is a genuine, local, numpy array (overloaded by the ``DPFArray`` class).

        .. jupyter-execute::

            print(type(my_data_array))

    .. tab-item:: StringField

        .. jupyter-execute::

            my_data_array = my_string_field.data
            print(my_data_array)

    .. tab-item:: PropertyField

        .. jupyter-execute::

            my_data_array = my_property_field.data
            print(my_data_array)

Get data for a single entity
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you need to access an individual node or element, request it
using either the :func:`get_entity_data() <ansys.dpf.core.field.Field.get_entity_data>` or
:func:`get_entity_data_by_id() <ansys.dpf.core.field.Field.get_entity_data_by_id>` methods:

.. tab-set::

    .. tab-item:: Field

        .. jupyter-execute::

            # Get the data for the entity at index 3 in the field
            my_temp_field.get_entity_data(index=3)

        .. 
+Get the complete array
+^^^^^^^^^^^^^^^^^^^^^^
+
+The field's ``data`` is ordered with respect to its ``scoping ids`` (as shown above).
+To access the entire data in the field as a ``numpy`` array:
+
+.. tab-set::
+
+    .. tab-item:: Field
+
+        .. jupyter-execute::
+
+            my_data_array = my_temp_field.data
+            print(my_data_array)
+
+        Note that this array is a genuine, local, numpy array (overloaded by the DPFArray).
+
+        .. jupyter-execute::
+
+            print(type(my_data_array))
+
+    .. tab-item:: StringField
+
+        .. jupyter-execute::
+
+            my_data_array = my_string_field.data
+            print(my_data_array)
+
+    .. tab-item:: PropertyField
+
+        .. jupyter-execute::
+
+            my_data_array = my_property_field.data
+            print(my_data_array)
+
+Get data for a single entity
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you need to access an individual node or element, request it
+using either the :func:`get_entity_data()` or
+:func:`get_entity_data_by_id()` methods:
+
+.. tab-set::
+
+    .. tab-item:: Field
+
+        .. jupyter-execute::
+
+            # Get the data from the entity at index 3 in the field
+            my_temp_field.get_entity_data(index=3)
+
+        .. jupyter-execute::
+
+            # Get the data from the element with id 533
+            my_temp_field.get_entity_data_by_id(id=533)
+
+        Note that this element corresponds to an index of 2 within the
+        field, because scoping IDs are not sequential. You can get the
+        index of the element with id 533 in the field with:
+
+        .. jupyter-execute::
+
+            # Get the index of the element with id 533
+            my_temp_field.scoping.index(id=533)
+
+While these methods are acceptable when requesting data for a few elements
+or nodes, they should not be used when looping over the entire array. For efficiency,
+a field's data can be recovered locally before sending a large number of requests:
+
+.. jupyter-execute::
+
+    # Create a deep copy of the field that can be accessed and modified locally.
+    with my_temp_field.as_local_field() as f:
+        for i in range(1, 100):
+            f.get_entity_data_by_id(i)
diff --git a/doc/source/user_guide/tutorials/data_structures/index.rst b/doc/source/user_guide/tutorials/data_structures/index.rst
new file mode 100644
index 00000000000..6043628416d
--- /dev/null
+++ b/doc/source/user_guide/tutorials/data_structures/index.rst
@@ -0,0 +1,48 @@
+.. _ref_tutorials_data_structures:
+
+===================
+DPF data structures
+===================
+
+DPF uses two main data structures to handle data: Fields and Collections.
+It is therefore important to be aware of how the data is
+structured in those containers.
+
+The data containers can be:
+
+ - **Raw data storage structures**: data arrays (a ``Field`` for example) or data maps (a ``DataTree`` for example)
+ - **Collections**: a group of objects of one DPF raw data storage type sharing a common label (a ``FieldsContainer`` for example, which is a group of ``Fields`` with the same label)
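+As a minimal sketch of the two kinds of containers (with purely illustrative ids and values), the snippet
+below stores an array in a ``Field`` (raw data storage) and then groups it in a ``FieldsContainer``
+(collection):
+
+.. jupyter-execute::
+
+    from ansys.dpf import core as dpf
+
+    # Raw data storage: a Field holds a data array over a scoping
+    field = dpf.fields_factory.create_scalar_field(num_entities=3)
+    field.scoping.ids = [1, 2, 3]
+    field.data = [10.0, 20.0, 30.0]
+
+    # Collection: a FieldsContainer groups Fields under a label (here, 'time')
+    fc = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field])
+    print(fc)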
+These tutorials explain how these structures work and how you can manipulate the data within them.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Data arrays - Fields
+        :link: ref_tutorials_data_arrays
+        :link-type: ref
+        :text-align: center
+
+        This tutorial shows how to create and work with data arrays in PyDPF-Core.
+
+
+    .. grid-item-card:: DPF collections
+        :link: ref_tutorials_language_and_usage
+        :link-type: ref
+        :text-align: center
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        This tutorial shows how to create and work with some DPF collections:
+        FieldsContainer, MeshesContainer and ScopingsContainer
+
+        +++
+        Coming soon
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
+    data_arrays.rst
diff --git a/doc/source/user_guide/tutorials/distributed_files/index.rst b/doc/source/user_guide/tutorials/distributed_files/index.rst
new file mode 100644
index 00000000000..70240e016d9
--- /dev/null
+++ b/doc/source/user_guide/tutorials/distributed_files/index.rst
@@ -0,0 +1,31 @@
+.. _ref_tutorials_distributed_files:
+
+==============================
+Post-process distributed files
+==============================
+
+These tutorials show how to create workflows on different processes (possibly on different machines) and connect them.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Post-process data on distributed processes
+        :link: ref_tutorials
+        :link-type: ref
+        :text-align: center
+
+        This tutorial
+
+    .. grid-item-card:: Create a custom workflow on distributed processes
+        :link: ref_tutorials
+        :link-type: ref
+        :text-align: center
+
+        This tutorial
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
diff --git a/doc/source/user_guide/tutorials/dpf_server/index.rst b/doc/source/user_guide/tutorials/dpf_server/index.rst
new file mode 100644
index 00000000000..efc4e1bfdb2
--- /dev/null
+++ b/doc/source/user_guide/tutorials/dpf_server/index.rst
@@ -0,0 +1,24 @@
+.. _ref_tutorials_dpf_server:
+
+==========
+DPF server
+==========
+
+This tutorial explains how to work with the DPF client-server architecture.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Switch between local and remote server
+        :link: ref_tutorials
+        :link-type: ref
+        :text-align: center
+
+        This tutorial
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
diff --git a/doc/source/user_guide/tutorials/export_data/index.rst b/doc/source/user_guide/tutorials/export_data/index.rst
new file mode 100644
index 00000000000..c9be8ecd6c7
--- /dev/null
+++ b/doc/source/user_guide/tutorials/export_data/index.rst
@@ -0,0 +1,28 @@
+.. _ref_tutorials_export_data:
+
+===========
+Export data
+===========
+
+Data in DPF can be exported to universal file formats, such as VTK, HDF5, and TXT files.
+You can use the exported data to generate TH-plots, screenshots, and animations, or to create custom result
+plots using the `numpy <https://numpy.org/>`_ and `matplotlib <https://matplotlib.org/>`_ packages.
+
+These tutorials explain how to export the data from your PyDPF-Core manipulations.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: HDF5 export
+        :link: ref_tutorials
+        :link-type: ref
+        :text-align: center
+
+        This tutorial
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst
new file mode 100644
index 00000000000..38aea40a368
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst
@@ -0,0 +1,166 @@
+.. _ref_tutorials_extract_and_explore_results_data:
+
+================================
+Extract and explore results data
+================================
+
+:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+.. include:: ../../../links_and_refs.rst
+.. |get_entity_data| replace:: :func:`get_entity_data()`
+.. |get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()`
+
+This tutorial shows how to extract and explore results data from a result file.
+
+When you extract a result from a result file, DPF stores it in a |Field|.
+This |Field| therefore contains the data of the result associated with it.
+
+.. note::
+
+    When DPF-Core returns the |Field| object, what Python actually has is a client-side
+    representation of the |Field|, not the entirety of the |Field| itself. This means
+    that all the data of the field is stored within the DPF service. This is important
+    because when building your workflows, the most efficient way of interacting with result data
+    is to minimize the exchange of data between Python and DPF, either by using operators
+    or by accessing only the data that is needed.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Get the result file
+-------------------
+
+First, import a result file. For this tutorial, you can use one available in the |Examples| module.
+For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file`
+tutorial.
+
+Here, we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated.
+Thus, we get a |Field| from this |FieldsContainer|.
+
+.. jupyter-execute::
+
+    # Import the ``ansys.dpf.core`` module
+    from ansys.dpf import core as dpf
+    # Import the examples module
+    from ansys.dpf.core import examples
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+    # Define the result file path
+    result_file_path_1 = examples.download_transient_result()
+
+    # Create the model
+    model_1 = dpf.Model(data_sources=result_file_path_1)
+
+    # Extract the displacement results for the last time step
+    disp_results = model_1.results.displacement.on_last_time_freq.eval()
+
+    # Get the displacement field for the last time step
+    disp_field = disp_results[0]
+
+    # Print the displacement Field
+    print(disp_field)
+
+Extract all the data from a |Field|
+-----------------------------------
+
+You can extract the entire data in a |Field| as:
+
+- An array (numpy array);
+- A list.
+
+Data as an array
+^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the displacement data as an array
+    data_array = disp_field.data
+
+    # Print the data as an array
+    print("Displacement data as an array: ", '\n', data_array)
+
+Note that this array is a genuine, local, numpy array (overloaded by the DPFArray):
+
+.. jupyter-execute::
+
+    # Print the array type
+    print("Array type: ", type(data_array))
+
+Data as a list
+^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the displacement data as a list
+    data_list = disp_field.data_as_list
+    # Print the data as a list
+    print("Displacement data as a list: ", '\n', data_list)
+
+Extract specific data from a field
+----------------------------------
+
+If you need to access data for specific entities (nodes, elements, ...), you can extract it with two approaches:
+
+- :ref:`Based on its index <ref_extract_specific_data_by_index>` (the data position in the |Field|) by using the |get_entity_data| method;
+- :ref:`Based on the entity's id <ref_extract_specific_data_by_id>` by using the |get_entity_data_by_id| method.
+
+The |Field| data is organized with respect to its scoping ids. Note that the element with id=533
+corresponds to index=2 within the |Field|.
+
+.. jupyter-execute::
+
+    # Get the index of the entity with id=533
+    index_533_entity = disp_field.scoping.index(id=533)
+    # Print the index
+    print("Index of entity with id=533: ", index_533_entity)
+
+Be aware that scoping IDs are not sequential. You can get the id of the element at position 533
+of the |Field| with:
+
+.. jupyter-execute::
+
+    # Get the id of the entity with index=533
+    id_533_entity = disp_field.scoping.id(index=533)
+    print("Id of entity with index=533: ", id_533_entity)
+
+.. _ref_extract_specific_data_by_index:
+
+Get the data by the entity index
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the data from the entity at index 3 in the field
+    data_3_entity = disp_field.get_entity_data(index=3)
+    # Print the data
+    print("Data of entity with index=3: ", data_3_entity)
+
+.. _ref_extract_specific_data_by_id:
+
+Get the data by the entity id
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the data from the entity with id=533
+    data_533_entity = disp_field.get_entity_data_by_id(id=533)
+    # Print the data
+    print("Data of entity with id=533: ", data_533_entity)
+
+Extract specific data from a field using a loop over the array
+---------------------------------------------------------------
+
+While the methods above are acceptable when requesting data for a few elements
+or nodes, they should not be used when looping over the entire array. For efficiency,
+a |Field|'s data can be recovered locally before sending a large number of requests:
+
+.. jupyter-execute::
+
+    # Create a deep copy of the field that can be accessed and modified locally.
+    with disp_field.as_local_field() as f:
+        for i in disp_field.scoping.ids[2:50]:
+            f.get_entity_data_by_id(i)
+
+    # Print the field
+    print(f)
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst
new file mode 100644
index 00000000000..32f0fa02285
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst
@@ -0,0 +1,159 @@
+.. _ref_tutorials_extract_and_explore_results_metadata:
+
+====================================
+Extract and explore results metadata
+====================================
+
+:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+.. include:: ../../../links_and_refs.rst
+.. |ResultInfo| replace:: :class:`ResultInfo`
+
+This tutorial shows how to extract and explore results metadata from a result file.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Get the result file
+-------------------
+
+First, import a result file. For this tutorial, you can use one available in the |Examples| module.
+For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file`
+tutorial.
+
+.. jupyter-execute::
+
+    # Import the ``ansys.dpf.core`` module
+    from ansys.dpf import core as dpf
+    # Import the examples module
+    from ansys.dpf.core import examples
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+    # Define the result file path
+    result_file_path_1 = examples.download_transient_result()
+    # Create the model
+    model_1 = dpf.Model(data_sources=result_file_path_1)
+
+Explore the results general metadata
+------------------------------------
+
+You can explore the general results metadata, before extracting the results, by using
+the |ResultInfo| object and its methods. This metadata includes:
+
+- Analysis type;
+- Physics type;
+- Number of results;
+- Unit system;
+- Solver version, date, and time;
+- Job name.
+
+.. jupyter-execute::
+
+    # Define the ResultInfo object
+    result_info_1 = model_1.metadata.result_info
+
+    # Get the analysis type
+    analysis_type = result_info_1.analysis_type
+    # Print the analysis type
+    print("Analysis type: ", analysis_type, "\n")
+
+    # Get the physics type
+    physics_type = result_info_1.physics_type
+    # Print the physics type
+    print("Physics type: ", physics_type, "\n")
+
+    # Get the number of available results
+    number_of_results = result_info_1.n_results
+    # Print the number of available results
+    print("Number of available results: ", number_of_results, "\n")
+
+    # Get the unit system
+    unit_system = result_info_1.unit_system
+    # Print the unit system
+    print("Unit system: ", unit_system, "\n")
+
+    # Get the solver version, date, and time
+    solver_version = result_info_1.solver_version
+    solver_date = result_info_1.solver_date
+    solver_time = result_info_1.solver_time
+
+    # Print the solver version, date, and time
+    print("Solver version: ", solver_version, "\n")
+    print("Solver date: ", solver_date, "\n")
+    print("Solver time: ", solver_time, "\n")
+
+    # Get the job name
+    job_name = result_info_1.job_name
+    # Print the job name
+    print("Job name: ", job_name, "\n")
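+The |ResultInfo| object also lists the available results themselves. As a short sketch
+(the exact set of results depends on the file), you can iterate over them:
+
+.. jupyter-execute::
+
+    # List the results available in the result file
+    for available_result in result_info_1.available_results:
+        print(available_result.name)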
+Explore a result's metadata
+---------------------------
+
+When you extract a result from a result file, DPF stores it in a |Field|.
+This |Field| therefore contains the metadata of the result associated with it.
+This metadata includes:
+
+- Location;
+- Scoping (type and number of entities);
+- Elementary data count (the number of entities, that is, how many data vectors we have);
+- Components count (the dimension of the vectors; here we have a displacement, so we expect 3 components: X, Y and Z);
+- Shape of the stored data (a tuple with the elementary data count and the components count);
+- Field size (the length of the entire data vector, equal to the number of elementary data times the number of components);
+- Unit of the data.
+
+Here, we explore the metadata of the displacement results.
+
+Start by extracting the displacement results.
+
+.. jupyter-execute::
+
+    # Extract the displacement results
+    disp_results = model_1.results.displacement.eval()
+
+    # Get the displacement field
+    disp_field = disp_results[0]
+
+Explore the displacement results metadata:
+
+.. jupyter-execute::
+
+    # Get the location of the displacement data
+    location = disp_field.location
+    # Print the location
+    print("Location: ", location, '\n')
+
+    # Get the displacement Field scoping
+    scoping = disp_field.scoping
+    # Print the Field scoping
+    print("Scoping: ", '\n', scoping, '\n')
+
+    # Get the displacement Field scoping ids
+    scoping_ids = disp_field.scoping.ids  # Available entities ids
+    # Print the Field scoping ids
+    print("Scoping ids: ", scoping_ids, '\n')
+
+    # Get the displacement Field elementary data count
+    elementary_data_count = disp_field.elementary_data_count
+    # Print the elementary data count
+    print("Elementary data count: ", elementary_data_count, '\n')
+
+    # Get the displacement Field components count
+    components_count = disp_field.component_count
+    # Print the components count
+    print("Components count: ", components_count, '\n')
+
+    # Get the displacement Field size
+    field_size = disp_field.size
+    # Print the Field size
+    print("Size: ", field_size, '\n')
+
+    # Get the displacement Field shape
+    shape = disp_field.shape
+    # Print the Field shape
+    print("Shape: ", shape, '\n')
+
+    # Get the displacement Field unit
+    unit = disp_field.unit
+    # Print the displacement Field unit
+    print("Unit: ", unit, '\n')
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst
new file mode 100644
index 00000000000..83bc9bd49fb
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst
@@ -0,0 +1,365 @@
+.. _ref_tutorials_import_result_file:
+
+===========================
+Import a result file in DPF
+===========================
+
+:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+.. include:: ../../../links_and_refs.rst
+.. |set_result_file_path| replace:: :func:`set_result_file_path() `
+.. |add_file_path| replace:: :func:`add_file_path() `
+
+This tutorial shows how to import a result file in DPF.
+
+There are two approaches to import a result file in DPF:
+
+- :ref:`Using the DataSources object <ref_import_result_file_data_sources>`
+- :ref:`Using the Model object <ref_import_result_file_model>`
+
+.. note::
+
+    The |Model| extracts a large amount of information by default (results, mesh, and analysis data).
+    If using this helper takes a long time to process your code, consider using a |DataSources| object
+    and instantiating operators directly with it.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Define the result file path
+---------------------------
+
+Both approaches need a file path to be defined. For this tutorial, you can use a result file available in
+the |Examples| module.
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+            # Import the operators module
+            from ansys.dpf.core import operators as ops
+
+            # Define the .rst result file path
+            result_file_path_11 = examples.find_static_rst()
+
+            # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result file paths
+            result_file_path_12 = examples.download_msup_files_to_dict()
+
+            # Print the result file paths
+            print("Result file path 11:", "\n", result_file_path_11, "\n")
+            print("Result file paths 12:", "\n", result_file_path_12, "\n")
+
+    .. tab-item:: LSDYNA
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+            # Import the operators module
+            from ansys.dpf.core import operators as ops
+
+            # Define the .d3plot result file paths
+            result_file_path_21 = examples.download_d3plot_beam()
+
+            # Define the .binout result file path
+            result_file_path_22 = examples.download_binout_matsum()
+
+            # Print the result file paths
+            print("Result file paths 21:", "\n", result_file_path_21, "\n")
+            print("Result file path 22:", "\n", result_file_path_22, "\n")
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+            # Import the operators module
+            from ansys.dpf.core import operators as ops
+
+            # Define the project .flprj result file path
+            result_file_path_31 = examples.download_fluent_axial_comp()["flprj"]
+
+            # Define the CFF .cas.h5/.dat.h5 result file paths
+            result_file_path_32 = examples.download_fluent_axial_comp()
+
+            # Print the result file paths
+            print("Result file path 31:", "\n", result_file_path_31, "\n")
+            print("Result file paths 32:", "\n", result_file_path_32, "\n")
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+            # Import the operators module
+            from ansys.dpf.core import operators as ops
+
+            # Define the project .res result file path
+            result_file_path_41 = examples.download_cfx_mixing_elbow()
+
+            # Define the CFF .cas.cff/.dat.cff result file paths
+            result_file_path_42 = examples.download_cfx_heating_coil()
+
+            # Print the result file paths
+            print("Result file path 41:", "\n", result_file_path_41, "\n")
+            print("Result file paths 42:", "\n", result_file_path_42, "\n")
+
+.. _ref_import_result_file_data_sources:
+
+Use a |DataSources|
+-------------------
+
+The |DataSources| object manages the paths to data files. Use this object to declare data
+inputs for PyDPF-Core APIs.
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        **a) `.rst` result file**
+
+        Create the |DataSources| object and give the path to the result file to the *'result_path'* argument.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            # Use the ``result_path`` argument and give the result file path
+            ds_11 = dpf.DataSources(result_path=result_file_path_11)
+
+        **b) `.mode`, `.rfrq` and `.rst` result files**
+
+        In a modal superposition, modal coefficients are multiplied by mode shapes (from a previous modal analysis)
+        to analyse a structure under given boundary conditions in a range of frequencies. Doing this expansion "on demand"
+        in DPF instead of in the solver reduces the size of the result files.
+
+        The expansion is recursive in DPF: first, the modal response is read. Then, *upstream* mode shapes are found in
+        the |DataSources|, where they are read and expanded. Upstream refers to a source that provides data to a
+        particular process.
+
+        To create a recursive workflow, add the upstream |DataSources| object, which contains the upstream
+        data files, to the main |DataSources| object.
+
+        .. jupyter-execute::
+
+            # Create the main DataSources object
+            ds_12 = dpf.DataSources()
+            # Define the main result file path
+            ds_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq')
+
+            # Create the upstream DataSources object with the main upstream file path
+            upstream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"])
+            # Add the additional upstream file path to the upstream DataSources object
+            upstream_ds_12.add_file_path(filepath=result_file_path_12["rst"])
+
+            # Add the upstream DataSources to the main DataSources object
+            ds_12.add_upstream(upstream_data_sources=upstream_ds_12)
+
+    .. tab-item:: LSDYNA
+
+        **a) `.d3plot` result file**
+
+        The d3plot file does not contain information related to units. In this case, as the
+        simulation was run through Mechanical, a ``file.actunits`` file is produced. If this
+        file is provided in the |DataSources|, the units are correctly fetched for all
+        results in the file as well as for the mesh.
+
+        Thus, we use the |set_result_file_path| and |add_file_path| methods to add the main
+        and the additional result files to the |DataSources| object.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            ds_21 = dpf.DataSources()
+
+            # Define the main result file path
+            ds_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot")
+
+            # Add the additional file path related to the units
+            ds_21.add_file_path(filepath=result_file_path_21[3], key="actunits")
+
+        **b) `.binout` result file**
+
+        The extension key *`.binout`* is not explicitly specified in the result file. Thus, we use
+        the |set_result_file_path| method and give the extension key to the *'key'* argument to correctly
+        add the result file path to the |DataSources| object.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            ds_22 = dpf.DataSources()
+
+            # Define the path to the result file
+            # Use the ``key`` argument and give the file extension key
+            ds_22.set_result_file_path(filepath=result_file_path_22, key="binout")
+
+    .. tab-item:: Fluent
+
+        **a) `.flprj` result file**
+
+        Create the |DataSources| object and give the path to the result file to the *'result_path'* argument.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            # Use the ``result_path`` argument and give the result file path
+            ds_31 = dpf.DataSources(result_path=result_file_path_31)
+
+        **b) `.cas.h5`, `.dat.h5` result files**
+
+        Here, we have a main and an additional result file with two extension keys.
+
+        Thus, you must use the |set_result_file_path| and |add_file_path| methods to add the main and
+        additional result files to the |DataSources| object and explicitly give the *first* extension key to
+        their *'key'* argument.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            ds_32 = dpf.DataSources()
+
+            # Define the path to the main result file
+            # Use the ``key`` argument and give the first extension key
+            ds_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas")
+
+            # Add the additional result file path to the DataSources
+            # Use the ``key`` argument and give the first extension key
+            ds_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat")
+
+    .. tab-item:: CFX
+
+        **a) `.res` result file**
+
+        Create the |DataSources| object and give the path to the result file to the *'result_path'* argument.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            # Use the ``result_path`` argument and give the result file path
+            ds_41 = dpf.DataSources(result_path=result_file_path_41)
+
+        **b) `.cas.cff`, `.dat.cff` result files**
+
+        Here, we have a main and an additional result file with two extension keys.
+
+        Thus, you must use the |set_result_file_path| and |add_file_path| methods to add the main and
+        additional result files to the |DataSources| object. Also, you must explicitly give the *first* extension key to
+        the *'key'* argument.
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            ds_42 = dpf.DataSources()
+
+            # Define the path to the main result file
+            # Use the ``key`` argument and give the first extension key
+            ds_42.set_result_file_path(filepath=result_file_path_42["cas"], key="cas")
+
+            # Add the additional result file path to the DataSources
+            # Use the ``key`` argument and give the first extension key
+            ds_42.add_file_path(filepath=result_file_path_42["dat"], key="dat")
+
+.. _ref_import_result_file_model:
+
+Use a |Model|
+-------------
+
+The :class:`Model ` class creates and evaluates common readers for the files it is given,
+such as a mesh provider, a result info provider, and a streams provider.
+It provides dynamically built methods to extract the results available in the files, as well as many shortcuts
+to facilitate exploration of the available data.
+
+To create a |Model|, you can provide to the *'data_sources'* argument either:
+
+- The result file path, if you are working with a single result file that has an explicit extension key;
+- A |DataSources| object.
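+For example, printing a |Model| summarizes the data found in the file, including the dynamically built
+result methods. A minimal sketch, reusing the MAPDL |DataSources| object ``ds_11`` defined above (the tabs
+below then create the models for each solver):
+
+.. jupyter-execute::
+
+    # Create a Model and print an overview of its available results, mesh, and time steps
+    model_overview = dpf.Model(data_sources=ds_11)
+    print(model_overview)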
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        **a) `.rst` result file**
+
+        .. jupyter-execute::
+
+            # Create the model with the result file path
+            model_11 = dpf.Model(data_sources=result_file_path_11)
+
+            # Create the model with the DataSources object
+            model_12 = dpf.Model(data_sources=ds_11)
+
+        **b) `.mode`, `.rfrq` and `.rst` result files**
+
+        .. jupyter-execute::
+
+            # Create the model with the DataSources object
+            model_13 = dpf.Model(data_sources=ds_12)
+
+    .. tab-item:: LSDYNA
+
+        **a) `.d3plot` result file**
+
+        .. jupyter-execute::
+
+            # Create the model with the DataSources object
+            model_21 = dpf.Model(data_sources=ds_21)
+
+        **b) `.binout` result file**
+
+        .. jupyter-execute::
+
+            # Create the model with the DataSources object
+            model_22 = dpf.Model(data_sources=ds_22)
+
+    .. tab-item:: Fluent
+
+        **a) `.flprj` result file**
+
+        .. jupyter-execute::
+
+            # Create the model with the result file path
+            model_31 = dpf.Model(data_sources=result_file_path_31)
+
+            # Create the model with the DataSources object
+            model_32 = dpf.Model(data_sources=ds_31)
+
+        **b) `.cas.h5`, `.dat.h5` result files**
+
+        .. jupyter-execute::
+
+            # Create the model with the DataSources object
+            model_33 = dpf.Model(data_sources=ds_32)
+
+    .. tab-item:: CFX
+
+        **a) `.res` result file**
+
+        .. jupyter-execute::
+
+            # Create the model with the result file path
+            model_41 = dpf.Model(data_sources=result_file_path_41)
+
+            # Create the model with the DataSources object
+            model_42 = dpf.Model(data_sources=ds_41)
+
+        **b) `.cas.cff`, `.dat.cff` result files**
+
+        .. jupyter-execute::
+
+            # Create the model with the DataSources object
+            model_43 = dpf.Model(data_sources=ds_42)
+
diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst
new file mode 100644
index 00000000000..b9607fa4e2c
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/index.rst
@@ -0,0 +1,83 @@
+.. _ref_tutorials_import_data:
+
+===========
+Import Data
+===========
+
+These tutorials demonstrate how to represent data in DPF: either from manual input or
+from simulation result files.
+
+From user input
+***************
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Load custom data
+        :link: ref_tutorials_load_custom_data
+        :link-type: ref
+        :text-align: center
+
+        Learn how to build DPF data storage structures from custom data.
+
+From result files
+*****************
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Import a result file in DPF
+        :link: ref_tutorials_import_result_file
+        :link-type: ref
+        :text-align: center
+
+        This tutorial shows how to import a result file in DPF.
+
+        +++
+        :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+    .. grid-item-card:: Extract and explore results metadata
+        :link: ref_tutorials_extract_and_explore_results_metadata
+        :link-type: ref
+        :text-align: center
+
+        This tutorial shows how to extract and explore results metadata (analysis type,
+        physics type, unit system...) from a result file.
+
+        +++
+        :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+    .. grid-item-card:: Extract and explore results data
+        :link: ref_tutorials_extract_and_explore_results_data
+        :link-type: ref
+        :text-align: center
+
+        This tutorial shows how to extract and explore results data from a result file.
+
+        +++
+        :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+    .. grid-item-card:: Narrow down data
+        :link: reft_tutorials_narrow_down_data
+        :link-type: ref
+        :text-align: center
+
+        This tutorial explains how to scope (get a spatial and/or temporal subset of
+        the simulation data) your results.
+
+        +++
+        :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX`
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
+    load_custom_data.rst
+    import_result_file.rst
+    extract_and_explore_results_metadata.rst
+    extract_and_explore_results_data.rst
+    narrow_down_data.rst
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/import_data/load_custom_data.rst b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst
new file mode 100644
index 00000000000..b6713827586
--- /dev/null
+++ b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst
@@ -0,0 +1,697 @@
+.. _ref_tutorials_load_custom_data:
+
+=======================
+Load custom data in DPF
+=======================
+
+.. include:: ../../../links_and_refs.rst
+.. |Field.append| replace:: :func:`append()`
+.. |Field.data| replace:: :attr:`Field.data`
+.. |fields_factory| replace:: :mod:`fields_factory`
+.. |fields_container_factory| replace:: :mod:`fields_container_factory`
+.. |location| replace:: :class:`location`
+.. |nature| replace:: :class:`nature`
+.. |dimensionality| replace:: :class:`dimensionality`
+.. |Field.dimensionality| replace:: :func:`Field.dimensionality`
+.. |Field.location| replace:: :func:`Field.location`
+.. |Field.scoping| replace:: :func:`Field.scoping`
+.. |field_from_array| replace:: :func:`field_from_array()`
+.. |create_scalar_field| replace:: :func:`create_scalar_field()`
+.. |create_vector_field| replace:: :func:`create_vector_field()`
+.. |create_3d_vector_field| replace:: :func:`create_3d_vector_field()`
+.. |create_matrix_field| replace:: :func:`create_matrix_field()`
+.. |create_tensor_field| replace:: :func:`create_tensor_field()`
+.. |over_time_freq_fields_container| replace:: :func:`over_time_freq_fields_container()`
+
+This tutorial shows how to represent your custom data in DPF data storage structures.
+
+To import your custom data into DPF, you must create a DPF data structure to store it.
+DPF uses |Field| and |FieldsContainer| objects to handle data. The |Field| is a homogeneous array,
+and a |FieldsContainer| is a labeled collection of |Field| objects. For more information on DPF data structures
+such as the |Field| and their use, see the :ref:`ref_tutorials_data_structures` tutorials section.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Define the data
+---------------
+
+In this tutorial, we create different Fields from data stored in Python lists.
+
+Create the Python lists with the data to be *set* to the Fields.
+
+.. jupyter-execute::
+
+    # Data for the scalar Fields (lists with 1 and 2 dimensions)
+    data_1 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0]
+    data_2 = [[12.0, 7.0, 8.0], [9.0, 31.0, 1.0]]
+
+    # Data for the vector Fields (lists with 1 and 2 dimensions)
+    data_3 = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0]
+    data_4 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 9.0, 7.0, 8.0, 10.0]
+    data_5 = [[8.0, 4.0, 3.0], [31.0, 5.0, 7.0]]
+
+    # Data for the matrix Fields
+    data_6 = [3.0, 2.0, 1.0, 7.0]
+    data_7 = [15.0, 3.0, 9.0, 31.0, 1.0, 42.0, 5.0, 68.0, 13.0]
+    data_8 = [[12.0, 7.0, 8.0], [1.0, 4.0, 27.0], [98.0, 4.0, 6.0]]
+
+Create the Python lists with the data to be *appended* to the Fields.
+
+.. jupyter-execute::
+
+    # Data for the scalar Fields
+    data_9 = [24.0]
+
+    # Data for the vector Fields
+    data_10 = [47.0, 33.0, 5.0]
+
+    # Data for the matrix Fields
+    data_11 = [8.0, 2.0, 4.0, 64.0, 32.0, 47.0, 11.0, 23.0, 1.0]
+
+
+Create the Fields
+-----------------
+
+In this tutorial, we explain how to create the following Fields:
+
+- Scalar Field;
+- Vector Field;
+- Matrix Field.
+
+.. note::
+
+    A |Field| must always be given:
+
+    - A |location| and a |Scoping|.
+
+      Here, we create Fields with the default *'Nodal'* |location|. Thus, each entity (here, the nodes) must
+      have a |Scoping| id, which can be defined in random or numerical order:
+
+      - If you want to *set* a data array to the |Field|, you must first set the |Scoping| ids using the |Field.scoping| method.
+      - If you want to *append* an entity with a data array to the |Field|, you do not need to set the |Scoping| ids beforehand.
+
+    - A |nature| and a |dimensionality| (the number of data components for each entity). They must respect the type and size of the
+      data to be stored in the |Field|.
+
+Import the PyDPF-Core library
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First, import the PyDPF-Core library.
+
+.. jupyter-execute::
+
+    # Import the ``ansys.dpf.core`` module
+    from ansys.dpf import core as dpf
+
+Define the Fields sizing
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The second step consists of defining the field dimensions.
+
+.. tab-set::
+
+    .. tab-item:: Scalar fields
+
+        Here, we create one |Field| with 6 scalar values, and thus 6 entities with one |Scoping| id each.
+
+        .. jupyter-execute::
+
+            # Define the number of entities
+            num_entities_1 = 6
+
+        You must ensure that this |Field| has a *'scalar'* |nature| and a *'1D'* |dimensionality|.
+
+    .. tab-item:: Vector fields
+
+        Here, we create:
+
+        - One |Field| with 2 vectors (thus, 2 entities) of 3 components each (3D vector |Field|);
+        - One |Field| with 2 vectors (thus, 2 entities) of 5 components each (5D vector |Field|).
+
+        .. jupyter-execute::
+
+            # Define the number of entities
+            num_entities_2 = 2
+
+        You must ensure that these Fields have a *'vector'* |nature| and the corresponding |dimensionality|
+        (*'3D'* and *'5D'*).
+
+    .. tab-item:: Matrix fields
+
+        Here, we create:
+
+        - One Field with 1 matrix (thus, 1 entity) of 2 lines and 2 columns;
+        - Two Fields with 1 matrix (thus, 1 entity) of 3 lines and 3 columns (tensor).
+
+        .. jupyter-execute::
+
+            # Define the number of entities
+            num_entities_3 = 1
+
+        You must ensure that these Fields have a *'matrix'* |nature| and the corresponding |dimensionality|.
+
+Create the Fields objects
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can create the Fields using two approaches:
+
+- :ref:`Instantiating the Field object<ref_create_field_instance>`;
+- :ref:`Using the fields_factory module<ref_create_field_fields_factory>`.
+
+.. _ref_create_field_instance:
+
+Create a |Field| by an instance of this object
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. tab-set::
+
+    .. tab-item:: Scalar fields
+
+        .. jupyter-execute::
+
+            # Define the number of entities
+            num_entities_1 = 6
+
+        You must ensure that this |Field| has a *'scalar'* |nature| and a *'1D'* |dimensionality|.
+
+        For this approach, the default |nature| of the |Field| object is *'vector'*. You can modify it directly with the
+        *'nature'* argument or with the |Field.dimensionality| method.
+
+        Create the scalar |Field| and use the *'nature'* argument.
+
+        .. jupyter-execute::
+
+            # Instantiate the Field
+            field_11 = dpf.Field(nentities=num_entities_1, nature=dpf.common.natures.scalar)
+
+            # Set the scoping ids
+            field_11.scoping.ids = range(num_entities_1)
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_11, '\n')
+
+        Create the scalar |Field| and use the |Field.dimensionality| method.
+
+        .. jupyter-execute::
+
+            # Instantiate the Field
+            field_12 = dpf.Field(nentities=num_entities_1)
+
+            # Use the Field.dimensionality method
+            field_12.dimensionality = dpf.Dimensionality([1])
+
+            # Set the scoping ids
+            field_12.scoping.ids = range(num_entities_1)
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_12, '\n')
+
+    .. tab-item:: Vector fields
+
+        Here, we create:
+
+        - One |Field| with 2 vectors (thus, 2 entities) of 3 components each (3D vector |Field|);
+        - One |Field| with 2 vectors (thus, 2 entities) of 5 components each (5D vector |Field|).
+
+        .. jupyter-execute::
+
+            # Define the number of entities
+            num_entities_2 = 2
+
+        You must ensure that these Fields have a *'vector'* |nature| and the corresponding |dimensionality| (*'3D'* and *'5D'*).
+
+        For this approach, the default |nature| is *'vector'* and the default |dimensionality| is *'3D'*. So for the second vector
+        |Field|, you must set a *'5D'* |dimensionality| using the |Field.dimensionality| method.
+
+        Create the *'3D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Instantiate the Field
+            field_21 = dpf.Field(nentities=num_entities_2)
+
+            # Set the scoping ids
+            field_21.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("3D vector Field: ", '\n', field_21, '\n')
+
+        Create the *'5D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Instantiate the Field
+            field_31 = dpf.Field(nentities=num_entities_2)
+
+            # Use the Field.dimensionality method
+            field_31.dimensionality = dpf.Dimensionality([5])
+
+            # Set the scoping ids
+            field_31.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("5D vector Field: ", '\n', field_31, '\n')
+
+.. _ref_create_field_fields_factory:
+
+Create a |Field| using the |fields_factory| module
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. tab-set::
+
+    .. tab-item:: Scalar fields
+
+        You can use two functions from the |fields_factory| module to create a scalar |Field|:
+
+        - The |create_scalar_field| function;
+        - The |field_from_array| function.
+
+        **Create the Field using the create_scalar_field function**
+
+        For this approach, the default |nature| of the |Field| object is *'scalar'* and the default |dimensionality| is *'1D'*.
+        Thus, you just have to use the |create_scalar_field| function to create a scalar |Field|.
+
+        .. jupyter-execute::
+
+            # Create the scalar Field
+            field_13 = dpf.fields_factory.create_scalar_field(num_entities=num_entities_1)
+
+            # Set the scoping ids
+            field_13.scoping.ids = range(num_entities_1)
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_13, '\n')
+
+        **Create the Field using the field_from_array function**
+
+        Different from the other approaches, where you set or append the data after creating the |Field|, here the data is
+        used as an input of the |field_from_array| function.
+
+        This function takes a NumPy array or Python list of either:
+
+        - 1 dimension (one array). In this case, you directly get a scalar |Field|;
+        - 2 dimensions (one array containing multiple arrays with 3 components each). In this case, you get a 3D vector |Field|;
+          thus, you have to change the |Field| |dimensionality| using the |Field.dimensionality| method.
+
+        Create the scalar Field with a 1-dimensional list.
+
+        .. jupyter-execute::
+
+            # Use the field_from_array function
+            field_14 = dpf.fields_factory.field_from_array(arr=data_1)
+
+            # Set the scoping ids
+            field_14.scoping.ids = range(num_entities_1)
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_14, '\n')
+
+        Create the scalar Field with a 2-dimensional list.
+
+        .. jupyter-execute::
+
+            # Use the field_from_array function
+            field_15 = dpf.fields_factory.field_from_array(arr=data_2)
+
+            # Use the Field.dimensionality method
+            field_15.dimensionality = dpf.Dimensionality([1])
+
+            # Set the scoping ids
+            field_15.scoping.ids = range(num_entities_1)
+
+            # Print the Field
+            print("Scalar Field (b): ", '\n', field_15, '\n')
+
+
+    .. tab-item:: Vector fields
+
+        You can use three functions from the |fields_factory| module to create a vector |Field|:
+
+        - The |create_vector_field| function;
+        - The |create_3d_vector_field| function (specifically to create a 3D vector |Field|,
+          that is, a vector |Field| with 3 components for each entity);
+        - The |field_from_array| function.
+
+        **Create the Field using the create_vector_field() function**
+
+        For this approach, the default |nature| is *'vector'*. To define the |dimensionality|, you must use the *'num_comp'* argument.
+
+        Create the *'3D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Use the create_vector_field function
+            field_22 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=3)
+
+            # Set the scoping ids
+            field_22.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("3D vector Field: ", '\n', field_22, '\n')
+
+        Create the *'5D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Use the create_vector_field function
+            field_32 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=5)
+
+            # Set the scoping ids
+            field_32.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("5D vector Field: ", '\n', field_32, '\n')
+
+        **Create a 3d vector Field using the create_3d_vector_field() function**
+
+        For this approach, the default |nature| is *'vector'* and the |dimensionality| is *'3D'*. Thus, you just
+        have to use the |create_3d_vector_field| function to create a 3D vector |Field|.
+
+        .. jupyter-execute::
+
+            # Create the 3d vector Field
+            field_25 = dpf.fields_factory.create_3d_vector_field(num_entities=num_entities_2)
+            # Set the scoping ids
+            field_25.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("Vector Field (3D): ", '\n', field_25, '\n')
+
+        **Create the Field using the field_from_array() function**
+
+        Different from the other approaches, where you set or append the data after creating the |Field|, here the data is
+        used as an input of the |field_from_array| function.
+
+        This function takes a NumPy array or Python list of either:
+
+        - 1 dimension (one array). In this case, you have to change the |Field| |dimensionality| using the
+          |Field.dimensionality| method;
+        - 2 dimensions (one array containing multiple arrays with 3 components). In this case, you get a 3D vector |Field|.
+
+        .. note::
+
+            The |Field| must always ensure a homogeneous shape. The shape is a tuple with the number of elementary data and the
+            number of components.
+
+            So, for the *'5D'* vector |Field| we would want a shape of (10,5). Nevertheless, the 2-dimensional data list we
+            defined (``data_5``) has an elementary data count of 6 (2*3). Thus, we cannot define the *'5D'* vector |Field| because it would
+            have a (6,5) shape.
+
+        Create the *'3D'* vector Field with a 1-dimensional list.
+
+        .. jupyter-execute::
+
+            # Use the field_from_array function
+            field_23 = dpf.fields_factory.field_from_array(arr=data_3)
+
+            # Use the Field.dimensionality method
+            field_23.dimensionality = dpf.Dimensionality([3])
+
+            # Set the scoping ids
+            field_23.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("3D vector Field: ", '\n', field_23, '\n')
+
+        Create the *'3D'* vector Field with a 2-dimensional list.
+
+        .. jupyter-execute::
+
+            # Use the field_from_array function
+            field_24 = dpf.fields_factory.field_from_array(arr=data_5)
+
+            # Set the scoping ids
+            field_24.scoping.ids = range(num_entities_2)
+
+            # Print the Field
+            print("3D vector Field: ", '\n', field_24, '\n')
+
+    .. tab-item:: Matrix fields
+
+        You can create a matrix |Field| using the |create_matrix_field| function from the |fields_factory| module.
+
+        The default |nature| here is *'matrix'*. Thus, you only have to define the matrix |dimensionality| using the
+        *'num_lines'* and *'num_col'* arguments.
+
+        Create the (2,2) matrix Field.
+
+        .. jupyter-execute::
+
+            # Use the create_matrix_field function
+            field_41 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=2, num_col=2)
+
+            # Set the scoping ids
+            field_41.scoping.ids = range(num_entities_3)
+
+            # Print the Field
+            print("Matrix Field (2,2): ", '\n', field_41, '\n')
+
+        Create the (3,3) matrix Fields.
+
+        .. jupyter-execute::
+
+            # Use the create_matrix_field function
+            field_51 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3)
+            field_52 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3)
+
+            # Set the scoping ids
+            field_51.scoping.ids = range(num_entities_3)
+            field_52.scoping.ids = range(num_entities_3)
+
+            # Print the Fields
+            print("Matrix Field 1 (3,3): ", '\n', field_51, '\n')
+            print("Matrix Field 2 (3,3): ", '\n', field_52, '\n')
+
+Set data to the Fields
+----------------------
+
+To set a data array on a |Field|, use the |Field.data| attribute. The |Field| |Scoping| defines how the data is ordered.
+For example, the first id in the scoping identifies the entity to which the first data entity belongs.
+
+The data can be a 1-dimensional (flat) or 2-dimensional (nested) NumPy array or Python list.
+When assigned to a |Field|, these data arrays are reshaped to respect the |Field| definition.
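+As a small sketch of this ordering (with purely illustrative ids and values), the first value is
+attributed to the entity whose id comes first in the scoping:
+
+.. jupyter-execute::
+
+    # The first value belongs to the entity whose id comes first in the scoping (here, id 10)
+    field_order = dpf.fields_factory.create_scalar_field(num_entities=2)
+    field_order.scoping.ids = [10, 20]
+    field_order.data = [1.0, 2.0]
+    print(field_order.get_entity_data_by_id(10))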
+.. tab-set::
+
+    .. tab-item:: Scalar fields
+
+        Set the data from a 1-dimensional array to the scalar Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_11.data = data_1
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_11, '\n')
+
+            # Print the Field's data
+            print("Data of the scalar Field: ", '\n', field_11.data, '\n')
+
+        Set the data from a 2-dimensional array to the scalar Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_12.data = data_2
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_12, '\n')
+
+            # Print the Field's data
+            print("Data of the scalar Field: ", '\n', field_12.data, '\n')
+
+    .. tab-item:: Vector fields
+
+        Set the data from a 1-dimensional array to the *'3D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_21.data = data_3
+
+            # Print the Field
+            print("Vector Field: ", '\n', field_21, '\n')
+
+            # Print the Field's data
+            print("Data of the vector Field: ", '\n', field_21.data, '\n')
+
+        Set the data from a 1-dimensional array to the *'5D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_31.data = data_4
+
+            # Print the Field
+            print("Vector Field: ", '\n', field_31, '\n')
+
+            # Print the Field's data
+            print("Data of the vector Field: ", '\n', field_31.data, '\n')
+
+        Set the data from a 2-dimensional array to the *'3D'* vector Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_22.data = data_5
+
+            # Print the Field
+            print("Vector Field: ", '\n', field_22, '\n')
+
+            # Print the Field's data
+            print("Data of the vector Field: ", '\n', field_22.data, '\n')
+
+    .. tab-item:: Matrix fields
+
+        Set the data from a 1-dimensional array to the (2,2) matrix Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_41.data = data_6
+
+            # Print the Field
+            print("Matrix Field: ", '\n', field_41, '\n')
+
+            # Print the Field's data
+            print("Data of the matrix Field: ", '\n', field_41.data, '\n')
+
+        Set the data from a 1-dimensional array to the (3,3) matrix Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_51.data = data_7
+
+            # Print the Field
+            print("Matrix Field: ", '\n', field_51, '\n')
+
+            # Print the Field's data
+            print("Data of the matrix Field: ", '\n', field_51.data, '\n')
+
+        Set the data from a 2-dimensional array to the (3,3) matrix Field.
+
+        .. jupyter-execute::
+
+            # Set the data
+            field_52.data = data_8
+
+            # Print the Field
+            print("Matrix Field: ", '\n', field_52, '\n')
+
+            # Print the Field's data
+            print("Data of the matrix Field: ", '\n', field_52.data, '\n')
+
+Append data to the Fields
+-------------------------
+
+You can append a data array to a |Field|; this means adding a new entity, with its data, to the |Field|.
+You must give the |Scoping| id that this new entity will have.
+
+.. tab-set::
+
+    .. tab-item:: Scalar fields
+
+        Append data to a scalar |Field|.
+
+        .. jupyter-execute::
+
+            # Append the data
+            field_11.append(scopingid=6, data=data_9)
+
+            # Print the Field
+            print("Scalar Field: ", '\n', field_11, '\n')
+
+            # Print the Field's data
+            print("Data of the scalar Field: ", '\n', field_11.data, '\n')
+
+    .. tab-item:: Vector fields
+
+        Append data to a vector |Field|.
+
+        .. jupyter-execute::
+
+            # Append the data
+            field_21.append(scopingid=2, data=data_10)
+
+            # Print the Field
+            print("Vector Field: ", '\n', field_21, '\n')
+
+            # Print the Field's data
+            print("Data of the vector Field: ", '\n', field_21.data, '\n')
+
+
+    .. tab-item:: Matrix fields
+
+        Append data to a matrix |Field|.
+
+        .. jupyter-execute::
+
+            # Append the data
+            field_51.append(scopingid=1, data=data_11)
+
+            # Print the Field
+            print("Matrix Field: ", '\n', field_51, '\n')
+
+            # Print the Field's data
+            print("Data of the matrix Field: ", '\n', field_51.data, '\n')
+
+Create a |FieldsContainer|
+--------------------------
+
+A |FieldsContainer| is a collection of |Field| objects ordered by labels. Each |Field| in the |FieldsContainer| has
+an ID for each label. These ids allow the fields to be split on any criterion.
+
+The most common |FieldsContainer| has the label *'time'* with ids corresponding to time sets. The label *'complex'*,
+which is used in a harmonic analysis for example, allows real parts (id=0) to be separated from imaginary parts (id=1).
+
+For more information on DPF data structures, see the :ref:`ref_tutorials_data_structures` tutorials section.
+
+You can create a |FieldsContainer| by:
+
+- :ref:`Instantiating the FieldsContainer object<ref_fields_container_instance>`;
+- :ref:`Using the fields_container_factory module<ref_fields_container_factory_module>`.
+
+.. _ref_fields_container_instance:
+
+Create a |FieldsContainer| by an instance of this object
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+After instantiating a |FieldsContainer|, you need to set its labels. Here, we define
+Fields over time step labels. So, when you add a |Field| to the |FieldsContainer|, you must specify the time step id
+it belongs to.
+
+.. jupyter-execute::
+
+    # Create the FieldsContainer object
+    fc_1 = dpf.FieldsContainer()
+
+    # Define the labels
+    fc_1.add_label(label="time")
+
+    # Add the Fields
+    fc_1.add_field(label_space={"time": 0}, field=field_21)
+    fc_1.add_field(label_space={"time": 1}, field=field_31)
+
+    # Print the FieldsContainer
+    print(fc_1)
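+Once the |FieldsContainer| is built, its labels let you retrieve a given |Field| back. A minimal
+sketch using the ``fc_1`` container defined above:
+
+.. jupyter-execute::
+
+    # Retrieve the Field associated with time id 1
+    field_time_1 = fc_1.get_field({"time": 1})
+    print(field_time_1)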
jupyter-execute:: + + # Create the FieldsContainer + fc_2 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_21, field_31]) + + # Print the FieldsContainer + print(fc_2) \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst new file mode 100644 index 00000000000..b1a4dac9940 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -0,0 +1,456 @@ +.. _reft_tutorials_narrow_down_data: + +================ +Narrow down data +================ + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |location| replace:: :class:`location` +.. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory` +.. |mesh_scoping_factory| replace:: :mod:`mesh_scoping_factory` +.. |displacement| replace:: :class:`result.displacement ` +.. |Model.results| replace:: :func:`Model.results ` +.. |result op| replace:: :mod:`result` +.. |rescope| replace:: :class:`rescope ` +.. |from_mesh| replace:: :class:`from_mesh ` +.. |extract_scoping| replace:: :class:`extract_scoping ` +.. |scoping_by_sets| replace:: :func:`scoping_by_sets() ` +.. |nodal_scoping| replace:: :func:`nodal_scoping() ` +.. |MeshedRegion.elements| replace:: :func:`MeshedRegion.elements` +.. |MeshedRegion.nodes| replace:: :func:`MeshedRegion.nodes` +.. |Elements.scoping| replace:: :func:`Elements.scoping` +.. |Nodes.scoping| replace:: :func:`Nodes.scoping` +.. |Field.scoping| replace:: :func:`Field.scoping` +.. |Model.metadata| replace:: :func:`Model.metadata` +.. |Metadata| replace:: :class:`Metadata ` +.. |Metadata.time_freq_support| replace:: :func:`Metadata.time_freq_support` +.. |FieldsContainer.time_freq_support| replace:: :func:`FieldsContainer.time_freq_support` +.. |Field.time_freq_support| replace:: :func:`Field.time_freq_support` +.. |TimeFreqSupport.time_frequencies| replace:: :func:`TimeFreqSupport.time_frequencies` + +This tutorial explains how to scope your results over time and mesh domains. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Understanding the scope +----------------------- + +To begin the workflow set up, you need to establish the ``scoping``, that is +a spatial and/or temporal subset of the simulation data. + +The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. +To do so in DPF, you use the |Scoping| object. You can retrieve all the time steps available for +a result, but you can also filter them. + +.. note:: + + Scoping is important because when DPF-Core returns the |Field| object, what Python actually has + is a client-side representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. For more information on the DPF data storage + structures see :ref:`ref_tutorials_data_structures`. + +In conclusion, the essence of a scoping is to specify a set of time or mesh entities by defining a range of IDs: + +.. 
image:: ../../../images/drawings/scoping-eg.png + :align: center + +Create a |Scoping| object from scratch +-------------------------------------- + +First, import the necessary PyDPF-Core modules. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Then, use the available APIs to create a |Scoping| object. It can be created by: + +- :ref:`Instantiating the Scoping class`; +- :ref:`Using the scoping factory `. + + +.. _ref_create_scoping_instance_object: + +Instanciate a |Scoping| +^^^^^^^^^^^^^^^^^^^^^^^ + +Create a time and a mesh |Scoping| by instantiating the |Scoping| object. Use the *'ids'* and *'location'* arguments +and give the entities ids and |location| of interest. + +.. tab-set:: + + .. tab-item:: Time scoping + + A time location in DPF is a |TimeFreqSupport| object. Thus, we chose a *'time_freq'* |location| and target + a set of time by their ids. + + .. jupyter-execute:: + + + # Define a time list that targets the times ids 14, 15, 16, 17 + time_list_1 = [14, 15, 16, 17] + + # Create the time Scoping object + time_scoping_1 = dpf.Scoping(ids=time_list_1, location=dpf.locations.time_freq) + + + .. tab-item:: Mesh scoping + + Here, we chose a nodal |location| and target a set of nodes by their ids. + + .. jupyter-execute:: + + # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + nodes_ids_1 = [103, 204, 334, 1802] + + # Create the mesh Scoping object + mesh_scoping_1 = dpf.Scoping(ids=nodes_ids_1, location=dpf.locations.nodal) + +.. _ref_create_scoping_scoping_factory: + +Use the scoping factory module +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Create a |Scoping| object by using the |time_freq_scoping_factory| module for a temporal |Scoping| +and the |mesh_scoping_factory| module for a spatial |Scoping|. + +.. tab-set:: + + .. tab-item:: Time scoping + + Here, we use the |scoping_by_sets| function so we can have different time steps in the |Scoping|. This function + gives a |Scoping| on a *'time_freq'* |location|. + + .. jupyter-execute:: + + # Define a time list that targets the times ids 14, 15, 16, 17 + time_list_2 = [14, 15, 16, 17] + + # Create the time Scoping object + time_scoping_2 = dpf.time_freq_scoping_factory.scoping_by_sets(cumulative_sets=time_list_2) + + + .. tab-item:: Mesh scoping + + Here, we use the |nodal_scoping| function so we have a mesh |Scoping| in a nodal |location|. + + .. jupyter-execute:: + + # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + nodes_ids_2 = [103, 204, 334, 1802] + + # Create the mesh Scoping object + mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=nodes_ids_2) + +Extract a |Scoping| +------------------- + +You can extract |Scoping| from some DPF objects. They are: + +.. tab-set:: + + .. tab-item:: Time scoping + + - A |Model|; + - A |FieldsContainer| ; + - A |Field|. + + + .. tab-item:: Mesh scoping + + - A |MeshedRegion|; + - A |FieldsContainer| ; + - A |Field|. + +Define the objects +^^^^^^^^^^^^^^^^^^ + +First, import a result file and create a |Model|. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. + +.. 
jupyter-execute::
+
+    # Import the examples module
+    from ansys.dpf.core import examples
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+    # Define the result file path
+    result_file_path_1 = examples.download_transient_result()
+    # Create the DataSources object
+    ds_1 = dpf.DataSources(result_path=result_file_path_1)
+    # Create the model
+    model_1 = dpf.Model(data_sources=ds_1)
+
+From this result file we extract:
+
+- The mesh (in DPF a mesh is the |MeshedRegion| object);
+- The displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. Additionally,
+  we can get a |Field| from this |FieldsContainer|.
+
+.. jupyter-execute::
+
+    # Get the MeshedRegion
+    meshed_region_1 = model_1.metadata.meshed_region
+
+    # Get a FieldsContainer with the displacement results
+    disp_fc = model_1.results.displacement.on_all_time_freqs.eval()
+
+    # Get a Field from the FieldsContainer
+    disp_field = disp_fc[0]
+
+Extract the time |Scoping|
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Extracting the time |Scoping| means extracting the scoping of the time frequencies from the |TimeFreqSupport|
+of the DPF object.
+
+.. tab-set::
+
+    .. tab-item:: From the Model
+
+        You can extract the |TimeFreqSupport| available for the results by accessing the |Model| |Metadata|.
+        Thus, you must use the |Model.metadata| method. From the |Metadata|, you can get the |TimeFreqSupport|
+        by using the |Metadata.time_freq_support| method.
+
+        .. jupyter-execute::
+
+            # Extract the TimeFreq support
+            tfs_1 = model_1.metadata.time_freq_support
+
+        To extract the time frequencies, use the |TimeFreqSupport.time_frequencies| method. The time
+        frequencies are given in a Field. Thus, to get the time |Scoping|, you need to use the |Field.scoping| method.
+        For this approach, the time frequencies are given in a *'TimeFreq_sets'* location.
+
+        .. jupyter-execute::
+
+            # Extract the time frequencies
+            t_freqs_1 = tfs_1.time_frequencies
+
+            # Extract the time scoping
+            time_scop_1 = t_freqs_1.scoping
+
+            # Print the time scoping
+            print(time_scop_1)
+
+
+    .. tab-item:: From the FieldsContainer
+
+        You can extract the |TimeFreqSupport| of each |Field| in the |FieldsContainer| by using the
+        |FieldsContainer.time_freq_support| method.
+
+        .. jupyter-execute::
+
+            # Extract the TimeFreq support
+            tfs_2 = disp_fc.time_freq_support
+
+        To extract the time frequencies, use the |TimeFreqSupport.time_frequencies| method. The time
+        frequencies are given in a Field. Thus, to get the time |Scoping|, you need to use the |Field.scoping| method.
+
+        .. jupyter-execute::
+
+            # Extract the time frequencies
+            t_freqs_2 = tfs_2.time_frequencies
+
+            # Extract the time scoping
+            time_scop_2 = t_freqs_2.scoping
+
+            # Print the time scoping
+            print(time_scop_2)
+
+
+    .. tab-item:: From the Field
+
+        You can extract the |TimeFreqSupport| of a |Field| by using the |Field.time_freq_support| method.
+
+        .. jupyter-execute::
+
+            # Extract the TimeFreq support
+            tfs_3 = disp_field.time_freq_support
+
+        To extract the time frequencies, use the |TimeFreqSupport.time_frequencies| method. The time
+        frequencies are given in a Field. Thus, to get the time |Scoping|, you need to use the |Field.scoping| method.
+
+        .. jupyter-execute::
+
+            # Extract the time frequencies
+            t_freqs_3 = tfs_3.time_frequencies
+
+            # Extract the time scoping
+            time_scop_3 = t_freqs_3.scoping
+
+            # Print the time scoping
+            print(time_scop_3)
+
+Extract the mesh |Scoping|
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. tab-set::
+
+    .. 
tab-item:: From the MeshedRegion
+
+        You can extract the mesh |Scoping| from a |MeshedRegion| using:
+
+        - The |from_mesh| operator;
+        - The |Elements| object;
+        - The |Nodes| object.
+
+        **Use the from_mesh operator**
+
+        Extract the mesh |Scoping| from the |MeshedRegion| using the |from_mesh| operator. It gets the
+        |Scoping| for the entire mesh with a *'nodal'* location. You can also get an *'elemental'* location
+        by using the *'requested_location'* argument.
+
+        .. jupyter-execute::
+
+            # Extract the mesh scoping
+            mesh_scoping_3 = ops.scoping.from_mesh(mesh=meshed_region_1).eval()
+
+            # Print the mesh Scoping
+            print("Scoping from mesh", "\n", mesh_scoping_3, "\n")
+
+        **Use the Elements object**
+
+        You can obtain the |Elements| object from a given |MeshedRegion| by using the |MeshedRegion.elements|
+        method. You can extract the mesh |Scoping| from the |Elements| object by using the |Elements.scoping| method.
+        It gets the |Scoping| for the entire mesh with an *'elemental'* location.
+
+        .. jupyter-execute::
+
+            # Extract the mesh scoping
+            mesh_scoping_4 = meshed_region_1.elements.scoping
+
+            # Print the mesh Scoping
+            print("Scoping from mesh", "\n", mesh_scoping_4, "\n")
+
+        **Use the Nodes object**
+
+        You can obtain the |Nodes| object from a given |MeshedRegion| by using the |MeshedRegion.nodes|
+        method. You can extract the mesh |Scoping| from the |Nodes| object by using the |Nodes.scoping| method.
+        It gets the |Scoping| for the entire mesh with a *'nodal'* location.
+
+        .. jupyter-execute::
+
+            # Extract the mesh scoping
+            mesh_scoping_5 = meshed_region_1.nodes.scoping
+
+            # Print the mesh Scoping
+            print("Scoping from mesh", "\n", mesh_scoping_5, "\n")
+
+
+    .. tab-item:: From the FieldsContainer
+
+        Extract the mesh Scoping from the |FieldsContainer| using the |extract_scoping| operator. This operator gets the mesh
+        |Scoping| for each |Field| in the |FieldsContainer|. Thus, you must specify the output as a |ScopingsContainer|.
+
+        .. jupyter-execute::
+
+            # Define the extract_scoping operator
+            extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=disp_fc)
+
+            # Get the mesh Scopings from the operator's output
+            mesh_scoping_6 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container()
+
+            # Print the mesh Scopings
+            print("Scoping from FieldsContainer", "\n", mesh_scoping_6, "\n")
+
+    .. tab-item:: From the Field
+
+        You can extract the mesh |Scoping| from a |Field| using:
+
+        - The |extract_scoping| operator;
+        - The |Field.scoping| method.
+
+        **Use the extract_scoping operator**
+
+        This operator gets the mesh |Scoping| from the result |Field|. This means it gets the |Scoping|
+        where the result is defined.
+
+        .. jupyter-execute::
+
+            # Extract the mesh scoping
+            mesh_scoping_7 = ops.utility.extract_scoping(field_or_fields_container=disp_field).eval()
+
+            # Print the mesh Scoping
+            print("Scoping from Field ", "\n", mesh_scoping_7, "\n")
+
+        **Use the Field.scoping method**
+
+        This method gets the mesh |Scoping| from the result |Field|. This means it gets the |Scoping|
+        where the result is defined.
+
+        .. jupyter-execute::
+
+            # Extract the mesh scoping
+            mesh_scoping_8 = disp_field.scoping
+
+            # Print the mesh Scoping
+            print("Scoping from Field", "\n", mesh_scoping_8, "\n")
+
+Use a |Scoping|
+---------------
+
+The |Scoping| object can be used:
+
+- :ref:`When extracting a result`;
+- :ref:`After extracting a result`.
+
+.. 
_ref_use_scoping_when_extracting:
+
+Extract and scope the results
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can extract and scope a result using the |Model.results| method or the |result op| operator inputs.
+Those two approaches handle |Result| objects. Thus, to scope the results when extracting them, use
+the *'time_scoping'* and *'mesh_scoping'* arguments and give the Scoping objects of interest.
+
+Here, we extract and scope the displacement results.
+
+.. jupyter-execute::
+
+    # Extract and scope the result using the Model.results method
+    disp_model = model_1.results.displacement(time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval()
+
+    # Extract and scope the results using the result.displacement operator
+    disp_op = ops.result.displacement(data_sources=ds_1, time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval()
+
+    # Print the displacement results
+    print("Displacement from Model.results ", "\n", disp_model, "\n")
+    print("Displacement from result.displacement operator", "\n", disp_op, "\n")
+
+.. _ref_use_scoping_after_extracting:
+
+Extract and rescope the results
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The mesh |Scoping| can be changed after the result extraction or manipulation by using the
+|rescope| operator. It takes a |Field| or |FieldsContainer| that contains the results data
+and rescopes it.
+
+Here, we rescope the displacement results.
+
+.. jupyter-execute::
+
+    # Extract the results for the entire mesh
+    disp_all_mesh = model_1.results.displacement.eval()
+
+    # Rescope the displacement results to get the data only for a specific set of nodes
+    disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=mesh_scoping_1).eval()
+
+    # Print the displacement results for the entire mesh
+    print("Displacement results for the entire mesh", "\n", disp_all_mesh, "\n")
+
+    # Print the displacement results for the specific set of nodes
+    print("Displacement results rescoped ", "\n", disp_rescope, "\n")
+
+
+
diff --git a/doc/source/user_guide/tutorials/index.rst b/doc/source/user_guide/tutorials/index.rst
new file mode 100644
index 00000000000..e600bb8ca6f
--- /dev/null
+++ b/doc/source/user_guide/tutorials/index.rst
@@ -0,0 +1,145 @@
+.. _ref_tutorials:
+
+Tutorials
+---------
+
+The tutorials cover specific features with detailed demonstrations to help you
+understand the fundamental PyDPF-Core functionalities and clarify some concepts.
+They are designed to teach how to perform a task, providing explanations at each stage.
+
+It helps to have a Python interpreter for hands-on experience, but all code examples are
+executed, so the tutorials can also be read offline.
+
+For a complete description of all the objects and modules, see the :doc:`API reference <../../api/index>`
+section.
+
+:fa:`person-running` Beginner's guide
+*************************************
+
+New to PyDPF-Core? Check our beginner's tutorials. They offer an overview
+of basic features and concepts so you can start coding right away.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Data structures
+        :link: ref_tutorials_data_structures
+        :link-type: ref
+        :text-align: center
+
+        Learn about the different data structures available in DPF.
+
+    .. grid-item-card:: Post-processing data basics
+        :link: ref_tutorials_processing_basics
+        :link-type: ref
+        :text-align: center
+
+        Follow a basic post-processing procedure with data transformation,
+        visualization and analysis using PyDPF-Core.
+
+:fa:`book-open-reader` Common topics
+************************************
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Importing data
+        :link: ref_tutorials_import_data
+        :link-type: ref
+        :text-align: center
+
+        Understand how to represent data in DPF, either from manual input or from result files.
+
+    .. grid-item-card:: Meshes
+        :link: ref_tutorials_mesh
+        :link-type: ref
+        :text-align: center
+
+        Learn how to interact with meshes in PyDPF-Core.
+
+    .. grid-item-card:: Processing data with operators and workflows
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        Learn how to use operators to process your data and build workflows.
+
+        +++
+        Coming soon
+
+    .. grid-item-card:: Exporting data
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        Discover the best ways to export data from your manipulations with PyDPF-Core.
+
+        +++
+        Coming soon
+
+    .. grid-item-card:: Plotting
+        :link: ref_tutorials_plot
+        :link-type: ref
+        :text-align: center
+
+        Explore the different approaches to visualize the data in plots.
+
+    .. grid-item-card:: Animations
+        :link: ref_tutorials_animate
+        :link-type: ref
+        :text-align: center
+
+        Explore the different approaches to visualize the data in an animation.
+
+    .. grid-item-card:: Mathematical operations
+        :link: ref_tutorials_mathematics
+        :link-type: ref
+        :text-align: center
+
+        Learn how to perform mathematical operations on data structures.
+
+    .. grid-item-card:: Custom Python operator and plugin
+        :link: ref_tutorials_custom_operators_and_plugins
+        :link-type: ref
+        :text-align: center
+
+        Discover how to enhance DPF capabilities with custom operators and plugins.
+
+    .. grid-item-card:: Processing distributed files
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        Learn how to use PyDPF-Core with distributed result files.
+
+        +++
+        Coming soon
+
+    .. grid-item-card:: Managing local and remote servers
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        Learn about the DPF client-server architecture and management of local and remote servers.
+
+        +++
+        Coming soon
+
+    .. grid-item-card:: Manage licensing
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        Learn how to manage licensing in PyDPF-Core.
+
+        +++
+        Coming soon
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/licensing/index.rst b/doc/source/user_guide/tutorials/licensing/index.rst
new file mode 100644
index 00000000000..e7760c435bb
--- /dev/null
+++ b/doc/source/user_guide/tutorials/licensing/index.rst
@@ -0,0 +1,25 @@
+.. _ref_tutorials_licensing:
+
+=========
+Licensing
+=========
+
+This tutorial explains the DPF server licensing logic. Here you
+learn about the Entry and Premium licensing capabilities.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Access Entry and Premium Capabilities
+        :link: ref_tutorials
+        :link-type: ref
+        :text-align: center
+
+        This tutorial explains how to access Entry and Premium capabilities.
+
+.. 
toctree::
+    :maxdepth: 2
+    :hidden:
+
diff --git a/doc/source/user_guide/tutorials/mathematics/basic_maths.rst b/doc/source/user_guide/tutorials/mathematics/basic_maths.rst
new file mode 100644
index 00000000000..118783d55cb
--- /dev/null
+++ b/doc/source/user_guide/tutorials/mathematics/basic_maths.rst
@@ -0,0 +1,790 @@
+.. _ref_basic_math:
+
+===========
+Basic maths
+===========
+
+.. note::
+
+    This tutorial requires DPF 9.1 or above (2025 R1).
+
+.. include:: ../../../links_and_refs.rst
+.. |math operators| replace:: :mod:`math operators `
+.. |fields_factory| replace:: :mod:`fields_factory`
+.. |fields_container_factory| replace:: :mod:`fields_container_factory`
+.. |over_time_freq_fields_container| replace:: :func:`over_time_freq_fields_container()`
+.. |add| replace:: :class:`add`
+.. |add_fc| replace:: :class:`add_fc`
+.. |minus| replace:: :class:`minus`
+.. |minus_fc| replace:: :class:`minus_fc`
+.. |accumulate| replace:: :class:`accumulate`
+.. |accumulate_fc| replace:: :class:`accumulate_fc`
+.. |cross_product| replace:: :class:`cross_product`
+.. |cross_product_fc| replace:: :class:`cross_product_fc`
+.. |component_wise_divide| replace:: :class:`component_wise_divide`
+.. |component_wise_divide_fc| replace:: :class:`component_wise_divide_fc`
+.. |generalized_inner_product| replace:: :class:`generalized_inner_product`
+.. |generalized_inner_product_fc| replace:: :class:`generalized_inner_product_fc`
+.. |overall_dot| replace:: :class:`overall_dot`
+.. |outer_product| replace:: :class:`outer_product`
+.. |pow| replace:: :class:`pow`
+.. |pow_fc| replace:: :class:`pow_fc`
+.. |sqr| replace:: :class:`sqr`
+.. |sqrt| replace:: :class:`sqrt`
+.. |sqr_fc| replace:: :class:`sqr_fc`
+.. |norm| replace:: :class:`norm`
+.. |norm_fc| replace:: :class:`norm_fc`
+.. |component_wise_product| replace:: :class:`component_wise_product`
+.. |component_wise_product_fc| replace:: :class:`component_wise_product_fc`
+
+This tutorial explains how to perform some basic mathematical operations with PyDPF-Core.
+
+DPF exposes data through |Field| objects (or other specialized kinds of fields).
+A |Field| is a homogeneous array of floats.
+
+A |FieldsContainer| is a labeled collection of |Field| objects that most operators can use,
+allowing you to operate on several fields at once.
+
+To perform mathematical operations, use the operators available in the |math operators| module.
+First create an instance of the operator of interest, then use the ``.eval()`` method to compute
+and retrieve the first output.
+
+Most operators for mathematical operations can take in a |Field| or a |FieldsContainer|.
+
+Most mathematical operators have a separate implementation for handling |FieldsContainer| objects
+as input, and are recognizable by the suffix ``_fc`` appended to their name.
+
+This tutorial first shows in :ref:`ref_basic_maths_create_custom_data` how to create the custom fields and field containers it uses.
+
+It then focuses on the effect of the scoping of the fields on the result in :ref:`ref_basic_maths_scoping_handling`,
+and on the treatment of collections in :ref:`ref_basic_maths_handling_of_collections`.
+
+It then explains how to use several of the available mathematical operators, both with fields and with field containers.
+
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+
+.. 
_ref_basic_maths_create_custom_data : + +Create fields and field collections +----------------------------------- + +DPF exposes mathematical fields of floats through |Field| and |FieldsContainer| objects. +The |Field| is a homogeneous array of floats and a |FieldsContainer| is a labeled collection of |Field| objects. + +Here, fields and field collections created from scratch are used to show how the +mathematical operators work. + +For more information on creating a |Field| from scratch, see :ref:`ref_tutorials_data_structures`. + +.. tab-set:: + + .. tab-item:: Fields + + Create the fields based on: + + - A number of entities + - A list of IDs and a location, which together define the scoping of the field + + The location defines the type of entity the IDs refer to. It defaults to *nodal*, in which case the scoping is + understood as a list of node IDs, and the field is a nodal field. + + For a more detailed explanation about the influence of the |Scoping| on the operations, + see the :ref:`ref_basic_maths_scoping_handling` section of this tutorial. + + First import the necessary DPF modules. + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the math operators module + from ansys.dpf.core.operators import math as maths + + Create the fields with the |Field| class constructor. + + Helpers are also available in |fields_factory| for easier creation of fields from scratch. + + .. jupyter-execute:: + + # Create four nodal 3D vector fields of size 2 + num_entities = 2 + field1 = dpf.Field(nentities=num_entities) + field2 = dpf.Field(nentities=num_entities) + field3 = dpf.Field(nentities=num_entities) + field4 = dpf.Field(nentities=num_entities) + + # Set the scoping IDs + field1.scoping.ids = field2.scoping.ids = field3.scoping.ids = field4.scoping.ids = range(num_entities) + + # Set the data for each field using flat lists (of size = num_entities * num_components) + field1.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + field2.data = [7.0, 3.0, 5.0, 8.0, 1.0, 2.0] + field3.data = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] + field4.data = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] + + # Print the fields + print("Field 1","\n", field1, "\n"); print("Field 2","\n", field2, "\n"); + print("Field 3","\n", field3, "\n"); print("Field 4","\n", field4, "\n") + + .. tab-item:: Field containers + + Create the collections of fields (called "field containers") using the |fields_container_factory|. + Here, we use the |over_time_freq_fields_container| helper to generate a |FieldsContainer| with *'time'* labels. + + .. jupyter-execute:: + + # Create the field containers + fc1 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field1, field2]) + fc2 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field3, field4]) + + # Print the field containers + print("FieldsContainer1","\n", fc1, "\n") + print("FieldsContainer2","\n", fc2, "\n") + + +.. _ref_basic_maths_scoping_handling : + +Effect of the scoping +--------------------- + +The scoping of a DPF field stores information about which entity the data is associated to. +A scalar field containing data for three entities is, for example, linked to a scoping defining three entity IDs. +The location of the scoping defines the type of entity the IDs refer to. +This allows DPF to know what each data point of a field is associated to. + +Operators such as mathematical operators usually perform operations between corresponding entities of fields. 
+
+For example, the addition of two scalar fields does not just add the two data arrays,
+which may not be of the same length or may not be ordered the same way.
+Instead, it uses the scoping of each field to find corresponding entities, their data in each field,
+and performs the addition on those.
+
+This means that the operation is usually performed for entities in the intersection of the two field scopings.
+
+Some operators provide options to handle data for entities outside of this intersection,
+but most simply ignore the data for entities not in the intersection of the scopings.
+
+The following examples illustrate this behavior.
+
+.. jupyter-execute::
+
+    # Instantiate two nodal 3D vector fields of length 3
+    field5 = dpf.Field(nentities=3)
+    field6 = dpf.Field(nentities=3)
+
+    # Set the data for each field
+    field5.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
+    field6.data = [5.0, 1.0, 6.0, 3.0, 8.0, 9.0, 7.0, 2.0, 4.0]
+
+    # Set the scoping IDs (here node IDs)
+    field5.scoping.ids = [1, 2, 3]
+    field6.scoping.ids = [3, 4, 5]
+
+    # Print the fields
+    print("Field 5", "\n", field5, "\n")
+    print("Field 6", "\n", field6, "\n")
+
+Here the only entities with matching IDs between the two fields are:
+
+- The third entity in field5 (ID=3)
+- The first entity in field6 (ID=3)
+
+Other entities are not taken into account when using an operator that needs two operands.
+
+For example the |add| operator:
+
+.. jupyter-execute::
+
+    # Use the add operator
+    add_scop = dpf.operators.math.add(fieldA=field5, fieldB=field6).eval()
+
+    # Print the result
+    # The resulting field only contains data for entities where a match is found in the other field.
+    # It has the size of the intersection of the two scopings.
+    # Here this means the addition returns a field with data only for the node with ID=3.
+    # This behavior is specific to each operator.
+    print(add_scop, "\n")
+
+Or the |generalized_inner_product| operator:
+
+.. jupyter-execute::
+
+    # Use the dot product operator
+    dot_scop = dpf.operators.math.generalized_inner_product(fieldA=field5, fieldB=field6).eval()
+    # ID 3: (7. * 5.) + (8. * 1.) + (9. * 6.)
+
+    # Print the result
+    # The operator returns zero for entities where no match is found in the other field.
+    # The resulting field is the size of the union of the two scopings.
+    # This behavior is specific to each operator.
+    print(dot_scop,"\n")
+    print(dot_scop.data,"\n")
+
+.. _ref_basic_maths_handling_of_collections :
+
+Handling of collections
+-----------------------
+
+Most mathematical operators have a separate implementation for handling |FieldsContainer| objects
+as input, and are recognizable by the suffix ``_fc`` appended to their name.
+
+These operators operate on fields that share the same label space.
+
+In the two collections of fields built previously, each field has a *time* label with an associated value.
+
+Operators working with |FieldsContainer| inputs match fields from each collection with the same value for all labels.
+
+In this case, ``field 0`` of ``fc1`` with label space ``{"time": 1}`` gets matched up with ``field 0`` of ``fc2`` also with label space ``{"time": 1}``.
+Then ``field 1`` of ``fc1`` with label space ``{"time": 2}`` gets matched up with ``field 1`` of ``fc2`` also with label space ``{"time": 2}``.
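+
+One way to check this pairing is to print the label space of the field stored at each
+index of both collections. This is a minimal sketch using the ``fc1`` and ``fc2``
+collections built above and the ``get_label_space`` method of DPF collections:
+
+.. jupyter-execute::
+
+    # Print the label space of the field at each index of both collections
+    for index in range(len(fc1)):
+        print(f"fc1[{index}] -> {fc1.get_label_space(index)} | fc2[{index}] -> {fc2.get_label_space(index)}")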
+ +Addition +-------- + +Use: + +- the |add| operator to compute the element-wise addition for each component of two fields +- the |accumulate| operator to compute the overall sum of data for each component of a field + +Element-wise addition +^^^^^^^^^^^^^^^^^^^^^ + +This operator computes the element-wise sum of two fields for each component. + +.. tab-set:: + + .. tab-item:: *add* + + .. jupyter-execute:: + + # Add the fields + add_field = maths.add(fieldA=field1, fieldB=field2).eval() + # id 0: [1.+7. 2.+3. 3.+5.] = [ 8. 5. 8.] + # id 1: [4.+8. 5.+1. 6.+2.] = [12. 6. 8.] + + # Print the results + print("Addition field ", add_field , "\n") + + .. tab-item:: *add_fc* + + .. jupyter-execute:: + + # Add the two field collections + add_fc = maths.add_fc(fields_container1=fc1, fields_container2=fc2).eval() + # {time: 1}: field1 + field3 + # --> id 0: [1.+6. 2.+5. 3.+4.] = [7. 7. 7.] + # id 1: [4.+3. 5.+2. 6.+1.] = [7. 7. 7.] + # + # {time: 2}: field2 + field4 + # --> id 0: [7.+4. 3.+1. 5.+8.] = [11. 4. 13.] + # id 1: [8.+5. 1.+7. 2.+9.] = [13. 8. 11.] + + # Print the results + print("Addition FieldsContainers","\n", add_fc , "\n") + print(add_fc.get_field({"time":1}), "\n") + print(add_fc.get_field({"time":2}), "\n") + +Overall sum +^^^^^^^^^^^ + +This operator computes the total sum of elementary data of a field, for each component of the field. +You can give a scaling ("weights") argument. + + Keep in mind the |Field| dimension. The |Field| represents 3D vectors, so each elementary data is a 3D vector. + The optional "weights" |Field| attribute is a scaling factor for each entity when performing the sum, + so you must provide a 1D field. + +Compute the total sum (accumulate) for each component of a given |Field|. + +.. tab-set:: + + .. tab-item:: *accumulate* + + .. jupyter-execute:: + + # Compute the total sum of a field + tot_sum_field = maths.accumulate(fieldA=field1).eval() + # vector component 0 = 1. + 4. = 5. + # vector component 1 = 2. + 5. = 7. + # vector component 2 = 3. + 6. = 9. + + # Print the results + print("Total sum fields","\n", tot_sum_field, "\n") + + .. tab-item:: *accumulate_fc* + + .. jupyter-execute:: + + # Find the total sum of the two field collections + tot_sum_fc = maths.accumulate_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> vector component 0 = 1.+ 4. = 5. + # vector component 1 = 2.+ 5. = 7. + # vector component 2 = 3.+ 6. = 9. + # + # {time: 2}: field2 + # --> vector component 0 = 7.+ 8. = 15. + # vector component 1 = 3.+ 1. = 4. + # vector component 2 = 5.+ 2. = 7. + + # Print the results + print("Total sum FieldsContainers","\n", tot_sum_fc , "\n") + print(tot_sum_fc.get_field({"time":1}), "\n") + print(tot_sum_fc.get_field({"time":2}), "\n") + +Compute the total sum (accumulate) for each component of a given |Field| using a scale factor field. + +.. tab-set:: + + .. tab-item:: *accumulate* + + .. jupyter-execute:: + + # Define the scale factor field + scale_vect = dpf.Field(nentities=num_entities, nature=dpf.natures.scalar) + # Set the scale factor field scoping IDs + scale_vect.scoping.ids = range(num_entities) + # Set the scale factor field data + scale_vect.data = [5., 2.] + + # Compute the total sum of the field using a scaling field + tot_sum_field_scale = maths.accumulate(fieldA=field1, weights=scale_vect).eval() + # vector component 0 = (1.0 * 5.0) + (4.0 * 2.0) = 13. + # vector component 1 = (2.0 * 5.0) + (5.0 * 2.0) = 20. + # vector component 2 = (3.0 * 5.0) + (6.0 * 2.0) = 27. 
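+            # Note: the result is a single 3D vector (one total per component),
+            # since the weights only scale each entity's contribution to the sum.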
+
+            # Print the results
+            print("Total weighted sum:","\n", tot_sum_field_scale, "\n")
+
+    .. tab-item:: *accumulate_fc*
+
+        .. jupyter-execute::
+
+            # Total scaled sum of the two field collections (accumulate)
+            tot_sum_fc_scale = maths.accumulate_fc(fields_container=fc1, weights=scale_vect).eval()
+            # {time: 1}: field1
+            # --> vector component 0 = (1.0 * 5.0) + (4.0 * 2.0) = 13.
+            #     vector component 1 = (2.0 * 5.0) + (5.0 * 2.0) = 20.
+            #     vector component 2 = (3.0 * 5.0) + (6.0 * 2.0) = 27.
+            #
+            # {time: 2}: field2
+            # --> vector component 0 = (7.0 * 5.0) + (8.0 * 2.0) = 51.
+            #     vector component 1 = (3.0 * 5.0) + (1.0 * 2.0) = 17.
+            #     vector component 2 = (5.0 * 5.0) + (2.0 * 2.0) = 29.
+
+            # Print the results
+            print("Total sum FieldsContainers scale","\n", tot_sum_fc_scale , "\n")
+            print(tot_sum_fc_scale.get_field({"time":1}), "\n")
+            print(tot_sum_fc_scale.get_field({"time":2}), "\n")
+
+Subtraction
+-----------
+
+Use the |minus| operator to compute the element-wise difference between each component of two fields.
+
+.. tab-set::
+
+    .. tab-item:: *minus*
+
+        .. jupyter-execute::
+
+            # Subtraction of two 3D vector fields
+            minus_field = maths.minus(fieldA=field1, fieldB=field2).eval()
+            # id 0: [1.-7. 2.-3. 3.-5.] = [-6. -1. -2.]
+            # id 1: [4.-8. 5.-1. 6.-2.] = [-4. 4. 4.]
+
+            # Print the results
+            print("Subtraction field","\n", minus_field , "\n")
+
+    .. tab-item:: *minus_fc*
+
+        .. jupyter-execute::
+
+            # Subtraction of two field collections
+            minus_fc = maths.minus_fc(
+                field_or_fields_container_A=fc1,
+                field_or_fields_container_B=fc2
+            ).eval()
+            # {time: 1}: field1 - field3
+            # --> id 0: [1.-6. 2.-5. 3.-4.] = [-5. -3. -1.]
+            #     id 1: [4.-3. 5.-2. 6.-1.] = [1. 3. 5.]
+            #
+            # {time: 2}: field2 - field4
+            # --> id 0: [7.-4. 3.-1. 5.-8.] = [3. 2. -3.]
+            #     id 1: [8.-5. 1.-7. 2.-9.] = [3. -6. -7.]
+
+            # Print the results
+            print("Subtraction field collection","\n", minus_fc , "\n")
+            print(minus_fc.get_field({"time":1}), "\n")
+            print(minus_fc.get_field({"time":2}), "\n")
+
+Element-wise product
+--------------------
+
+Use the |component_wise_product| operator to compute the element-wise product between each component of two fields,
+also known as the `Hadamard product `_, *entrywise product*, or *Schur product*.
+
+.. tab-set::
+
+    .. tab-item:: *component_wise_product*
+
+        .. jupyter-execute::
+
+            # Compute the Hadamard product of two fields
+            element_prod_field = maths.component_wise_product(fieldA=field1, fieldB=field2).eval()
+            # id 0: [1.*7. 2.*3. 3.*5.] = [7. 6. 15.]
+            # id 1: [4.*8. 5.*1. 6.*2.] = [32. 5. 12.]
+
+            # Print the results
+            print("Element-wise product field","\n", element_prod_field , "\n")
+
+    .. tab-item:: *component_wise_product_fc*
+
+        The current implementation of |component_wise_product_fc| only supports the Hadamard product
+        of each field in a collection with a single separate field.
+
+        The element-wise product between two field collections is not implemented.
+
+        .. jupyter-execute::
+
+            # Component-wise product of each field in a collection with a single unique field
+            element_prod_fc = maths.component_wise_product_fc(fields_container=fc1, fieldB=field3).eval()
+            # {time: 1}: field1 and field3
+            # --> id 0: [1.*6. 2.*5. 3.*4.] = [6. 10. 12.]
+            #     id 1: [4.*3. 5.*2. 6.*1.] = [12. 10. 6.]
+            #
+            # {time: 2}: field2 and field3
+            # --> id 0: [7.*6. 3.*5. 5.*4.] = [42. 15. 20.]
+            #     id 1: [8.*3. 1.*2. 2.*1.] = [24. 2. 2.]
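+            # Note: field3 is reused for every label space of fc1 here, since
+            # component_wise_product_fc multiplies each field of the collection
+            # by the same single field.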
+ + # Print the results + print("Element product FieldsContainer","\n", element_prod_fc , "\n") + print(element_prod_fc.get_field({"time":1}), "\n") + print(element_prod_fc.get_field({"time":2}), "\n") + + + +Cross product +------------- + +Use the |cross_product| operator to compute the `cross product `_ between two vector fields. + +.. tab-set:: + + .. tab-item:: *cross_product* + + .. jupyter-execute:: + + # Compute the cross product + cross_prod_field = maths.cross_product(fieldA=field1, fieldB=field2).eval() + # id 0: [(2.*5. - 3.*3.) (3.*7. - 1.*5.) (1.*3. - 2.*7.)] = [1. 16. -11.] + # id 1: [(5.*2. - 6.*1.) (6.*8. - 4.*2.) (4.*1. - 5.*8.)] = [4. 40. -36.] + + # Print the results + print("Cross product field","\n", cross_prod_field , "\n") + + .. tab-item:: *cross_product_fc* + + .. jupyter-execute:: + + # Cross product of two field collections + cross_prod_fc = maths.cross_product_fc(field_or_fields_container_A=fc1,field_or_fields_container_B=fc2).eval() + # {time: 1}: field1 X field3 + # --> id 0: [(2.*4. - 3.*5.) (3.*6. - 1.*4.) (1.*5. - 2.*6.)] = [-7. 14. -7.] + # id 1: [(5.*1. - 6.*2.) (6.*3. - 4.*1.) (4.*2. - 5.*3.)] = [-7. 14. -7.] + # + # {time: 2}: field2 X field4 + # --> id 0: [(3.*8. - 5.*1.) (5.*4. - 7.*8.) (7.*1. - 3.*4.)] = [19. -36. -5] + # id 1: [(1.*9. - 2.*7.) (2.*5. - 8.*9.) (8.*7. - 1.*5.)] = [-5. -62. 51.] + + # Print the results + print("Cross product FieldsContainer","\n", cross_prod_fc , "\n") + print(cross_prod_fc.get_field({"time":1}), "\n") + print(cross_prod_fc.get_field({"time":2}), "\n") + +Dot product +----------- + +Here, DPF provides two operations: + +- Use the |generalized_inner_product| operator to compute the `inner product `_ (also known as *dot product* or *scalar product*) between vector data of entities in two fields +- Use the |overall_dot| operator to compute the sum over all entities of the inner product of two vector fields + +Inner product +^^^^^^^^^^^^^ + +The |generalized_inner_product| operator computes a general notion of inner product between two vector fields. +In Cartesian coordinates it is equivalent to the dot/scalar product. + +.. tab-set:: + + .. tab-item:: *generalized_inner_product* + + .. jupyter-execute:: + + # Generalized inner product of two fields + dot_prod_field = maths.generalized_inner_product(fieldA=field1, fieldB=field2).eval() + # id 0: (1. * 7.) + (2. * 3.) + (3. * 5.) = 28. + # id 1: (4. * 8.) + (5. * 1.) + (6. * 2.) = 49. + + # Print the results + print("Dot product field","\n", dot_prod_field , "\n") + + .. tab-item:: *generalized_inner_product_fc* + + .. jupyter-execute:: + + # Generalized inner product of two field collections + dot_prod_fc = maths.generalized_inner_product_fc(field_or_fields_container_A=fc1, field_or_fields_container_B=fc2).eval() + # {time: 1}: field1 X field3 + # --> id 0: (1. * 6.) + (2. * 5.) + (3. * 4.) = 28. + # id 1: (4. * 3.) + (5. * 2.) + (6. * 1.) = 28. + # + # {time: 2}: field2 X field4 + # --> id 0: (7. * 4.) + (3. * 1.) + (5. * 8.) = 71. + # id 1: (8. * 5.) + (1. * 7.) + (2. * 9.) = 65. + + # Print the results + print("Dot product FieldsContainer","\n", dot_prod_fc , "\n") + print(dot_prod_fc.get_field({"time":1}), "\n") + print(dot_prod_fc.get_field({"time":2}), "\n") + +Overall dot product +^^^^^^^^^^^^^^^^^^^ + +The |overall_dot| operator creates two manipulations to give the result: + +1. it first computes a dot product between data of corresponding entities for two vector fields, resulting in a scalar field +2. 
it then sums the result obtained previously over all entities to return a scalar + +.. tab-set:: + + .. tab-item:: *overall_dot* + + .. jupyter-execute:: + + # Overall dot product of two fields + overall_dot = maths.overall_dot(fieldA=field1, fieldB=field2).eval() + # id 1: (1. * 7.) + (2. * 3.) + (3. * 5.) + (4. * 8.) + (5. * 1.) + (6. * 2.) = 77. + + # Print the results + print("Overall dot","\n", overall_dot , "\n") + + .. tab-item:: *overall_dot_fc* + + The ``overall_dot_fc`` operator is not available. + +Division +-------- + +Use the |component_wise_divide| operator to compute the +`Hadamard division `_ +between each component of two fields. + +.. tab-set:: + + .. tab-item:: *component_wise_divide* + + .. jupyter-execute:: + + # Divide a field by another field + comp_wise_div = maths.component_wise_divide(fieldA=field1, fieldB=field2).eval() + # id 0: [1./7. 2./3. 3./5.] = [0.143 0.667 0.6] + # id 1: [4./8. 5./1. 6./2.] = [0.5 5. 3.] + + # Print the results + print("Component-wise division field","\n", comp_wise_div , "\n") + + .. tab-item:: *component_wise_divide_fc* + + .. jupyter-execute:: + + # Component-wise division between two field collections + comp_wise_div_fc = maths.component_wise_divide_fc(fields_containerA=fc1, fields_containerB=fc2).eval() + # {time: 1}: field1 - field3 + # --> id 0: [1./6. 2./5. 3./4.] = [0.167 0.4 0.75] + # id 1: [4./3. 5./2. 6./1.] = [1.333 2.5 6.] + # + # {time: 2}: field2 - field4 + # --> id 0: [7./4. 3./1. 5./8.] = [1.75 3. 0.625] + # id 1: [8./5. 1./7. 2./9.] = [1.6 0.143 0.222] + + # Print the results + print("Component-wise division FieldsContainer","\n", comp_wise_div_fc , "\n") + print(comp_wise_div_fc.get_field({"time":1}), "\n") + print(comp_wise_div_fc.get_field({"time":2}), "\n") + +Power +----- + +Use: + +- the |pow| operator to compute the element-wise power of each component of a |Field| +- the |sqr| operator to compute the `Hadamard power `_ of each component of a |Field| +- the |sqrt| operator to compute the `Hadamard root `_ of each component of a |Field| + +*pow* operator +^^^^^^^^^^^^^^ + +The |pow| operator computes the element-wise power of each component of a |Field| to a given factor. + +This example computes the power of three. + +.. tab-set:: + + .. tab-item:: *pow* + + .. jupyter-execute:: + + # Define the power factor + pow_factor = 3.0 + # Compute the power of three of a field + pow_field = maths.pow(field=field1, factor=pow_factor).eval() + # id 0: [(1.^3.) (2.^3.) (3.^3.)] = [1. 8. 27.] + # id 1: [(4.^3.) (5.^3.) (6.^3.)] = [64. 125. 216.] + + # Print the results + print("Power field","\n", pow_field , "\n") + + .. tab-item:: *pow_fc* + + .. jupyter-execute:: + + # Compute the power of three of a field collection + pow_fc = maths.pow_fc(fields_container=fc1, factor=pow_factor).eval() + # {time: 1}: field1 + # --> id 0: [(1.^3.) (2.^3.) (3.^3.)] = [1. 8. 27.] + # id 1: [(4.^3.) (5.^3.) (6.^3.)] = [64. 125. 216.] + # + # {time: 2}: field2 + # --> id 0: [(7.^3.) (3.^3.) (5.^3.)] = [343. 27. 125.] + # id 1: [(8.^3.) (1.^3.) (2.^3.)] = [512. 1. 8.] + + # Print the results + print("Power FieldsContainer","\n", pow_fc , "\n") + print(pow_fc.get_field({"time":1}), "\n") + print(pow_fc.get_field({"time":2}), "\n") + +*sqr* operator +^^^^^^^^^^^^^^ + +The |sqr| operator computes the element-wise power of two +(`Hadamard power `_) +for each component of a |Field|. +It is a shortcut for the |pow| operator with factor 2. + +.. tab-set:: + + .. tab-item:: *sqr* + + .. 
jupyter-execute:: + + # Compute the power of two of a field + sqr_field = maths.sqr(field=field1).eval() + # id 0: [(1.^2.) (2.^2.) (3.^2.)] = [1. 4. 9.] + # id 1: [(4.^2.) (5.^2.) (6.^2.)] = [16. 25. 36.] + + print("^2 field","\n", sqr_field , "\n") + + .. tab-item:: *sqr_fc* + + .. jupyter-execute:: + + # Compute the power of two of a field collection + sqr_fc = maths.sqr_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> id 0: [(1.^2.) (2.^2.) (3.^2.)] = [1. 4. 9.] + # id 1: [(4.^2.) (5.^2.) (6.^2.)] = [16. 25. 36.] + # + # {time: 2}: field2 + # --> id 0: [(7.^2.) (3.^2.) (5.^2.)] = [49. 9. 25.] + # id 1: [(8.^2.) (1.^2.) (2.^2.)] = [64. 1. 4.] + + # Print the results + print("^2 FieldsContainer","\n", sqr_fc , "\n") + print(sqr_fc.get_field({"time":1}), "\n") + print(sqr_fc.get_field({"time":2}), "\n") + +*sqrt* operator +^^^^^^^^^^^^^^^ + +The |sqrt| operator computes the element-wise square-root +(`Hadamard root `_) +for each component of a |Field|. +It is a shortcut for the |pow| operator with factor *0.5*. + +.. tab-set:: + + .. tab-item:: *sqrt* + + .. jupyter-execute:: + + # Compute the square-root of a field + sqrt_field = maths.sqrt(field=field1).eval() + # id 0: [(1.^0.5) (2.^0.5) (3.^0.5)] = [1. 1.414 1.732] + # id 1: [(4.^0.5) (5.^0.5) (6.^0.5)] = [2. 2.236 2.449] + + print("^0.5 field","\n", sqrt_field , "\n") + + .. tab-item:: *sqrt_fc* + + .. jupyter-execute:: + + # Compute the square-root of a field collection + sqrt_fc = maths.sqrt_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> id 0: [(1.^.5) (2.^.5) (3.^.5)] = [1. 1.414 1.732] + # id 1: [(4.^.5) (5.^.5) (6.^.5)] = [2. 2.236 2.449] + # + # {time: 2}: field2 + # --> id 0: [(7.^.5) (3.^.5) (5.^.5)] = [2.645 1.732 2.236] + # id 1: [(8.^.5) (1.^.5) (2.^.5)] = [2.828 1. 1.414] + + # Print the results + print("Sqrt FieldsContainer","\n", sqrt_fc , "\n") + print(sqrt_fc.get_field({"time":1}), "\n") + print(sqrt_fc.get_field({"time":2}), "\n") + +Norm +---- + +Use the |norm| operator to compute the +`Lp norm `_ +of the elementary data for each entity of a |Field|. + +The default *Lp* norm is *Lp=L2*. + +.. tab-set:: + + .. tab-item:: *norm* + + .. jupyter-execute:: + + # Compute the L2 norm of a field + norm_field = maths.norm(field=field1, scalar_int=2).eval() + # id 0: [(1.^2.) + (2.^2.) + (3.^2.)] ^1/2 = 3.742 + # id 1: [(4.^2.) + (5.^2.) + (6.^2.)] ^1/2 = 8.775 + + # Print the results + print("Norm field","\n", norm_field , "\n") + + .. tab-item:: *norm_fc* + + .. jupyter-execute:: + + # Define the L2 norm of a field collection + norm_fc = maths.norm_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> id 0: [(1.^2.) + (2.^2.) + (3.^2.)] ^1/2 = 3.742 + # id 1: [(4.^2.) + (5.^2.) + (6.^2.)] ^1/2 = 8.775 + # + # {time: 2}: field2 + # --> id 0: [(7.^2.) + (3.^2.) + (5.^2.)] ^1/2 = 9.110 + # id 1: [(8.^2.) + (1.^2.) + (2.^2.)] ^1/2 = 8.307 + + # Print the results + print("Norm FieldsContainer","\n", norm_fc , "\n") + print(norm_fc.get_field({"time":1}), "\n") + print(norm_fc.get_field({"time":2}), "\n") diff --git a/doc/source/user_guide/tutorials/mathematics/index.rst b/doc/source/user_guide/tutorials/mathematics/index.rst new file mode 100644 index 00000000000..584c43e31d0 --- /dev/null +++ b/doc/source/user_guide/tutorials/mathematics/index.rst @@ -0,0 +1,39 @@ +.. _ref_tutorials_mathematics: + +=========== +Mathematics +=========== + +DPF provides operators for implementing mathematical operations, ranging +from addition and multiplication to FFT and QR solving. 
+
+This section explains how you can perform mathematical operations using the
+PyDPF-Core API and data structures.
+
+.. grid:: 1 1 3 3
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: Basic maths
+        :link: ref_basic_math
+        :link-type: ref
+        :text-align: center
+        :class-card: sd-bg-light
+        :class-header: sd-bg-light sd-text-dark
+        :class-footer: sd-bg-light sd-text-dark
+
+        This tutorial explains how to do some basic
+        mathematical operations with PyDPF-Core.
+
+        +++
+        Requires DPF 9.1 or above (2025 R1).
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
+    basic_maths.rst
+
+
+
diff --git a/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst b/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst
new file mode 100644
index 00000000000..902bd72be8e
--- /dev/null
+++ b/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst
@@ -0,0 +1,157 @@
+.. _ref_tutorials_create_a_mesh_from_scratch:
+
+==========================
+Create a mesh from scratch
+==========================
+
+.. include:: ../../../links_and_refs.rst
+
+This tutorial demonstrates how to build a |MeshedRegion| from scratch.
+
+The mesh object in DPF is a |MeshedRegion|. You can create your own |MeshedRegion| object and use it
+with DPF operators. The ability to use scripting to create any DPF entity means
+that you are not dependent on result files and can connect the DPF environment
+with any Python tool.
+
+In this tutorial, we create a parallelepiped mesh made of linear hexahedral elements.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Import the necessary modules
+----------------------------
+
+Import the ``ansys.dpf.core`` module, the operators module, and the numpy library.
+
+.. jupyter-execute::
+
+    # Import the numpy library
+    import numpy as np
+    # Import the ``ansys.dpf.core`` module
+    from ansys.dpf import core as dpf
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+Define the mesh dimensions
+--------------------------
+
+.. jupyter-execute::
+
+    # Define the mesh dimensions
+    length = 0.1
+    width = 0.05
+    depth = 0.1
+    num_nodes_in_length = 10
+    num_nodes_in_width = 5
+    num_nodes_in_depth = 10
+    # Create a MeshedRegion object
+    my_meshed_region = dpf.MeshedRegion()
+
+Define the connectivity function
+--------------------------------
+
+To create a mesh, you must define the node connectivity, that is, the IDs of the
+nodes connected to each element.
+
+Here, we define a function that finds this connectivity.
+
+.. jupyter-execute::
+
+    def search_sequence_numpy(arr, node):
+        """Find the node location in an array of nodes and return its index."""
+        indexes = np.isclose(arr, node)
+        match = np.all(indexes, axis=1).nonzero()
+        return int(match[0][0])
+
+Add nodes
+---------
+
+Add |Nodes| to the |MeshedRegion| object.
+
+.. jupyter-execute::
+
+    node_id = 1
+    for i, x in enumerate(
+        [float(i) * length / float(num_nodes_in_length) for i in range(0, num_nodes_in_length)]
+    ):
+        for j, y in enumerate(
+            [float(i) * width / float(num_nodes_in_width) for i in range(0, num_nodes_in_width)]
+        ):
+            for k, z in enumerate(
+                [float(i) * depth / float(num_nodes_in_depth) for i in range(0, num_nodes_in_depth)]
+            ):
+                my_meshed_region.nodes.add_node(node_id, [x, y, z])
+                node_id += 1
+
+Get the nodes coordinates field.
+
+.. 
jupyter-execute:: + + my_nodes_coordinates = my_meshed_region.nodes.coordinates_field + +Set the mesh properties +----------------------- + +Set the mesh unit. + +.. jupyter-execute:: + + my_meshed_region.unit = "mm" + +Set the nodes coordinates. + +.. jupyter-execute:: + + # Get the nodes coordinates data + my_nodes_coordinates_data = my_nodes_coordinates.data + # As we use the connectivity function we need to get the data as a list + my_nodes_coordinates_data_list = my_nodes_coordinates.data_as_list + # Set the nodes scoping + my_coordinates_scoping = my_nodes_coordinates.scoping + +Add elements +------------ +Add |Elements| to the |MeshedRegion| object. + +.. jupyter-execute:: + + # Add solid elements (linear hexa with eight nodes): + element_id = 1 + # Precompute node spacings + dx = length / float(num_nodes_in_length) + dy = width / float(num_nodes_in_width) + dz = depth / float(num_nodes_in_depth) + # Generate node coordinates + x_coords = [i * dx for i in range(num_nodes_in_length - 1)] + y_coords = [j * dy for j in range(num_nodes_in_width - 1)] + z_coords = [k * dz for k in range(num_nodes_in_depth - 1)] + # Iterate through the grid + for x in x_coords: + for y in y_coords: + for z in z_coords: + coord1 = np.array([x, y, z]) + connectivity = [] + # Generate connectivity for the current element + for xx in [x, x + dx]: + for yy in [y, y + dy]: + for zz in [z, z + dz]: + scoping_index = search_sequence_numpy(my_nodes_coordinates_data, + [xx, yy, zz]) + connectivity.append(scoping_index) + # Rearrange connectivity to maintain element orientation + connectivity[2], connectivity[3] = connectivity[3], connectivity[2] + connectivity[6], connectivity[7] = connectivity[7], connectivity[6] + # Add the solid element + my_meshed_region.elements.add_solid_element(element_id, connectivity) + element_id += 1 + +Plot the mesh +------------- + +You can check the mesh we just created with a plot. For more information on how to plot a mesh see +the :ref:`ref_tutorials_plotting_meshes` tutorial. + +.. jupyter-execute:: + + # Plot the mesh + my_meshed_region.plot() \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/explore_mesh.rst b/doc/source/user_guide/tutorials/mesh/explore_mesh.rst new file mode 100644 index 00000000000..03e82628d99 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/explore_mesh.rst @@ -0,0 +1,281 @@ +.. _ref_tutorials_explore_mesh: + +============== +Explore a mesh +============== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |element_types| replace:: :class:`list of available element types in a DPF mesh` + +This tutorial explains how to access a mesh data and metadata so it can be manipulated. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the mesh +--------------- + +The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your +own from scratch or by getting it from a result file. For more information check the +:ref:`ref_tutorials_create_a_mesh_from_scratch` and :ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +For this tutorial, we get a |MeshedRegion| from a result file. You can use one available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_static_rst() + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the mesh + meshed_region_1 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + # Create the model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_2 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the model + model_3 = dpf.Model(data_sources=result_file_path_3) + # Get the mesh + meshed_region_3 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the model + model_4 = dpf.Model(data_sources=result_file_path_4) + # Get the mesh + meshed_region_4 = model_4.metadata.meshed_region + +Explore the mesh data +--------------------- + +You can access the mesh data by manipulating the |MeshedRegion| object methods. +The mesh data includes : + +- Unit +- Nodes, elements and faces +- Named selections + +The :method:`MeshedRegion.nodes `, :method:`MeshedRegion.elements `, :method:`MeshedRegion.faces ` and :method:`MeshedRegion.named_selections ` properties give corresponding DPF objects: +|Nodes|, |Elements|, |Faces| and |Scoping|. + +For more information of other types of data you can get from a mesh, see the API reference of the |MeshedRegion| class. + +In this tutorial, we explore the data about the mesh nodes. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the mesh nodes + nodes_1 = meshed_region_1.nodes + + # Print the object type + print("Object type: ",type(nodes_1),'\n') + + # Print the nodes + print("Nodes: ", nodes_1) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the mesh nodes + nodes_2 = meshed_region_2.nodes + + # Print the object type + print("Object type: ",type(nodes_2),'\n') + + # Print the nodes + print("Nodes: ", nodes_2) + + .. tab-item:: Fluent + + .. 
jupyter-execute:: + + # Get the mesh nodes + nodes_3 = meshed_region_3.nodes + + # Print the object type + print("Object type: ",type(nodes_3),'\n') + + # Print the nodes + print("Nodes: ", nodes_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the mesh nodes + nodes_4 = meshed_region_4.nodes + + # Print the object type + print("Object type: ",type(nodes_4),'\n') + + # Print the nodes + print("Nodes: ", nodes_4) + +Explore the mesh metadata +------------------------- + +You can access the mesh metadata by manipulating the |MeshedRegion| object properties. + +The mesh metadata information describes the mesh composition. + +You can access which metadata information is available for a given result file. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the available properties + available_props_1 = meshed_region_1.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_1) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the available properties + available_props_2 = meshed_region_2.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the available properties + available_props_3 = meshed_region_3.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the available properties + available_props_4 = meshed_region_4.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_4) + +You can also chose which property you want to extract. + +When extracting the properties you get a |PropertyField| with that information. Their data is mapped to +the entity they are defined at. + +Here, we extract the element types for the mesh elements. + +The element type is given as a number. See the |element_types| to find the +corresponding element name. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_1 = meshed_region_1.elements.element_types_field + + # Print the element types by element + print(el_types_1) + + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_2 = meshed_region_2.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_3 = meshed_region_3.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_4 = meshed_region_4.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_4) + +For more information about how to explore a mesh metadata before extracting it from a result file, see the +:ref:`ref_tutorials_explore_mesh_metadata` tutorial. \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst b/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst new file mode 100644 index 00000000000..e5f9a82f975 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst @@ -0,0 +1,189 @@ +.. 
_ref_tutorials_explore_mesh_metadata:
+
+=====================
+Explore mesh metadata
+=====================
+
+:bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX`
+
+.. note::
+
+    This tutorial requires DPF 9.1 or above (2025 R1).
+
+.. include:: ../../../links_and_refs.rst
+
+This tutorial explains how to read mesh metadata (data about the elements, nodes, faces,
+regions, zones, and so on) before extracting the mesh from a result file.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Get the result file
+-------------------
+
+First, import a result file. For this tutorial, you can use one available in the |Examples| module.
+For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data`
+tutorial section.
+
+.. tab-set::
+
+    .. tab-item:: LSDYNA
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+
+            # Define the result file path
+            result_file_path_2 = examples.download_d3plot_beam()
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+
+            # Define the result file path
+            result_file_path_3 = examples.download_fluent_axial_comp()["flprj"]
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+
+            # Define the result file path
+            result_file_path_4 = examples.download_cfx_mixing_elbow()
+
+Create the |Model|
+------------------
+
+Create a |Model| object with the result file. The |Model| is a helper designed to give shortcuts to
+access the analysis results metadata and to instantiate result providers by opening a |DataSources| or a streams object.
+
+.. tab-set::
+
+    .. tab-item:: LSDYNA
+
+        .. jupyter-execute::
+
+            # Create the DataSources object
+            ds_2 = dpf.DataSources()
+            ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot")
+            ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits")
+            # Create the Model
+            model_2 = dpf.Model(data_sources=ds_2)
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Create the Model
+            model_3 = dpf.Model(data_sources=result_file_path_3)
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Create the Model
+            model_4 = dpf.Model(data_sources=result_file_path_4)
+
+Explore the mesh metadata
+-------------------------
+
+You can access the mesh metadata with the |MeshInfo| object. It reads the metadata information before extracting
+the |MeshedRegion| from the result file.
+
+The mesh metadata information is stored in a |PropertyField| or in a |StringField|. They contain information
+that describes the mesh composition, and their data is mapped to the entities they are defined on.
+The mesh metadata information can include:
+
+- Properties
+- Parts
+- Faces
+- Bodies
+- Zones
+- Number of nodes and elements
+- Element types
+
+You can check which metadata information is available for a given result file.
+
+.. tab-set::
+
+    .. tab-item:: LSDYNA
+
+        .. jupyter-execute::
+
+            # Get the mesh metadata information
+            mesh_info_2 = model_2.metadata.mesh_info
+
+            # Print the mesh metadata information
+            print(mesh_info_2)
+
+    .. tab-item:: Fluent
+
+        .. 
jupyter-execute:: + + # Get the mesh metadata information + mesh_info_3 = model_3.metadata.mesh_info + + # Print the mesh metadata information + print(mesh_info_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the mesh metadata information + mesh_info_4 = model_4.metadata.mesh_info + + # Print the mesh metadata information + print(mesh_info_4) + +You can also extract each of those mesh metadata information by manipulating the |MeshInfo| object properties. + +For example, we can check the part names (for the LSDYNA result file) or the cell zone names +(for the Fluent or CFX result files): + +.. tab-set:: + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the part names + cell_zones_2 = mesh_info_2.get_property("part_names") + + # Print the part names + print(cell_zones_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the cell zone names + cell_zones_3 = mesh_info_3.get_property("cell_zone_names") + + # Print the cell zone names + print(cell_zones_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the cell zone names + cell_zones_4 = mesh_info_4.get_property("cell_zone_names") + + # Print the cell zone names + print(cell_zones_4) + +For more information on reading a mesh from a LSDYNA, Fluent or CFX file check the :ref:`examples_lsdyna`, +:ref:`fluids_examples` and :ref:`examples_cfx` examples sections. \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst b/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst new file mode 100644 index 00000000000..ed8aad1fcb1 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst @@ -0,0 +1,118 @@ +.. _ref_tutorials_extract_mesh_in_split_parts: + +============================= +Extract a mesh in split parts +============================= + +:bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst + +.. |meshes_provider| replace:: :class:`meshes_provider ` + +This tutorial shows how to extract meshes split on a given space or time from a result file. + +To accomplish this goal, you must use the |meshes_provider| operator. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the |DataSources| +------------------------ + +We must create a |DataSources| object so the |meshes_provider| operator can access the mesh. This object +manages paths to their files. + +For this tutorial, you can use a result file available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data` +tutorial section. + +.. tab-set:: + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the DataSources object + ds_3 = dpf.DataSources(result_path=result_file_path_3) + + .. tab-item:: CFX + + .. 
jupyter-execute::
+
+            # Import the ``ansys.dpf.core`` module
+            from ansys.dpf import core as dpf
+            # Import the examples module
+            from ansys.dpf.core import examples
+            # Import the operators module
+            from ansys.dpf.core import operators as ops
+
+            # Define the result file path
+            result_file_path_4 = examples.download_cfx_mixing_elbow()
+            # Create the DataSources object
+            ds_4 = dpf.DataSources(result_path=result_file_path_4)
+
+Extract the mesh in split parts
+-------------------------------
+
+Instantiate and evaluate the |meshes_provider| operator.
+The split meshes are given in a |MeshesContainer| and can be spatially or temporally varying.
+
+.. tab-set::
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Instantiate the meshes_provider operator
+            meshes_31 = ops.mesh.meshes_provider(data_sources=ds_3).eval()
+
+            # Print the meshes
+            print(meshes_31)
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Instantiate the meshes_provider operator
+            meshes_41 = ops.mesh.meshes_provider(data_sources=ds_4).eval()
+
+            # Print the meshes
+            print(meshes_41)
+
+Scope the mesh regions to be extracted in split regions
+-------------------------------------------------------
+
+A region corresponds to a zone for Fluent and CFX results. You can specify the mesh regions you want to get by giving
+the zone IDs to the ``region_scoping`` argument.
+
+.. tab-set::
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Instantiate the meshes_provider operator and specify a region
+            meshes_32 = ops.mesh.meshes_provider(data_sources=ds_3, region_scoping=[3,12]).eval()
+
+            # Print the meshes
+            print(meshes_32)
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Instantiate the meshes_provider operator and specify a region
+            meshes_42 = ops.mesh.meshes_provider(data_sources=ds_4, region_scoping=[5,8]).eval()
+
+            # Print the meshes
+            print(meshes_42)
diff --git a/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst b/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst
new file mode 100644
index 00000000000..085e8782aed
--- /dev/null
+++ b/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst
@@ -0,0 +1,269 @@
+.. _ref_tutorials_get_mesh_from_result_file:
+
+=============================
+Get a mesh from a result file
+=============================
+
+:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX`
+
+.. include:: ../../../links_and_refs.rst
+
+.. |mesh_provider| replace:: :class:`mesh_provider <ansys.dpf.core.operators.mesh.mesh_provider.mesh_provider>`
+
+This tutorial explains how to extract a mesh from a result file.
+
+The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your
+own from scratch or by getting it from a result file.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Import the result file
+----------------------
+
+First, import a result file. For this tutorial, you can use one available in the |Examples| module.
+For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data`
+tutorials section.
+
+Here, we create a |DataSources| object so the data can be directly accessed by different
+PyDPF-Core APIs. This object manages the paths to the result files.
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_static_rst() + # Create the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the DataSources object + ds_3 = dpf.DataSources(result_path=result_file_path_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the DataSources object + ds_4 = dpf.DataSources(result_path=result_file_path_4) + + +Get the mesh from the result file +--------------------------------- + +You can get the mesh from a result file by two methods: + +- :ref:`Using the DPF Model `; +- :ref:`Using the mesh_provider operator `. + +.. note:: + + A |Model| extracts a large amount of information by default (results, mesh and analysis data). + If using this helper takes a long time for processing the code, mind using a |DataSources| object + and instantiating operators directly with it. + +.. _get_mesh_model: + +Using the DPF |Model| +^^^^^^^^^^^^^^^^^^^^^ + +The |Model| is a helper designed to give shortcuts to access the analysis results +metadata and to instanciate results providers by opening a |DataSources| or a Streams. + +Get the |MeshedRegion| by instantiating a |Model| object and accessing its metadata. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Create the Model + model_1 = dpf.Model(data_sources=ds_1) + # Get the mesh + meshed_region_11 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Create the Model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_21 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Create the Model + model_3 = dpf.Model(data_sources=ds_3) + # Get the mesh + meshed_region_31 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Create the Model + model_4 = dpf.Model(data_sources=ds_4) + # Get the mesh + meshed_region_41 = model_4.metadata.meshed_region + +Printing the |MeshedRegion| displays the mesh dimensions: + +- Number of nodes and elements +- Unit +- Elements type + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_11) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_21) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_31) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_41) + +.. _get_mesh_mesh_provider: + +Using the |mesh_provider| operator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Get the |MeshedRegion| by instantiating the |mesh_provider| operator with the +|DataSources| object as an argument. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_12 = ops.mesh.mesh_provider(data_sources=ds_1).eval() + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_22 = ops.mesh.mesh_provider(data_sources=ds_2).eval() + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_32 = ops.mesh.mesh_provider(data_sources=ds_3).eval() + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_42 = ops.mesh.mesh_provider(data_sources=ds_4).eval() + +Printing the |MeshedRegion| displays the mesh dimensions: + +- Number of nodes and elements +- Unit +- Elements type + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_12) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_22) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_32) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_42) \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/index.rst b/doc/source/user_guide/tutorials/mesh/index.rst new file mode 100644 index 00000000000..ab4695de7ab --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/index.rst @@ -0,0 +1,89 @@ +.. _ref_tutorials_mesh: + +====== +Meshes +====== + +The mesh in DPF is represented by the :class:`MeshedRegion ` entity. + +These tutorials explains how to explore different attributes of a given mesh with PyDPF-Core. + + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Create a mesh from scratch + :link: ref_tutorials_create_a_mesh_from_scratch + :link-type: ref + :text-align: center + + This tutorial demonstrates how to build a mesh from the scratch. + + .. grid-item-card:: Get a mesh from a result file + :link: ref_tutorials_get_mesh_from_result_file + :link-type: ref + :text-align: center + + This tutorial explains how to extract a mesh from a result file. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. 
grid-item-card:: Explore a mesh metadata + :link: ref_tutorials_explore_mesh_metadata + :link-type: ref + :text-align: center + :class-card: sd-bg-light + :class-header: sd-bg-light sd-text-dark + :class-footer: sd-bg-light sd-text-dark + + This tutorial explains how to explore a mesh metadata before + extracting the mesh from a result file. + + +++ + :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + Requires DPF 9.1 or above (2025 R1). + + .. grid-item-card:: Explore a mesh + :link: ref_tutorials_explore_mesh + :link-type: ref + :text-align: center + + This tutorial explains how to access a mesh data and metadata + so it can be manipulated. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. grid-item-card:: Extract a mesh in split parts + :link: ref_tutorials_extract_mesh_in_split_parts + :link-type: ref + :text-align: center + + This tutorial shows how to extract meshes split on a given space or time from a result file. + + +++ + :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. grid-item-card:: Split a mesh + :link: ref_tutorials_split_mesh + :link-type: ref + :text-align: center + + This tutorial shows how to split a mesh on a given property. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + create_a_mesh_from_scratch.rst + get_mesh_from_result_file.rst + explore_mesh_metadata.rst + explore_mesh.rst + extract_mesh_in_split_parts.rst + split_mesh.rst diff --git a/doc/source/user_guide/tutorials/mesh/split_mesh.rst b/doc/source/user_guide/tutorials/mesh/split_mesh.rst new file mode 100644 index 00000000000..3ac8116bfef --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/split_mesh.rst @@ -0,0 +1,237 @@ +.. _ref_tutorials_split_mesh: + +============ +Split a mesh +============ + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst + +.. |split_mesh| replace:: :class:`split_mesh ` +.. |split_on_property_type| replace:: :class:`split_on_property_type ` +.. |from_scopings| replace:: :class:`from_scopings ` + +This tutorial shows how to split a mesh on a given property. + +There are two approaches to accomplish this goal: + +- :ref:`Use the split_mesh operator to split a already existing MeshedRegion`; +- :ref:`Split the mesh scoping and create the split MeshedRegion objects `. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the mesh +--------------- + +The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. For more +information check the :ref:`ref_tutorials_create_a_mesh_from_scratch` and :ref:`ref_tutorials_get_mesh_from_result_file` +tutorials. + +For this tutorial, we get a |MeshedRegion| from a result file. You can use one available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_multishells_rst() + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the mesh + meshed_region_1 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + # Create the model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_2 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the model + model_3 = dpf.Model(data_sources=result_file_path_3) + # Get the mesh + meshed_region_3 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the model + model_4 = dpf.Model(data_sources=result_file_path_4) + # Get the mesh + meshed_region_4 = model_4.metadata.meshed_region + +.. _ref_first_approach_split_mesh: + +First approach +-------------- + +This approach consists in splitting an already existing |MeshedRegion| based on a given property. To accomplish +that end, you must use the |split_mesh| operator. Currently you can split a mesh by material or eltype. + +The split mesh parts are stored in the DPF collection called |MeshesContainer|, where they are ordered by *labels*. +When you use the |split_mesh| operator, each split mesh part has two different *labels*: + +- A "body" *label* +- A *label* with the property used to split the mesh + +Here, we split the |MeshedRegion| by material. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Split the mesh by material + meshes_11 = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Print the meshes + print(meshes_11) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Split the mesh by material + meshes_21 = ops.mesh.split_mesh(mesh=meshed_region_2, property="mat").eval() + + # Print the meshes + print(meshes_21) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Split the mesh by material + meshes_31 = ops.mesh.split_mesh(mesh=meshed_region_3, property="mat").eval() + + # Print the meshes + print(meshes_31) + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Split the mesh by material + meshes_41 = ops.mesh.split_mesh(mesh=meshed_region_4, property="mat").eval() + # Print the meshes + print(meshes_41) + +.. _ref_second_approach_split_mesh: + +Second approach +--------------- + +This approach consists in splitting the |Scoping| of a given |MeshedRegion| based on a given property and then creating +a new |MeshedRegion| for each split |Scoping|. + +To accomplish this goal you must follow these steps: + +#. Use the |split_on_property_type| operator to split the mesh |Scoping|. + This operator splits a |Scoping| on a given property (elshape and/or material, since 2025R1 it supports any + scalar property field name contained in the mesh property fields). The split |Scoping| is stored in the DPF + collection called |ScopingsContainer|, where they are ordered by *labels*. In this case, you get *labels* with + the property used to split the |Scoping|. + +#. Create the split |MeshedRegion| objects using the |from_scopings| operator for the |Scoping| of interest. + The split parts are stored in the DPF collection called |MeshesContainer| where they are also ordered by *labels*. + These *labels* are corresponding to the "mat" labels gotten with the |split_on_property_type| operator. + +Here, we split the mesh scoping by material and create a |MeshedRegion| for all the split |Scoping| in the +|ScopingsContainer|. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_1 = ops.scoping.split_on_property_type(mesh=meshed_region_1, label1="mat").eval() + # Get the split meshes + meshes_12 = ops.mesh.from_scopings(scopings_container=split_scoping_1, mesh=meshed_region_1).eval() + # Print the meshes + print(meshes_12) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_2 = ops.scoping.split_on_property_type(mesh=meshed_region_2, label1="mat").eval() + # Get the split meshes + meshes_22 = ops.mesh.from_scopings(scopings_container=split_scoping_2, mesh=meshed_region_2).eval() + # Print the meshes + print(meshes_22) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_3 = ops.scoping.split_on_property_type(mesh=meshed_region_3, label1="mat").eval() + # Get the split meshes + meshes_32 = ops.mesh.from_scopings(scopings_container=split_scoping_3, mesh=meshed_region_3).eval() + # Print the meshes + print(meshes_32) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_4 = ops.scoping.split_on_property_type(mesh=meshed_region_4, label1="mat").eval() + # Get the split meshes + meshes_42 = ops.mesh.from_scopings(scopings_container=split_scoping_4, mesh=meshed_region_4).eval() + # Print the meshes + print(meshes_42) diff --git a/doc/source/user_guide/tutorials/operators_and_workflows/index.rst b/doc/source/user_guide/tutorials/operators_and_workflows/index.rst new file mode 100644 index 00000000000..900657acab9 --- /dev/null +++ b/doc/source/user_guide/tutorials/operators_and_workflows/index.rst @@ -0,0 +1,41 @@ +.. _ref_tutorials_operators_and_workflows: + +========================================= +Process data with operators and workflows +========================================= + +An operator is the main object used to create, transform, and stream data in DPF. + +They can perform different modifications of the data: direct mathematical operations, +averaging on the mesh, changes in the model locations.... 
They can also be chained together +to create workflows for more complex operations and customizable results. + +The tutorials in this section present how to create and use these operators and workflows in PyDPF-Core. + +For more information on how to program with PyDPF-Core check the +:ref:`ref_tutorials_language_and_usage` tutorial. + + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Use operators + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + + .. grid-item-card:: Create workflows + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/plot/index.rst b/doc/source/user_guide/tutorials/plot/index.rst new file mode 100644 index 00000000000..0b14860821a --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/index.rst @@ -0,0 +1,49 @@ +.. _ref_tutorials_plot: + +==== +Plot +==== + +These tutorials demonstrate different ways one can visualize the data in plots using PyDPF-Core. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Plot a mesh + :link: ref_tutorials_plot_mesh + :link-type: ref + :text-align: center + + This tutorial shows several ways to plot meshes. + + .. grid-item-card:: Add deformation + :link: ref_tutorials_plot_deformed_mesh + :link-type: ref + :text-align: center + + This tutorial shows how to add deformation to plots. + + .. grid-item-card:: Plot contours + :link: ref_tutorials_plot_contour + :link-type: ref + :text-align: center + + This tutorial shows how to plot contours. + + .. grid-item-card:: Plot a graph + :link: ref_tutorials_plot_graph + :link-type: ref + :text-align: center + + This tutorial shows how to plot graphs using matplotlib. + +.. toctree:: + :maxdepth: 2 + :hidden: + + plot_mesh.rst + plot_deformed_mesh.rst + plot_contour.rst + plot_a_graph.rst \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/plot/plot_a_graph.rst b/doc/source/user_guide/tutorials/plot/plot_a_graph.rst new file mode 100644 index 00000000000..82f4db32b2c --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_a_graph.rst @@ -0,0 +1,260 @@ +.. _ref_tutorials_plot_graph: + +============================= +Plot a graph using matplotlib +============================= + +.. include:: ../../../links_and_refs.rst + +.. |Line| replace:: :class:`Line ` +.. |on_coordinates| replace:: :class:`on_coordinates ` +.. |Line.path| replace:: :py:attr:`Line.path ` +.. |min_max_fc| replace:: :class:`min_max_fc ` + +This tutorial explains how to plot a graph with data from DPF using `matplotlib `_. + +The current |DpfPlotter| module does not allow to plot graphs. Instead, you need to import the +`matplotlib `_ library to plot graphs with PyDPF-Core. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +There is a large range of graphs you can plot. Here, we showcase: + +- :ref:`A graph of a result along a path ` +- :ref:`A graph of transient data ` + +.. _ref_graph_result_space: + +Result along a path +------------------- + +In this tutorial, we plot the norm of the displacement along a custom path represented by a |Line|. +For more information about how to create a custom geometric object, +see the :ref:`ref_tutorials_plot_on_custom_geometry` tutorial. + +We first need to get the data of interest, then create a custom |Line| geometry for the path. 
+We then map the result on the path, and finally create a 2D graph. + +Extract the data +^^^^^^^^^^^^^^^^ + +First, extract the data from a result file or create some from scratch. +For this tutorial we use a case available in the |Examples| module. +For more information on how to import your own result file in DPF, +or on how to create data from user input in PyDPF-Core,see +the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + # Import the geometry module + from ansys.dpf.core import geometry as geo + + # Import the ``matplotlib.pyplot`` module + import matplotlib.pyplot as plt + + # Download and get the path to an example result file + result_file_path_1 = examples.find_static_rst() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +We then extract the result of interest for the graph. +In this tutorial, we want the norm of the displacement field at the last step. + +.. jupyter-execute:: + + # Get the nodal displacement field at the last simulation step (default) + disp_results_1 = model_1.results.displacement.eval() + + # Get the norm of the displacement field + norm_disp = ops.math.norm_fc(fields_container=disp_results_1).eval() + +Define the path +^^^^^^^^^^^^^^^ + +Create a path as a |Line| passing through the diagonal of the mesh. + +.. jupyter-execute:: + + # Create a discretized line for the path + line_1 = geo.Line(coordinates=[[0.0, 0.06, 0.0], [0.03, 0.03, 0.03]], n_points=50) + # Plot the line on the original mesh + line_1.plot(mesh=model_1.metadata.meshed_region) + +Map the data on the path +^^^^^^^^^^^^^^^^^^^^^^^^ + +Map the displacement norm field to the |Line| using the |on_coordinates| mapping operator. + +This operator interpolates field values at given node coordinates, using element shape functions. + +It takes as input a |FieldsContainer| of data, a 3D vector |Field| of coordinates to interpolate at, +and an optional |MeshedRegion| to use for element shape functions if the first |Field| in the data +provided does not have an associated meshed support. + +.. jupyter-execute:: + + # Interpolate the displacement norm field at the nodes of the custom path + disp_norm_on_path_fc: dpf.FieldsContainer = ops.mapping.on_coordinates( + fields_container=norm_disp, + coordinates=line_1.mesh.nodes.coordinates_field, + ).eval() + # Extract the only field in the collection obtained + disp_norm_on_path: dpf.Field = disp_norm_on_path_fc[0] + print(disp_norm_on_path) + +Plot the graph +^^^^^^^^^^^^^^ + +Plot a graph of the norm of the displacement field along the path using the +`matplotlib `_ library. + +To get the parametric coordinates of the nodes along the line and use them as X-axis, +you can use the |Line.path| property. +It gives the 1D array of parametric coordinates of the nodes of the line along the line. + +The values in the displacement norm field are in the same order as the parametric +coordinates because the mapping operator orders output data the same as the input coordinates. + +.. 
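jupyter-execute::
+
+    # Optional sanity check (a suggested verification, not part of the original workflow):
+    # the path should provide exactly one parametric coordinate per mapped data value.
+    assert len(line_1.path) == len(disp_norm_on_path.data)
+
+.. 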
jupyter-execute:: + + # Get the field of parametric coordinates along the path for the X-axis + line_coordinates = line_1.path + + # Define the curve to plot + plt.plot(line_coordinates, disp_norm_on_path.data) + + # Add titles to the axes and the graph + plt.xlabel("Position on path") + plt.ylabel("Displacement norm") + plt.title("Displacement norm along the path") + + # Display the graph + plt.show() + +.. _ref_graph_result_time: + +Transient data +-------------- + +In this tutorial, we plot the minimum and maximum displacement norm over time for a transient analysis. +For more information about using PyDPF-Core with a transient analysis, +see the :ref:`static_transient_examples` examples. + +We first need to create data for the Y-axis, +and then format the time information of the model for the X-axis, +to finally create a 2D graph using both. + +Prepare data +^^^^^^^^^^^^ + +First, extract the data from a transient result file or create some from scratch. +For this tutorial we use a transient case available in the |Examples| module. +For more information on how to import your own result file in DPF, +or on how to create data from user input in PyDPF-Core, see +the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Import the ``matplotlib.pyplot`` module + import matplotlib.pyplot as plt + + # Download and get the path to an example transient result file + result_file_path_2 = examples.download_transient_result() + + # Create a model from the result file + model_2 = dpf.Model(data_sources=result_file_path_2) + + # Check the model is transient with its ``TimeFreqSupport`` + print(model_2.metadata.time_freq_support) + +We then extract the result of interest for the graph. +In this tutorial, we want the maximum and minimum displacement norm over the field at each time step. + +First extract the displacement field for every time step. + +.. jupyter-execute:: + + # Get the displacement at all time steps + disp_results_2: dpf.FieldsContainer = model_2.results.displacement.on_all_time_freqs.eval() + +Next, get the minimum and maximum of the norm of the displacement at each time step using the |min_max_fc| operator. + +.. jupyter-execute:: + + # Instantiate the min_max operator and give the output of the norm operator as input + min_max_op = ops.min_max.min_max_fc(fields_container=ops.math.norm_fc(disp_results_2)) + + # Get the field of maximum values at each time-step + max_disp: dpf.Field = min_max_op.outputs.field_max() + print(max_disp) + + # Get the field of minimum values at each time-step + min_disp: dpf.Field = min_max_op.outputs.field_min() + print(min_disp) + +The operator already outputs fields where data points are associated to time-steps. + +Prepare time values +^^^^^^^^^^^^^^^^^^^ + +The time or frequency information associated to DPF objects is stored in |TimeFreqSupport| objects. + +You can use the |TimeFreqSupport| of a |Field| with location ``time_freq`` to retrieve the time or +frequency values associated to the entities mentioned in its scoping. + +Here the fields are on all time-steps, so we can simply get the list of all time values without filtering. + +.. 
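jupyter-execute::
+
+    # Short illustrative sketch: the number of time steps is also available directly
+    # on the support. ``n_sets`` counts the time/frequency sets it describes.
+    print("Number of time steps:", model_2.metadata.time_freq_support.n_sets)
+
+.. 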
jupyter-execute:: + + # Get the field of time values + time_steps_1: dpf.Field = disp_results_2.time_freq_support.time_frequencies + + # Print the time values + print(time_steps_1) + +The time values associated to time-steps are given in a |Field|. +To use it in the graph you need to extract the data of the |Field| as an array. + +.. jupyter-execute:: + + # Get the time values + time_data = time_steps_1.data + print(time_data) + + +Plot the graph +^^^^^^^^^^^^^^ + +Plot a graph of the minimum and maximum displacement over time using the +`matplotlib `_ library. + +Use the ``unit`` property of the fields to properly label the axes. + +.. jupyter-execute:: + + # Define the plot figure + plt.plot(time_data, max_disp.data, "r", label="Max") + plt.plot(time_data, min_disp.data, "b", label="Min") + + # Add axis labels and legend + plt.xlabel(f"Time ({time_steps_1.unit})") + plt.ylabel(f"Displacement ({max_disp.unit})") + plt.legend() + + # Display the graph + plt.show() diff --git a/doc/source/user_guide/tutorials/plot/plot_contour.rst b/doc/source/user_guide/tutorials/plot/plot_contour.rst new file mode 100644 index 00000000000..3ec2b13d06f --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_contour.rst @@ -0,0 +1,284 @@ +.. _ref_tutorials_plot_contour: + +============= +Plot contours +============= + +.. include:: ../../../links_and_refs.rst + +.. |Field.plot| replace:: :py:meth:`Field.plot() ` +.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() ` +.. |add_mesh| replace:: :py:meth:`add_mesh() ` +.. |add_field| replace:: :py:meth:`add_field() ` +.. |show_figure| replace:: :py:meth:`show_figure() ` +.. |to_nodal_fc| replace:: :py:class:`to_nodal_fc ` +.. |select_component| replace:: :func:`select_component() ` +.. |stress_op| replace:: :py:class:`stress ` +.. |Field.meshed_region| replace:: :py:attr:`Field.meshed_region ` +.. |FieldsContainer.plot| replace:: :py:meth:`FieldsContainer.plot() ` +.. |split_fields| replace:: :py:class:`split_fields ` +.. |split_mesh| replace:: :py:class:`split_mesh ` + +This tutorial shows different commands for plotting data contours on meshes. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +Load a result file in a model +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For this tutorial, we use mesh information and data from a case available in the |Examples| module. +For more information on how to import your own result file in DPF, see +the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.download_piston_rod() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +Extract data for the contour +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Extract data for the contour. For more information about extracting results from a result file, +see the :ref:`ref_tutorials_import_data` tutorials section. + +.. note:: + + Only the *'elemental'* or *'nodal'* locations are supported for plotting. + +Here, we choose to plot the XX component of the stress tensor. 
+
+.. jupyter-execute::
+
+    # Get the stress operator for component XX
+    stress_XX_op = ops.result.stress_X(data_sources=model_1)
+
+    # The default behavior of the operator is to return data as *'ElementalNodal'*
+    print(stress_XX_op.eval())
+
+We must request the stress at a *'nodal'* location, as the default *'ElementalNodal'* location of the stress results
+is not supported for plotting.
+
+There are different ways to change the location. Here, we define the new location using the input of the |stress_op|
+operator. Another option would be to use an averaging operator on the output of the stress operator,
+like the |to_nodal_fc| operator.
+
+.. jupyter-execute::
+
+    # Define the desired location as an input of the stress operator
+    stress_XX_op.inputs.requested_location(dpf.locations.nodal)
+
+    # Get the output
+    stress_XX_fc = stress_XX_op.eval()
+
+The output is a collection of fields: a |FieldsContainer|.
+
+Extract a mesh
+^^^^^^^^^^^^^^
+
+Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works.
+
+.. jupyter-execute::
+
+    # Extract the mesh
+    meshed_region_1 = model_1.metadata.meshed_region
+
+Plot a contour of a single field
+--------------------------------
+
+To plot a single |Field|, you can use:
+
+- the |Field.plot| method
+- the |MeshedRegion.plot| method with the field as argument
+- the |DpfPlotter| class and its |add_field| method
+
+.. hint::
+
+    Using the |DpfPlotter| class is more performant than using the |Field.plot| method.
+
+.. tab-set::
+
+    .. tab-item:: Field.plot()
+
+        First, get a |Field| from the stress results |FieldsContainer|. Then, use the |Field.plot| method [1]_.
+        If the |Field| does not have an associated mesh support (see |Field.meshed_region|),
+        you must use the ``meshed_region`` argument and provide a mesh.
+
+        .. jupyter-execute::
+
+            # Get a single field
+            stress_XX = stress_XX_fc[0]
+
+            # Plot the contour on the mesh
+            stress_XX.plot(meshed_region=meshed_region_1)
+
+    .. tab-item:: MeshedRegion.plot()
+
+        Use the |MeshedRegion.plot| method [1]_.
+        You must use the *'field_or_fields_container'* argument and
+        give the |Field| or the |FieldsContainer| containing the stress results data.
+
+        .. jupyter-execute::
+
+            # Plot the mesh with the stress field contour
+            meshed_region_1.plot(field_or_fields_container=stress_XX)
+
+    .. tab-item:: DpfPlotter
+
+        First create an instance of |DpfPlotter| [2]_. Then, add the |Field| to the scene using the |add_field| method.
+        If the |Field| does not have an associated mesh support (see |Field.meshed_region|),
+        you must use the *'meshed_region'* argument and provide a mesh.
+
+        To render and show the figure based on the current state of the plotter object, use the |show_figure| method.
+
+        .. jupyter-execute::
+
+            # Create a DpfPlotter instance
+            plotter_1 = dpf.plotter.DpfPlotter()
+
+            # Add the field to the scene, here with an explicitly associated mesh
+            plotter_1.add_field(field=stress_XX, meshed_region=meshed_region_1)
+
+            # Display the scene
+            plotter_1.show_figure()
+
+        You can also first use the |add_mesh| method to add the mesh to the scene
+        and then use |add_field| without the ``meshed_region`` argument.
+
+
+Plot a contour of multiple fields
+---------------------------------
+
+Prepare a collection of fields
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. warning::
+
+    The fields should not have conflicting data, meaning you cannot build a contour for two fields
+    with two different sets of data for the same mesh entities (intersecting scopings). 
+ + This means the following methods are for example not available for a collection made of the same field + varying across time, or a collection of fields for different shell layers of the same elements. + +Here we split the field for XX stress based on material to get a collection of fields with non-conflicting associated mesh entities. + +We use the |split_fields| operator to split the field based on the result of the |split_mesh| operator. +The |split_mesh| operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, the split criterion is the material ID. + +.. jupyter-execute:: + + # Split the field based on material property + fields = ( + ops.mesh.split_fields( + field_or_fields_container=stress_XX_fc, + meshes=ops.mesh.split_mesh(mesh=meshed_region_1, property="mat"), + ) + ).eval() + + # Show the result + print(fields) + +For ``MAPDL`` results the split on material is equivalent to a split on ``bodies``, hence the two equivalent labels. + +Plot the contour +^^^^^^^^^^^^^^^^ + +To plot a contour for multiple |Field| objects, you can use: + +- the |FieldsContainer.plot| method if the fields are in a collection +- the |MeshedRegion.plot| method with the field collection as argument +- the |DpfPlotter| class and several calls to its |add_field| method + +.. hint:: + + Using the |DpfPlotter| class is more performant than using the |Field.plot| method + +.. tab-set:: + + .. tab-item:: FieldsContainer.plot() + + Use the |FieldsContainer.plot| method [1]_. + + .. jupyter-execute:: + + # Plot the contour for all fields in the collection + fields.plot() + + The ``label_space`` argument provides further field filtering capabilities. + + .. jupyter-execute:: + + # Plot the contour for ``mat`` 1 only + fields.plot(label_space={"mat":1}) + + .. tab-item:: MeshedRegion.plot() + + Use the |MeshedRegion.plot| method [1]_. + You must use the *'field_or_fields_container'* argument and + give the |Field| or the |FieldsContainer| containing the stress results data. + + .. jupyter-execute:: + + # Plot the mesh with the stress field contours + meshed_region_1.plot(field_or_fields_container=fields) + + .. tab-item:: DpfPlotter + + First create an instance of |DpfPlotter| [2]_. + Then, add each |Field| to the scene using the |add_field| method. + If the |Field| does not have an associated mesh support (see |Field.meshed_region|), + you must use the *'meshed_region'* argument and provide a mesh. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add each field to the scene + plotter_1.add_field(field=fields[0]) + plotter_1.add_field(field=fields[1]) + + # Display the scene + plotter_1.show_figure() + +.. rubric:: Footnotes + +.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default. + You can pass additional PyVista arguments to all plotting methods to change the default behavior + (see options for `pyvista.plot() `_), such as: + + .. jupyter-execute:: + + model_1.plot(title="Mesh", + text="this is a mesh", # Adds the given text at the bottom of the plot + off_screen=True, + screenshot="mesh_plot_1.png", # Save a screenshot to file with the given name + window_size=[450,350]) + # Notes: + # - To save a screenshot to file, use "screenshot=figure_name.png" ( as well as "notebook=False" if on a Jupyter notebook). 
+ # - The "off_screen" keyword only works when "notebook=False". If "off_screen=True" the plot is not displayed when running the code. + +.. [2] The |DpfPlotter| is currently based on PyVista. + That means that PyVista must be installed. + The DPF plotter also passes additional parameters to the PyVista plotter + (arguments supported by the version of PyVista installed). + More information about available additional arguments is available at `pyvista.plot() `_. diff --git a/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst b/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst new file mode 100644 index 00000000000..b0fd1bca2ab --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst @@ -0,0 +1,227 @@ +.. _ref_tutorials_plot_deformed_mesh: + +========================== +Plot with mesh deformation +========================== + +.. include:: ../../../links_and_refs.rst + +.. |Model.plot| replace:: :py:meth:`Model.plot() ` +.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() ` +.. |MeshesContainer.plot| replace:: :py:meth:`MeshesContainer.plot() ` +.. |add_mesh| replace:: :py:meth:`add_mesh()` +.. |add_field| replace:: :py:meth:`add_field()` +.. |show_figure| replace:: :py:meth:`show_figure()` +.. |split_mesh| replace:: :py:class:`split_mesh ` +.. |disp_op| replace:: :py:class:`displacement operator ` + +This tutorial shows different commands for plotting a deformed mesh without data. + +A mesh is represented in DPF by a |MeshedRegion| object. +You can store multiple |MeshedRegion| in a DPF collection called |MeshesContainer|. + +You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. +For more information, see the :ref:`ref_tutorials_create_a_mesh_from_scratch` and +:ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +For this tutorial, we use mesh information from a case available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Download and get the path to an example result file + result_file_path_1 = examples.download_piston_rod() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +Get the deformation field +------------------------- + +To deform the mesh, we need a nodal 3D vector field specifying the translation of each node in the mesh. + +The following DPF objects are able to return or represent such a field +and are accepted inputs for the deformation parameter of plot methods: + +- A |Field| +- A |FieldsContainer| +- A |Result| +- An |Operator| + +Here, we use the |disp_op| which outputs a nodal 3D vector field of distances. + +One can get the operator from the |Model| with the source of data already connected. +For more information about extracting results from a result file, +see the :ref:`ref_tutorials_import_data` tutorials section. + +.. 
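jupyter-execute::
+
+    # For reference, a rough sketch of the equivalent manual construction:
+    # instantiate the result operator yourself and connect a DataSources to it.
+    # (``disp_op_manual`` is an illustrative name, not used later in this tutorial.)
+    disp_op_manual = ops.result.displacement(
+        data_sources=dpf.DataSources(result_path=result_file_path_1)
+    )
+
+.. 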
jupyter-execute:: + + # Get the displacement operator for this model + disp_op = model_1.results.displacement() + +You can apply a scale factor to the deformation for every method in this tutorial +by passing in the ``scale_factor`` argument. + +.. jupyter-execute:: + + # Define the scale factor + scl_fct = 2.0 + +.. _ref_plot_deformed_mesh_with_model: + +Plot a deformed model +--------------------- + +You can directly plot the overall mesh loaded by the model with |Model.plot| [1]_. +To plot it with deformation, use the *'deform_by'* argument and provide the displacement operator. + +.. jupyter-execute:: + + # Plot the deformed mesh + model_1.plot(deform_by=disp_op, scale_factor=scl_fct) + +You can apply a scale factor to the deformation for every method in this tutorial. + +.. jupyter-execute:: + + # Define the scale factor + scl_fct = 2.0 + +.. _ref_plot_deformed_mesh_with_meshed_region: + +Plot a single mesh +------------------ + +Get the mesh +^^^^^^^^^^^^ + +Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works. + +.. jupyter-execute:: + + # Extract the mesh + meshed_region_1 = model_1.metadata.meshed_region + +Plot the mesh +^^^^^^^^^^^^^ + +To plot the deformed |MeshedRegion| you can use: + +- The |MeshedRegion.plot| method; +- The |DpfPlotter| object. + +.. tab-set:: + + .. tab-item:: MeshedRegion.plot() method + + Use the |MeshedRegion.plot| method [1]_ of the |MeshedRegion| object we defined. + Add deformation by providing our displacement operator to the *'deform_by'* argument. + + .. jupyter-execute:: + + # Plot the deformed mesh + meshed_region_1.plot(deform_by=disp_op, scale_factor=scl_fct) + + .. tab-item:: DpfPlotter object + + To plot the mesh with this approach, first create an instance of |DpfPlotter| [2]_. + Then, add the |MeshedRegion| to the scene using the |add_mesh| method. + Add deformation by providing our displacement operator to the *'deform_by'* argument. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add the mesh to the scene with deformation + plotter_1.add_mesh(meshed_region=meshed_region_1, + deform_by=disp_op, + scale_factor=scl_fct) + + # Display the scene + plotter_1.show_figure() + +You can also plot data contours on a deformed mesh. For more information, see :ref:`ref_tutorials_plot_contour` + +.. _ref_plot_deformed_mesh_with_meshes_container: + +Plot several meshes +------------------- + +Build a collection of meshes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are different ways to obtain a |MeshesContainer|. +You can for example split a |MeshedRegion| using operators. + +Here, we use the |split_mesh| operator to split the mesh based on the material of each element. +This operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, each mesh has a *'mat'* label. +For more information about how to get a split mesh, see the :ref:`ref_tutorials_split_mesh` +and :ref:`ref_tutorials_extract_mesh_in_split_parts` tutorials. + +.. jupyter-execute:: + + # Split the mesh based on material property + meshes = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Show the result + print(meshes) + +Plot the meshes +^^^^^^^^^^^^^^^ + +Use the |MeshesContainer.plot| method [1]_ of the |MeshesContainer| object we defined. 
+Provide the displacement operator to the *'deform_by'* argument to add mesh deformation. + +This method plots all the |MeshedRegion| objects stored in the |MeshesContainer| +and colors them based on the property used to split the mesh. + +.. jupyter-execute:: + + # Plot the deformed mesh + meshes.plot(deform_by=disp_op, scale_factor=scl_fct) + +You can also plot data on a collection of deformed meshes. +For more information, see :ref:`_ref_tutorials_plot_contour` + +.. rubric:: Footnotes + +.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default. + You can pass additional PyVista arguments to all plotting methods to change the default behavior + (see options for `pyvista.plot() `_), such as: + + .. jupyter-execute:: + + model_1.plot(title="Mesh", + text="this is a mesh", # Adds the given text at the bottom of the plot + off_screen=True, + screenshot="mesh_plot_1.png", # Save a screenshot to file with the given name + window_size=[450,350]) + # Notes: + # - To save a screenshot to file, use "screenshot=figure_name.png" ( as well as "notebook=False" if on a Jupyter notebook). + # - The "off_screen" keyword only works when "notebook=False". If "off_screen=True" the plot is not displayed when running the code. + +.. [2] The |DpfPlotter| is currently based on PyVista. + That means that PyVista must be installed. + The DPF plotter also passes additional parameters to the PyVista plotter + (arguments supported by the version of PyVista installed). + More information about available additional arguments is available at `pyvista.plot() `_. diff --git a/doc/source/user_guide/tutorials/plot/plot_mesh.rst b/doc/source/user_guide/tutorials/plot/plot_mesh.rst new file mode 100644 index 00000000000..62db0045131 --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_mesh.rst @@ -0,0 +1,174 @@ +.. _ref_tutorials_plot_mesh: + +=========== +Plot a mesh +=========== + +.. include:: ../../../links_and_refs.rst + +.. |Model.plot| replace:: :py:meth:`Model.plot() ` +.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() ` +.. |MeshesContainer.plot| replace:: :py:meth:`MeshesContainer.plot() ` +.. |add_mesh| replace:: :py:meth:`add_mesh() ` +.. |show_figure| replace:: :py:meth:`show_figure() ` +.. |split_mesh| replace:: :py:class:`split_mesh ` + +This tutorial shows different commands for plotting a mesh without data. + +A mesh is represented in DPF by a |MeshedRegion| object. +You can store multiple |MeshedRegion| in a DPF collection called |MeshesContainer|. + +You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. +For more information, see the :ref:`ref_tutorials_create_a_mesh_from_scratch` and +:ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +For this tutorial, we use mesh information from a case available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. 
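jupyter-execute::
+
+    # Optional environment check (a suggestion, not required by the tutorial):
+    # DPF plotting relies on PyVista, so confirm that it imports correctly.
+    import pyvista
+    print("PyVista version:", pyvista.__version__)
+
+.. 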
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Download and get the path to an example result file + result_file_path_1 = examples.download_piston_rod() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +Plot a model +------------ + +You can directly plot the overall mesh loaded by the model with |Model.plot| [1]_. + +.. jupyter-execute:: + + # Plot the mesh + model_1.plot() + +Plot a single mesh +------------------ + +Get the mesh +^^^^^^^^^^^^ + +Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works. + +.. jupyter-execute:: + + # Extract the mesh + meshed_region_1 = model_1.metadata.meshed_region + +Plot the mesh +^^^^^^^^^^^^^ + +To plot the |MeshedRegion| you can use: + +- The |MeshedRegion.plot| method; +- The |DpfPlotter| object. + +.. tab-set:: + + .. tab-item:: MeshedRegion.plot() method + + Use the |MeshedRegion.plot| method [1]_ of the |MeshedRegion| object we defined. + + .. jupyter-execute:: + + # Plot the mesh object + meshed_region_1.plot() + + .. tab-item:: DpfPlotter object + + To plot the mesh with this approach, first create an instance of |DpfPlotter| [2]_. + Then, add the |MeshedRegion| to the scene using the |add_mesh| method. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add the mesh to the scene + plotter_1.add_mesh(meshed_region=meshed_region_1) + + # Display the scene + plotter_1.show_figure() + +You can also plot data contours on a mesh. For more information, see :ref:`ref_tutorials_plot_contour` + +Plot several meshes +------------------- + +Build a collection of meshes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are different ways to obtain a |MeshesContainer|. +You can for example split a |MeshedRegion| using operators. + +Here, we use the |split_mesh| operator to split the mesh based on the material of each element. +This operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, each mesh has a *'mat'* label. +For more information about how to get a split mesh, see the :ref:`ref_tutorials_split_mesh` +and :ref:`ref_tutorials_extract_mesh_in_split_parts` tutorials. + +.. jupyter-execute:: + + # Split the mesh based on material property + meshes = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Show the result + print(meshes) + +Plot the meshes +^^^^^^^^^^^^^^^ + +Use the |MeshesContainer.plot| method [1]_ of the |MeshesContainer| object we defined. + +This method plots all the |MeshedRegion| objects stored in the |MeshesContainer| +and colors them based on the property used to split the mesh. + +.. jupyter-execute:: + + # Plot the collection of meshes + meshes.plot() + +You can also plot data on a collection of meshes. +For more information, see :ref:`ref_tutorials_plot_contour` + +.. rubric:: Footnotes + +.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default. + You can pass additional PyVista arguments to all plotting methods to change the default behavior + (see options for `pyvista.plot() `_), such as: + + .. 
diff --git a/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst b/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst
new file mode 100644
index 00000000000..441d1529f88
--- /dev/null
+++ b/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst
@@ -0,0 +1,145 @@
+.. _tutorials_main_steps:
+
+Postprocessing main steps
+-------------------------
+
+There are four main steps to transform simulation data into output data that can
+be used to visualize and analyze simulation results:
+
+.. grid::
+    :gutter: 2
+    :padding: 2
+    :margin: 2
+
+    .. grid-item-card:: 1
+        :link: tutorials_main_steps_1
+        :link-type: ref
+        :text-align: center
+
+        Import and open results files
+
+    .. grid-item-card:: 2
+        :link: tutorials_main_steps_2
+        :link-type: ref
+        :text-align: center
+
+        Access and extract results
+
+    .. grid-item-card:: 3
+        :link: tutorials_main_steps_3
+        :link-type: ref
+        :text-align: center
+
+        Transform available data
+
+    .. grid-item-card:: 4
+        :link: tutorials_main_steps_4
+        :link-type: ref
+        :text-align: center
+
+        Visualize the data
+
+
+:jupyter-download-script:`Download tutorial as Python script<01-main-steps>`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook<01-main-steps>`
+
+.. _tutorials_main_steps_1:
+
+1- Import and open results files
+********************************
+
+First, import the DPF-Core module as ``dpf`` and the included examples module.
+
+.. jupyter-execute::
+
+    # Import the ansys.dpf.core module as ``dpf``
+    from ansys.dpf import core as dpf
+    # Import the examples module
+    from ansys.dpf.core import examples
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+The ``DataSources`` class manages paths to result files. Use this object to declare
+data inputs for DPF and define their locations.
+
+.. jupyter-execute::
+
+    # Define the DataSources object
+    my_data_sources = dpf.DataSources(result_path=examples.find_simple_bar())
+
+
+The :class:`Model ` class creates and evaluates common readers for the files it is given,
+such as a mesh provider, a result info provider, and a streams provider.
+It provides dynamically built methods to extract the results available in the files, as well as many shortcuts
+to facilitate exploration of the available data.
+
+Printing the model displays:
+
+  - Analysis type
+  - Available results
+  - Size of the mesh
+  - Number of results
+
+.. jupyter-execute::
+
+    # Define the Model object
+    my_model = dpf.Model(data_sources=my_data_sources)
+    print(my_model)
+
+.. _tutorials_main_steps_2:
+
+2- Access and extract results
+*****************************
+
+The model shows that a displacement result is available. You can access this result as follows:
+
+.. jupyter-execute::
+
+    # Define the displacement results through the model's `results` property
+    my_displacements = my_model.results.displacement.eval()
+    print(my_displacements)
+
+The displacement data can be extracted as follows:
+
+.. jupyter-execute::
+
+    # Extract the data of the displacement field
+    my_displacements_0 = my_displacements[0].data
+    print(my_displacements_0)
+
+.. _tutorials_main_steps_3:
+
+3- Transform available data
+***************************
+
+You can apply several transformations to the data. A transformation can be a single operation,
+using only one operator, or a succession of operations, defined as a workflow of chained
+operators.
+
+Here, we start by computing the norm of the displacements.
+
+.. jupyter-execute::
+
+    # Define the norm operator (here for a fields container) for the displacement
+    my_norm = ops.math.norm_fc(fields_container=my_displacements).eval()
+    print(my_norm[0].data)
+
+Then, we compute the maximum value of the displacement norm.
+
+.. jupyter-execute::
+
+    # Define the maximum operator and chain it to the norm operator
+    my_max = ops.min_max.min_max_fc(fields_container=my_norm).outputs.field_max()
+    print(my_max)
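+
+The same maximum can also be obtained lazily. The sketch below (reusing ``my_displacements``
+from the previous step) connects the output of the norm operator directly to the input of the
+min/max operator, so DPF evaluates the whole chain only when the final output is requested:
+
+.. code-block:: python
+
+    # Instantiate the norm operator without evaluating it
+    my_norm_op = ops.math.norm_fc(fields_container=my_displacements)
+
+    # Chain it to the min/max operator and request only the final output
+    my_max_op = ops.min_max.min_max_fc(fields_container=my_norm_op.outputs.fields_container)
+    print(my_max_op.outputs.field_max())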
+
+.. _tutorials_main_steps_4:
+
+4- Visualize the data
+*********************
+
+Finally, plot the displacement results over the mesh.
+
+.. jupyter-execute::
+
+    # Define the support of the plot (here we plot the displacement over the mesh)
+    my_model.metadata.meshed_region.plot(field_or_fields_container=my_displacements)
diff --git a/doc/source/user_guide/tutorials/post_processing_basics/index.rst b/doc/source/user_guide/tutorials/post_processing_basics/index.rst
new file mode 100644
index 00000000000..b98cf33a341
--- /dev/null
+++ b/doc/source/user_guide/tutorials/post_processing_basics/index.rst
@@ -0,0 +1,14 @@
+.. _ref_tutorials_processing_basics:
+
+======================
+Processing data basics
+======================
+
+Data processing consists of a series of operations applied to data to achieve a goal. DPF enables
+you to access and transform simulation data using customizable workflows.
+
+There is an extensive catalog of operators of varying kind and complexity that can be used together.
+
+The tutorials in this section present a basic application of PyDPF-Core as a post-processing tool.
+
+.. include:: 01-main-steps.rst
\ No newline at end of file
diff --git a/doc/sphinx_utilities/version_filtering.py b/doc/sphinx_utilities/version_filtering.py
new file mode 100644
index 00000000000..d3609e84e34
--- /dev/null
+++ b/doc/sphinx_utilities/version_filtering.py
@@ -0,0 +1,25 @@
+#
+from pathlib import Path
+
+
+def get_tutorial_version_requirements(tutorial_path: str) -> str:
+    """Return the minimum DPF server version a tutorial requires, as declared in a leading note."""
+    note_flag = r".. note::"
+    version_flag = "This tutorial requires DPF"
+    previous_line_is_note = False
+    minimum_version = "0.0"
+    tutorial_path = Path(tutorial_path)
+    skip_empty_line = False
+    with tutorial_path.open(mode="rt", encoding="utf-8") as tutorial_file:
+        for line in tutorial_file:
+            if (version_flag in line) and previous_line_is_note:
+                # Keep the first token after the flag, which is expected to be the version number
+                minimum_version = line.split(version_flag)[-1].split()[0]
+                break
+            if note_flag in line:
+                previous_line_is_note = True
+                # The note directive is usually followed by one empty line before its body
+                skip_empty_line = True
+            else:
+                if skip_empty_line:
+                    skip_empty_line = False
+                else:
+                    previous_line_is_note = False
+    return minimum_version
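+
+
+if __name__ == "__main__":
+    # Minimal usage sketch, assuming this module runs from the repository root;
+    # the tutorial path below is illustrative and not part of the documentation build.
+    sample_tutorial = Path("doc/source/user_guide/tutorials/plot/plot_mesh.rst")
+    if sample_tutorial.exists():
+        print(get_tutorial_version_requirements(str(sample_tutorial)))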
diff --git a/examples/08-python-operators/02-python_operators_with_dependencies.py b/examples/08-python-operators/02-python_operators_with_dependencies.py
index 3bc138d71d4..436eae3de78 100644
--- a/examples/08-python-operators/02-python_operators_with_dependencies.py
+++ b/examples/08-python-operators/02-python_operators_with_dependencies.py
@@ -57,13 +57,36 @@
 # Download the ``gltf_plugin`` plug-in package that has already been
 # created for you.
-import os
 from pathlib import Path
-from ansys.dpf.core import examples
+print("\033[1m gltf_plugin")
+file_list = [
+    "gltf_plugin/__init__.py",
+    "gltf_plugin/operators.py",
+    "gltf_plugin/operators_loader.py",
+    "gltf_plugin/requirements.txt",
+    "gltf_plugin/gltf_export.py",
+    "gltf_plugin/texture.png",
+    "gltf_plugin.xml",
+]
-plugin_path = Path(examples.download_gltf_plugin())
 folder_root = Path(str(Path.cwd()).rsplit("pydpf-core", 1)[0]) / "pydpf-core"
+source_path_in_repo = r"doc\source\examples\07-python-operators\plugins"
+operator_folder = Path(folder_root) / Path(source_path_in_repo)
+print(operator_folder)
+plugin_path = None
+
+for file in file_list:
+    operator_file_path = Path(operator_folder) / Path(file)
+
+    print(f"\033[1m {file}\n \033[0m")
+    if (Path(file).suffix in [".py", ".xml"]) and file != "gltf_plugin/gltf_export.py":
+        with Path(operator_file_path).open(mode="r") as f:
+            for line in f.readlines():
+                print("\t\t\t" + line)
+        print("\n\n")
+    if plugin_path is None:
+        plugin_path = Path(operator_file_path).parent
 
 # %%
 # To add third-party modules as dependencies to a plug-in package, you must
@@ -84,9 +107,8 @@
 # To simplify this step, you can add a requirements file in the plug-in package:
 #
 print("\033[1m gltf_plugin/requirements.txt: \n \033[0m")
-requirements_path = plugin_path / "requirements.txt"
-with requirements_path.open("r") as file:
-    for line in file.readlines():
+with (Path(plugin_path) / "requirements.txt").open(mode="r") as f:
+    for line in f.readlines():
+        print("\t\t\t" + line)
 
@@ -94,9 +116,9 @@
 # Download the script for your operating system.
 #
 # - For Windows, download this
-# :download:`PowerShell script `.
+# :download:`PowerShell script `.
 # - For Linux, download this
-# :download:`Shell script `.
+# :download:`Shell script `.
# # Run the downloaded script with the mandatory arguments: # @@ -119,21 +141,27 @@ # # create_sites_for_python_operators.sh -pluginpath /path/to/plugin -zippath /path/to/plugin/assets/linx64.zip # noqa: E501 -site_path = plugin_path / "assets" / "gltf_sites_winx64.zip" -if os.name == "nt" and not site_path.exists(): +import os + +if os.name == "nt" and not (Path(plugin_path) / "assets" / "gltf_sites_winx64.zip").exists(): cmd_file = ( - folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.ps1" + Path(folder_root) + / "doc" + / "source" + / "user_guide" + / "tutorials" + / "custom_operators_and_plugins" + / "create_sites_for_python_operators.ps1" ) args = [ "powershell", - str(cmd_file), + cmd_file, "-pluginpath", - str(plugin_path), + plugin_path, "-zippath", - str(plugin_path / "assets" / "gltf_sites_winx64.zip"), + Path(plugin_path) / "assets" / "gltf_sites_winx64.zip", ] print(args) - import subprocess process = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -147,15 +175,20 @@ ) else: print("Installing pygltf in a virtual environment succeeded") - -elif os.name == "posix" and not site_path.exists(): +elif os.name == "posix" and not (Path(plugin_path) / "assets" / "gltf_sites_linx64.zip").exists(): cmd_file = ( - folder_root / "doc" / "source" / "user_guide" / "create_sites_for_python_operators.sh" + Path(folder_root) + / "doc" + / "source" + / "user_guide" + / "tutorials" + / "custom_operators_and_plugins" + / "create_sites_for_python_operators.sh" ) run_cmd = f"{cmd_file}" args = ( f' -pluginpath "{plugin_path}" ' - f'-zippath "{plugin_path / "assets" / "gltf_sites_winx64.zip"}"' + f"-zippath \"{Path(plugin_path)/'assets'/'gltf_sites_linx64.zip'}\"" ) print(run_cmd + args) os.system(f"chmod u=rwx,o=x {cmd_file}") @@ -181,14 +214,14 @@ # Python plugins are not supported in process. dpf.start_local_server(config=dpf.AvailableServerConfigs.GrpcServer) -tmp = Path(dpf.make_tmp_dir_server()) -dpf.upload_files_in_folder(dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), plugin_path) +tmp = dpf.make_tmp_dir_server() +dpf.upload_files_in_folder(dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), plugin_path) dpf.upload_file( - str(plugin_path) + ".xml", dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin.xml") + str(plugin_path) + ".xml", dpf.path_utilities.join(tmp, "plugins", "gltf_plugin.xml") ) dpf.load_library( - dpf.path_utilities.join(str(tmp), "plugins", "gltf_plugin"), + dpf.path_utilities.join(tmp, "plugins", "gltf_plugin"), "py_dpf_gltf", "load_operators", ) @@ -237,14 +270,14 @@ displacement = model.results.displacement() displacement.inputs.mesh_scoping(skin_mesh) displacement.inputs.mesh(skin_mesh) -new_operator.inputs.path(str(tmp / "out")) +new_operator.inputs.path(Path(tmp) / "out") new_operator.inputs.mesh(skin_mesh) new_operator.inputs.field(displacement.outputs.fields_container()[0]) new_operator.run() print("operator ran successfully") -dpf.download_file(tmp / "out.glb", Path.cwd() / "out.glb") +dpf.download_file(Path(tmp) / "out.glb", Path.cwd() / "out.glb") # %% # You can download :download:`output ` from the ``gltf`` operator. 
diff --git a/requirements/requirements_docs.txt b/requirements/requirements_docs.txt index 993c0aa53b0..8951320166b 100644 --- a/requirements/requirements_docs.txt +++ b/requirements/requirements_docs.txt @@ -3,6 +3,8 @@ enum-tools[sphinx]==0.13.0 graphviz==0.21 imageio==2.37.0 imageio-ffmpeg==0.6.0 +jupyter_sphinx==0.5.3 +nbsphinx==0.9.5 pypandoc==1.15 pytest-sphinx==0.6.3 pyvista==0.45.3 diff --git a/src/ansys/dpf/core/dimensionality.py b/src/ansys/dpf/core/dimensionality.py index 6692781b40e..432e0c90f09 100644 --- a/src/ansys/dpf/core/dimensionality.py +++ b/src/ansys/dpf/core/dimensionality.py @@ -67,6 +67,8 @@ def __init__(self, dim_vec=None, nature: natures = natures.vector): self.dim = [3, 3] elif self.nature == natures.scalar: self.dim = [1] + elif self.nature == natures.matrix: + self.dim = [3, 3] def is_1d_dim(self): """Check if dimensionality is 1.""" diff --git a/src/ansys/dpf/core/examples/examples.py b/src/ansys/dpf/core/examples/examples.py index 53ae359f823..6569b0541d2 100644 --- a/src/ansys/dpf/core/examples/examples.py +++ b/src/ansys/dpf/core/examples/examples.py @@ -29,7 +29,7 @@ from ansys.dpf.core.core import upload_file_in_tmp_folder -def get_example_required_minimum_dpf_version(file: os.PathLike) -> str: +def get_example_required_minimum_dpf_version(file: str) -> str: """Return the minimal DPF server version required to run the example, as declared in a note. Parameters diff --git a/src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py b/src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py new file mode 100644 index 00000000000..ca646218f04 --- /dev/null +++ b/src/ansys/dpf/core/examples/python_plugins/custom_operator_example.py @@ -0,0 +1,139 @@ +# Copyright (C) 2020 - 2025 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Example of a custom DPF operator in Python.""" + +from ansys.dpf import core as dpf +from ansys.dpf.core.custom_operator import CustomOperatorBase +from ansys.dpf.core.operator_specification import ( + CustomSpecification, + PinSpecification, + SpecificationProperties, +) + + +class CustomOperator(CustomOperatorBase): + """Example of a custom DPF operator coded in Python.""" + + @property + def name(self): + """Return the scripting name of the operator in Snake Case.""" + return "my_custom_operator" + + @property + def specification(self) -> CustomSpecification: + """Create the specification of the custom operator. 
+ + The specification declares: + - the description of the operator + - the inputs of the operator + - the outputs of the operator + - the properties of the operator (a username, a category, a required license) + - the changelog of the operator (starting with DPF 2026 R1) + """ + # Instantiate the custom specification + spec = CustomSpecification() + # Set the description of the operator + spec.description = "What the Operator does. You can use MarkDown and LaTeX in descriptions." + # Define the inputs of the operator if any + spec.inputs = { + 0: PinSpecification( + name="input_0", + type_names=[dpf.Field, dpf.FieldsContainer], + document="Describe input pin 0.", + ), + } + # Define the outputs of the operator if any + spec.outputs = { + 0: PinSpecification( + name="output_0", type_names=[dpf.Field], document="Describe output pin 0." + ), + } + # Define the properties of the operator if any + spec.properties = SpecificationProperties( + user_name="my custom operator", # Optional, defaults to the scripting name with spaces + category="my_category", # Optional, defaults to 'other' + license="any_dpf_supported_increments", # Optional, defaults to None + ) + + # Operator changelog and versioning is only available after DPF 2025R2 + try: + from ansys.dpf.core.changelog import Changelog + + # Set the changelog of the operator to track changes + spec.set_changelog( + Changelog() + .patch_bump("Describe a patch bump.") + .major_bump("Describe a major bump.") + .minor_bump("Describe a minor bump.") + .expect_version("1.1.0") # Checks the resulting version is as expected + ) + except ModuleNotFoundError as e: + if "ansys.dpf.core.changelog" in e.msg: + pass + else: + raise e + + return spec + + def run(self): + """Run the operator and execute the logic implemented here. + + This method defines the behavior of the operator. + + Request the inputs with the method ``get_input``, + perform operations on the data, + then set the outputs with the method ``set_output``, + and finally call ``set_succeeded``. + + In this example, the operator changes the name of a Field. + + """ + # First get the field in input by calling get_input for the different types supported + # # Try requesting the input as a Field + field: dpf.Field = self.get_input(0, dpf.Field) + # # If function returns None, there is no Field connected to this input + if field is None: + # # Try requesting the input as a FieldsContainer + field: dpf.FieldsContainer = self.get_input(0, dpf.FieldsContainer).get_field(0) + # # If the input is optional, set its default value + # # If the input is not optional and empty, raise an error + if field is None: + raise ValueError( + "my_custom_operator: mandatory input 'input_0' is empty or of an unsupported type." 
+ ) + + # Perform some operations on the data + field.name = "new_field_name" + + # Set the output of the operator + self.set_output(0, field) + + # And declare the operator run a success + self.set_succeeded() + + +def load_operators(*args): + """Mandatory entry-point for the server to record the operators of the plugin.""" + from ansys.dpf.core.custom_operator import record_operator + + record_operator(CustomOperator, *args) diff --git a/src/ansys/dpf/core/field.py b/src/ansys/dpf/core/field.py index 6605cddc495..259ddfaef00 100644 --- a/src/ansys/dpf/core/field.py +++ b/src/ansys/dpf/core/field.py @@ -50,6 +50,7 @@ from ansys.dpf.gate.errors import DPFServerException if TYPE_CHECKING: # pragma: nocover + from ansys.dpf.core.dimensionality import Dimensionality from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.meshed_region import MeshedRegion from ansys.dpf.core.results import Result @@ -442,7 +443,7 @@ def get_entity_data_by_id(self, id: int) -> dpf_array.DPFArray: data.shape = (data.size // n_comp, n_comp) return data - def append(self, data, scopingid): + def append(self, data: float | list[float] | np.ndarray[np.float64], scopingid: int): """Append data to the Field.""" if isinstance(data, list): if isinstance(data[0], list): @@ -520,7 +521,7 @@ def to_nodal(self): def plot( self, shell_layers: eshell_layers = None, - deform_by: Union[Field, Result, Operator] = None, + deform_by: Field | Result | Operator = None, scale_factor: float = 1.0, meshed_region: MeshedRegion = None, **kwargs, @@ -675,7 +676,7 @@ def dimensionality(self): return self.field_definition.dimensionality @dimensionality.setter - def dimensionality(self, value): + def dimensionality(self, value: Dimensionality): fielddef = self.field_definition fielddef.dimensionality = value self.field_definition = fielddef diff --git a/src/ansys/dpf/core/field_definition.py b/src/ansys/dpf/core/field_definition.py index fb87d249812..037e61e449b 100644 --- a/src/ansys/dpf/core/field_definition.py +++ b/src/ansys/dpf/core/field_definition.py @@ -262,7 +262,7 @@ def shell_layers(self, value): self._api.csfield_definition_set_shell_layers(self, value) @dimensionality.setter - def dimensionality(self, value): + def dimensionality(self, value: Dimensionality): if not isinstance(value, Dimensionality): raise TypeError("the dimensionality needs to be of type Dimensionality") self._api.csfield_definition_set_dimensionality( diff --git a/src/ansys/dpf/core/fields_factory.py b/src/ansys/dpf/core/fields_factory.py index fd107041996..11964cc0517 100644 --- a/src/ansys/dpf/core/fields_factory.py +++ b/src/ansys/dpf/core/fields_factory.py @@ -26,30 +26,39 @@ Contains functions to simplify creating fields. """ +from __future__ import annotations + +from typing import TYPE_CHECKING + import numpy as np from ansys.dpf.core import Field, server as server_module from ansys.dpf.core.common import locations, natures from ansys.dpf.gate import field_capi, field_grpcapi +if TYPE_CHECKING: # pragma: nocover + from ansys.dpf.core.server_types import AnyServerType + -def field_from_array(arr, server=None): +def field_from_array( + arr: list | np.ndarray, + server: AnyServerType = None, +) -> Field: """Create a DPF vector or scalar field from a numpy array or a Python list. Parameters ---------- - arr : np.ndarray or List + arr: Numpy array or Python list containing either 1 or 3 dimensions. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. 
The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field - Field constructed from the array. + field: + DPF field constructed from the array. """ from ansys.dpf.core import Field, natures @@ -83,7 +92,13 @@ def field_from_array(arr, server=None): return field -def create_matrix_field(num_entities, num_lines, num_col, location=locations.nodal, server=None): +def create_matrix_field( + num_entities, + num_lines, + num_col, + location: locations = locations.nodal, + server: AnyServerType = None, +) -> Field: """Create a matrix :class:`ansys.dpf.core.Field`. This field contains entities that have a matrix format. This is a "reserve" mechanism, @@ -91,24 +106,22 @@ def create_matrix_field(num_entities, num_lines, num_col, location=locations.nod Parameters ---------- - num_entities : int + num_entities: Number of entities to reserve. num_lines : int Number of matrix line. num_col : int Number of matrix columns. - location : str, optional + location: Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field of the requested format. Examples @@ -123,14 +136,18 @@ def create_matrix_field(num_entities, num_lines, num_col, location=locations.nod return _create_field( server=server, nature=natures.matrix, - nentities=num_entities, + n_entities=num_entities, location=location, ncomp_m=num_col, ncomp_n=num_lines, ) -def create_3d_vector_field(num_entities, location=locations.nodal, server=None): +def create_3d_vector_field( + num_entities, + location: locations = locations.nodal, + server: AnyServerType = None, +) -> Field: """Create a specific :class:`ansys.dpf.core.Field` with entities that have 3D vector format. This is a "reserve" mechanism, not a resize one. This means that you @@ -138,21 +155,18 @@ def create_3d_vector_field(num_entities, location=locations.nodal, server=None): Parameters ---------- - num_entities : int + num_entities: Number of entities to reserve - - location : str, optional + location: Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field of the requested format. Examples @@ -166,7 +180,11 @@ def create_3d_vector_field(num_entities, location=locations.nodal, server=None): return _create_field(server, natures.vector, num_entities, location) -def create_tensor_field(num_entities, location=locations.nodal, server=None): +def create_tensor_field( + num_entities, + location: locations = locations.nodal, + server: AnyServerType = None, +) -> Field: """Create a specific :class:`ansys.dpf.core.Field` with entities that have a 3*3 format. This is a "reserve" mechanism, not a resize one. This means that you @@ -174,20 +192,18 @@ def create_tensor_field(num_entities, location=locations.nodal, server=None): Parameters ---------- - num_entities : int + num_entities: Number of entities to reserve. - location : str, optional + location: Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. 
- - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field in the requested format. Examples @@ -201,7 +217,11 @@ def create_tensor_field(num_entities, location=locations.nodal, server=None): return _create_field(server, natures.symmatrix, num_entities, location) -def create_scalar_field(num_entities, location=locations.nodal, server=None): +def create_scalar_field( + num_entities, + location: locations = locations.nodal, + server: AnyServerType = None, +) -> Field: """Create a specific `:class:`ansys.dpf.core.Field` with entities that are scalar. This is a "reserve" mechanism, not a resize one. This means that you @@ -209,20 +229,18 @@ def create_scalar_field(num_entities, location=locations.nodal, server=None): Parameters ---------- - num_entities : int + num_entities: Number of entities to reserve - location : str, optional + location: Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field in the requested format. Examples @@ -236,7 +254,12 @@ def create_scalar_field(num_entities, location=locations.nodal, server=None): return _create_field(server, natures.scalar, num_entities, location) -def create_vector_field(num_entities, num_comp, location=locations.nodal, server=None): +def create_vector_field( + num_entities, + num_comp, + location: locations = locations.nodal, + server: AnyServerType = None, +) -> Field: """Create a specific `:class:`ansys.dpf.core.Field` with entities that have a vector format. This is a "reserve" mechanism, not a resize one. This means that you @@ -244,22 +267,20 @@ def create_vector_field(num_entities, num_comp, location=locations.nodal, server Parameters ---------- - num_entities : int + num_entities: Number of entities to reserve. - num_comp : int + num_comp: Number of vector components. - location : str, optional + location: Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field in the requested format. Examples @@ -275,8 +296,12 @@ def create_vector_field(num_entities, num_comp, location=locations.nodal, server def create_overall_field( - value, nature, num_entities, num_comp, location=locations.overall, server=None -): + value: float, + nature: natures = natures.scalar, + num_entities: int = 1, + num_comp: int = 1, + server: AnyServerType = None, +) -> Field: """Create a specific `:class:`ansys.dpf.core.Field` with entities that have an overall location. Regarding the nature of the entity contained in the field, we set the same value @@ -284,29 +309,26 @@ def create_overall_field( Parameters ---------- - value : float + value: Value of the entity - nature : str + nature: Nature of the field entity data. For example: - :class:`ansys.dpf.core.natures.matrix` - :class:`ansys.dpf.core.natures.scalar` - num_entities : int + + num_entities: Number of entities to reserve. 
- num_comp : int + num_comp: Number of vector components. - location : str, optional - Location of the field. Options are in :class:`locations `. - The default is ``dpf.locations.nodal``. - - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. Returns ------- - field : Field + field: DPF field in the requested format. Examples @@ -318,13 +340,22 @@ def create_overall_field( >>> field = fields_factory.create_overall_field(1.0, natures.scalar, 10, 1) """ - overall_field = _create_field(server, nature, num_entities, location, ncomp_n=num_comp) + overall_field = _create_field( + server, nature, num_entities, location=locations.overall, ncomp_n=num_comp + ) for i in range(num_entities): overall_field.append(value, i) return overall_field -def _create_field(server, nature, nentities, location=locations.nodal, ncomp_n=0, ncomp_m=0): +def _create_field( + server: AnyServerType = None, + nature: natures = natures.scalar, + n_entities: int = 0, + location: locations = locations.nodal, + ncomp_n: int = 0, + ncomp_m: int = 0, +) -> Field: """Create a specific :class:`ansys.dpf.core.Field`. This is a "reserve" mechanism, not a resize one. This means that you @@ -332,45 +363,41 @@ def _create_field(server, nature, nentities, location=locations.nodal, ncomp_n=0 Parameters ---------- - server : ansys.dpf.core.server, optional + server: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. - nature : str + nature: Nature of the field entity data. For example: - :class:`ansys.dpf.core.natures.matrix` - :class:`ansys.dpf.core.natures.scalar` - nentities : int + n_entities: Number of entities to reserve. - - location : str, optional + location: Location of the field. Options are in :class:`locations `. The default is ``dpf.locations.nodal``. - - ncomp_n : int + ncomp_n: Number of lines. - ncomp_m : int + ncomp_m: Number of columns. Returns ------- - field : Field + field: DPF field in the requested format. 
""" - if server is None: - server = server_module.get_or_create_server(server) + server = server_module.get_or_create_server(server) api = server.get_api_for_type(capi=field_capi.FieldCAPI, grpcapi=field_grpcapi.FieldGRPCAPI) api.init_field_environment(server) internal_obj = Field._field_create_internal_obj( api=api, client=server.client, nature=nature, - nentities=nentities, - location=location, + nentities=n_entities, + location=str(location), ncomp_n=ncomp_n, ncomp_m=ncomp_m, ) - field = Field(field=internal_obj, server=server) - return field + return Field(field=internal_obj, server=server) diff --git a/src/ansys/dpf/core/property_field.py b/src/ansys/dpf/core/property_field.py index 67b6b844a29..6434db8c5dc 100644 --- a/src/ansys/dpf/core/property_field.py +++ b/src/ansys/dpf/core/property_field.py @@ -164,10 +164,8 @@ def location(self): 'Nodal' """ - if self.scoping: - return self.scoping.location - else: - return None + location = self.scoping.location + return location if location else None @location.setter def location(self, value): @@ -190,12 +188,7 @@ def location(self, value): 'Nodal' """ - if self.scoping: - self.scoping.location = value - else: - raise Exception( - "Property field location is based on scoping, and scoping is not defined" - ) + self.scoping.location = value @property def component_count(self): diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py index 305bfdd6f88..68a1d38528d 100644 --- a/src/ansys/dpf/core/server.py +++ b/src/ansys/dpf/core/server.py @@ -26,6 +26,8 @@ Contains the directives necessary to start the DPF server. """ +from __future__ import annotations + import copy import functools import inspect @@ -397,7 +399,7 @@ def connect(): raise e -def get_or_create_server(server: BaseServer) -> Union[BaseServer, None]: +def get_or_create_server(server: BaseServer | None) -> Union[BaseServer, None]: """Return the given server or if None, creates a new one. Parameters diff --git a/src/ansys/dpf/core/string_field.py b/src/ansys/dpf/core/string_field.py index a66e57ddfa1..b767aab7b6f 100644 --- a/src/ansys/dpf/core/string_field.py +++ b/src/ansys/dpf/core/string_field.py @@ -143,10 +143,8 @@ def location(self): 'Nodal' """ - if self.scoping: - return self.scoping.location - else: - return None + location = self.scoping.location + return location if location else None @location.setter def location(self, value): @@ -168,12 +166,7 @@ def location(self, value): 'Nodal' """ - if self.scoping: - self.scoping.location = value - else: - raise Exception( - "Property field location is based on scoping, and scoping is not defined" - ) + self.scoping.location = value @property def component_count(self):