diff --git a/doc/source/changelog/1018.miscellaneous.md b/doc/source/changelog/1018.miscellaneous.md index 97d5e2c7f..47bdfd5d6 100644 --- a/doc/source/changelog/1018.miscellaneous.md +++ b/doc/source/changelog/1018.miscellaneous.md @@ -1 +1 @@ -standardize type hints for pre, post and utils subpackages \ No newline at end of file +standardize type hints for ``pre``, ``post``, and ``utils`` subpackages \ No newline at end of file diff --git a/doc/source/changelog/1032.fixed.md b/doc/source/changelog/1032.fixed.md index 611ca9d4c..193e8f2d0 100644 --- a/doc/source/changelog/1032.fixed.md +++ b/doc/source/changelog/1032.fixed.md @@ -1 +1 @@ -remove wheelhosue from doc/source/_static dir \ No newline at end of file +remove wheelhouse from doc/source/_static dir \ No newline at end of file diff --git a/doc/source/changelog/1033.maintenance.md b/doc/source/changelog/1033.maintenance.md index 6e7c1377d..548bfc9b0 100644 --- a/doc/source/changelog/1033.maintenance.md +++ b/doc/source/changelog/1033.maintenance.md @@ -1 +1 @@ -bump version to 0.12dev0 \ No newline at end of file +bump version to 0.12.dev0 \ No newline at end of file diff --git a/doc/source/changelog/1043.documentation.md b/doc/source/changelog/1043.documentation.md new file mode 100644 index 000000000..7c8f8e10d --- /dev/null +++ b/doc/source/changelog/1043.documentation.md @@ -0,0 +1 @@ +overall review \ No newline at end of file diff --git a/doc/source/contribute/developer.rst b/doc/source/contribute/developer.rst index 8fecf8b1b..ad02175db 100644 --- a/doc/source/contribute/developer.rst +++ b/doc/source/contribute/developer.rst @@ -1,5 +1,5 @@ -Contributing as a developer -########################### +Contribute as a developer +######################### .. grid:: 1 2 3 3 @@ -8,14 +8,14 @@ Contributing as a developer :link: fork-the-repository :link-type: ref - Learn how to fork the project and get your own copy. + Fork the project to create a copy. .. 
grid-item-card:: :fa:`download` Clone the repository :padding: 2 2 2 2 :link: clone-the-repository :link-type: ref - Download your own copy in your local machine. + Clone the repository to download the copy to your local machine. .. grid-item-card:: :fa:`download` Install for developers :padding: 2 2 2 2 @@ -29,21 +29,22 @@ Contributing as a developer :link: run-tests :link-type: ref - Verify your changes by testing the project. + Verify your changes to the project by running tests. .. grid-item-card:: :fa:`computer` Code style compliance :padding: 2 2 2 2 :link: code-style :link-type: ref - Adhere to code style + Adhere to code style. .. grid-item-card:: :fa:`arrows-spin` Run the CI/CD pipelines :padding: 2 2 2 2 :link: run-pipelines :link-type: ref - Understand the different CI/CD pipelines. + Understand the different CI/CD pipelines that are executed + automatically. .. _fork-the-repository: @@ -52,13 +53,13 @@ Fork the repository =================== Forking the repository is the first step to contributing to the project. This -allows you to have your own copy of the project so you can make changes without +allows you to have your own copy of the project so that you can make changes without affecting the main project. Once you have made your changes, you can submit a -pull-request to the main project to have your changes reviewed and merged. +pull request to the main project to have your changes reviewed and merged. .. button-link:: https://github.com/ansys/pyansys-heart/fork :color: primary - :align: center + :align: left :fa:`code-fork` Fork this project @@ -71,7 +72,7 @@ pull-request to the main project to have your changes reviewed and merged. Clone the repository ==================== -Clone the latest version of PyAnsys Heart in development mode by running this code: +Clone the repository in development mode: .. code-block:: bash @@ -79,36 +80,34 @@ Clone the latest version of PyAnsys Heart in development mode by running this co .. 
note:: - If you are not an Ansys employee, you need to :ref:`fork the repository ` and - replace ``ansys`` with your GitHub user name in the ``git clone`` - command. + If you are not an Ansys employee, you must :ref:`fork the repository ` and + replace ``ansys`` with your GitHub user name in the ``git clone`` command. .. _install-for-developers: Install for developers ====================== -Installing PyAnsys Heart in development mode allows you to perform changes to the code -and see the changes reflected in your environment without having to reinstall +Installing PyAnsys Heart in development mode lets you change the code +and see these changes reflected in your environment without having to reinstall the library every time you make a change. -Virtual environment -------------------- +Set up a virtual environment +---------------------------- -Start by navigating to the project's root directory by running: +#. Navigate to the project's root directory : .. code-block:: - cd pyansys-heart + cd pyansys-heart -Then, create a new virtual environment named ``.venv`` to isolate your system's -Python environment by running: +#. Create a virtual environment named ``.venv`` to isolate your Python environment: .. code-block:: text python -m venv .venv -Finally, activate this environment by running: +#. Activate the virtual environment: .. tab-set:: @@ -134,67 +133,67 @@ Finally, activate this environment by running: source .venv/bin/activate -Development mode ----------------- +Install in development mode +--------------------------- -Now, install PyAnsys Heart in editable mode by running: +#. Install PyAnsys Heart in editable mode: -.. code-block:: text - - python -m pip install --editable . + .. code-block:: text -Verify the installation by checking the version of the library: + python -m pip install --editable . +#. Verify the installation by checking the version of the library: .. 
code-block:: python from ansys.heart import __version__ - print(f"PyAnsys Heart version is {__version__}") .. jinja:: .. code-block:: text - >>> PyAnsys Heart version is {{ PYANSYS_HEART_VERSION }} + >>> PyAnsys Heart version is {{ PYANSYS_HEART_VERSION }}. -Install tox +Install Tox ----------- -Once the project is installed, you can install `tox`_. This is a cross-platform +Once the project is installed, you can install `Tox`_. This is a cross-platform automation tool. The main advantage of Tox is that it eases routine tasks like project testing, documentation generation, and wheel building in separate and isolated Python -virtual environments. To install Tox, run: +virtual environments. -.. code-block:: text +#. Install Tox: + + .. code-block:: text - python -m pip install tox + python -m pip install tox -Finally, verify the installation by listing all the different environments +#. Verify the installation by listing all the different environments (automation rules) for PyAnsys Heart: -.. code-block:: text + .. code-block:: text - python -m tox list + python -m tox list -.. jinja:: toxenvs + .. jinja:: toxenvs - .. dropdown:: Default Tox environments - :animate: fade-in - :icon: three-bars + .. dropdown:: Default Tox environments + :animate: fade-in + :icon: three-bars - .. list-table:: - :header-rows: 1 - :widths: auto + .. list-table:: + :header-rows: 1 + :widths: auto - * - Environment - - Description - {% for environment in envs %} - {% set name, description = environment.split("->") %} - * - {{ name }} - - {{ description }} - {% endfor %} + * - Environment + - Description + {% for environment in envs %} + {% set name, description = environment.split("->") %} + * - {{ name }} + - {{ description }} + {% endfor %} .. _run-tests: @@ -202,9 +201,9 @@ Run the tests ============= Once you have made your changes, you can run the tests to verify that your -modifications did not break the project. 
PyAnsys Heart tests support different markers -to allow testing with/without coverage (and against specific python versions). -These markers are associated with dedicated `Tox`_ environments. +changes did not break the project. PyAnsys Heart tests support different markers +to allow testing with or without coverage (and against specific Python versions). +These markers are associated with dedicated Tox environments. .. jinja:: toxenvs @@ -228,9 +227,9 @@ These markers are associated with dedicated `Tox`_ environments. .. Note:: - The preceding test commands run all tests, including those that require Fluent (which take longer). For more - selective testing, ``-- -vv -m "not requires_fluent or (not extract_models)"`` or ``-- -vv -m "requires_fluent"`` can be - appended to tox testing commands. + The preceding test code runs all tests, including those that require Fluent (which take longer). For more + selective testing, append ``-- -vv -m "not requires_fluent or (not extract_models)"`` or ``-- -vv -m "requires_fluent"`` + to Tox testing commands: .. code:: bash @@ -244,12 +243,12 @@ These markers are associated with dedicated `Tox`_ environments. Check code style ================ -PyAnsys Heart follows the PEP8 standard as outlined in +PyAnsys Heart follows the PEP 8 standard as described in `PEP 8 `_ in -the *PyAnsys Developer's Guide* and implements style checking using +the *PyAnsys developer's guide* and implements style checking using `pre-commit `_. -To ensure your code meets minimum code styling standards, run the following tox environment: +To ensure your code meets minimum code styling standards, run the following Tox environment: .. jinja:: toxenvs @@ -293,10 +292,10 @@ Run CI/CD pipelines PyAnsys Heart has a set of CI/CD pipelines that are executed automatically when certain events are detected in the repository. Some of these events include opening a -pull-request, labelling a pull-request, and tagging a commit. 
+pull request, labeling a pull request, and tagging a commit. -You can label a pull-request to skip certain jobs in the pipeline. Supported -labels are listed in the `PyAnsys Heart labels`_ page. +You can label a pull request to skip certain jobs in the pipeline. Supported +labels are listed on the `PyAnsys Heart labels`_ page. .. list-table:: :widths: auto :header-rows: 1 * - Label - Description * - ``test:skip`` - - Skip the model generation tests \ No newline at end of file + - Skip the model generation tests diff --git a/doc/source/contribute/documentarian.rst b/doc/source/contribute/documentarian.rst index ece289a38..0ae3bb59d 100644 --- a/doc/source/contribute/documentarian.rst +++ b/doc/source/contribute/documentarian.rst @@ -1,5 +1,5 @@ -Contributing as a documentarian -############################### +Contribute as a documentarian +############################# .. grid:: 1 2 3 3 :padding: 2 2 2 2 @@ -8,54 +8,53 @@ Contributing as a documentarian :link: write-documentation :link-type: ref - Explain how to get started, use, and contribute to the project. + Learn how to get started, use, and contribute to the project. .. grid-item-card:: :fa:`laptop-code` Add a new example :link: write-examples :link-type: ref - Showcase the capabilities of PyAnsys Heart by adding a new example. + Write a new example to showcase the capabilities of PyAnsys Heart. .. grid-item-card:: :fa:`book` Build the documentation :link: build-documentation :link-type: ref - Render the documentation to see your changes reflected. + Build the documentation to see your changes rendered. .. _write-documentation: Write documentation =================== -The documentation generator used in PyAnsys Heart is `Sphinx`_. Most of the documents -are written in `reStructuredText`_. Some parts of the documentation, like the -`examples <../examples/index>`_, use a mix of `reStructuredText`_ and Python, thanks to `Sphinx-Gallery`_. 
-If you are interested in writing examples, see the :ref:`writing examples ` -section. +`Sphinx`_ is the tool used to generate PyAnsys Heart documentation. You write most of the content +in `ReStructuredText`_ files. However, some of the content, like the +`examples <../examples/index>`_, use a mix of `ReStructuredText`_ and Python files, thanks to `Sphinx-Gallery`_. +If you are interested in writing examples, see the :ref:`write-examples`. The documentation is located in the ``doc/source`` directory. The landing page -is declared in the ``doc/source/index.rst`` file. The rest of the files contain -the main pages of different sections of the documentation. Finally, the -``doc/source/_static/`` folder contains various assets like images, and CSS +is declared in the ``doc/source/index.rst`` file. The subdirectories contain +the pages of different sections of the documentation. Finally, the +``doc/source/_static/`` directory contains various assets like images and CSS files. -The layout of the ``doc/source`` directory is reflected in the slug of the +The layout of the ``doc/source`` directory is reflected in the URLs of the online documentation. For example, the -``doc/source/contribute/documentarian.rst`` renders as -``https://heart.docs.pyansys.com/contribute/documentarian.html``. +``doc/source/contribute/documentarian.rst`` file renders as the +``https://heart.docs.pyansys.com/contribute/documentarian.html`` URL. -Thus, if you create a new file, it important to follow these rules: +Thus, if you create a file, it is important to follow these rules: -- Use lowercase letters for file and directory names -- Use short and descriptive names -- Use hyphens to separate words -- Play smart with the hierarchy of the files and directories +- Use lowercase letters for file and directory names. +- Use short and descriptive names. +- Use hyphens to separate words. 
+- Logically organize the hierarchy of the files and directories -All files need to be included in a table of contents. No dangling files are -permitted. If a file is not included in the table of contents, Sphinx raises a -warning. +You must include all files in the table of contents. Sphinx does not permit any orphan files. +If you do not include a file in the table of contents, Sphinx raises a warning that causes +the build to fail. -A table of contents can be declared using a directive like this: +You declare the table of contents using a directive like this: .. code-block:: rst @@ -77,46 +76,50 @@ Write a new example =================== The `examples <../examples/index>`_ section of the documentation showcases different -capabilities of PyAnsys Heart. Each example (grouped into folders of related examples) -is a standalone Python script. Despite being ``*.py`` files, they are written in a mix -of `reStructuredText`_ and Python. This is possible thanks to the `Sphinx-Gallery`_ -Sphinx extension. +capabilities of PyAnsys Heart. Each example is a standalone Python script. You group +related examples into subdirectories. Despite being PY files, they are written in a mix +of `ReStructuredText`_ and Python. This is possible thanks to the `Sphinx-Gallery`_ +extension. Documentarians writing new examples are encouraged to familiarize themselves with -`structuring Python scripts for Sphinx-Gallery `_. -Once the ``.py`` file for a new example is properly set up, Sphinx-Gallery automatically -generates `Sphinx`_ `reStructuredText`_ files from it. The rendering of the resulting reST provides -users with ``.ipynb`` (Jupyter notebook) and ``.py`` files of each example, which users can download. +`Structuring Python scripts for Sphinx-Gallery `_. +Once the PY file for a new example is properly set up, Sphinx-Gallery automatically +generates `Sphinx`_ `ReStructuredText`_ (RST) files from it. 
The rendering of the resulting +RST file for each example provides links for downloading an IPYNB (Jupyter notebook) and PY file. Finally, here are some tips for writing examples: - Start the example with an explanation of the main topic. Try to use as many relevant - keywords as possible in this section to optimize for Search Engine Optimization. + keywords as possible in this section for search engine optimization. - Include an explanation with each code cell. The explanations should be included before, not after, the corresponding code. -- The examples are built with the documentation. As part of the build process, - screenshots of rendered graphics are inserted in the document. You do not need +- The examples are built with the documentation. During the build process, + screenshots are inserted in the rendered document. You do not need to include the screenshots yourself. -- When creating a new folder which includes more than one related examples, ensure - a ``README.txt`` file is also included. This file should contain reST to be used as the header - for the index page corresponding to the subsection for these examples in the generated documentation. +- When creating a child directory that is to include multiple related examples, ensure that + you include a ``README.txt`` file with the ReStructuredText content to + use for the index page for this subsection's examples in the generated documentation. .. _build-documentation: Build the documentation ======================= -`Tox`_ is used for automating the build of the documentation. To install Tox, run +`Tox`_ is used for automating the build of the documentation. + +To install Tox: .. code-block:: text python -m pip install tox -There are different tox environments for building the HTML documentation, building the PDF documentation, -and checking the integrity of external links. 
The following environments are available: +There are different environments for cleaning the build, building the documentation +in different formats such as HTML and PDF, and running the tests. + +The following environments are available: .. jinja:: toxenvs diff --git a/doc/source/contribute/user.rst b/doc/source/contribute/user.rst index 5862bde02..d04bca510 100644 --- a/doc/source/contribute/user.rst +++ b/doc/source/contribute/user.rst @@ -1,10 +1,15 @@ -Contributing as a user -###################### +Contribute as a user +#################### Users can contribute in a variety of ways, such as reporting bugs, requesting new features, testing in-development features, starting discussions, answering questions, and sharing their work with the community. +.. warning:: + + Do not include any proprietary or sensitive information when reporting bugs + or showcasing your work. + .. grid:: 1 2 3 3 :padding: 2 2 2 2 @@ -33,7 +38,7 @@ questions, and sharing their work with the community. :link: start-a-discussion :link-type: ref - Want to discuss something? Start a discussion here. + Want to discuss something? Start or contribute to a discussion. .. grid-item-card:: :fa:`comment-dots` Answer questions :padding: 2 2 2 2 @@ -61,14 +66,14 @@ questions, and sharing their work with the community. Report bugs =========== -If you encounter a bug or an issue while using the project, please report it. -Your feedback helps to identify problems. +If you encounter a bug or an issue while using the project, report it. +Your feedback helps to identify problems and get them resolved. -- Search the `PyAnsys Heart issues`_ to see if the issue has already been reported. +- Search the `PyAnsys Heart Issues`_ page to see if the issue has already been reported. -- Create a new issue if it hasn’t been reported. +- Create an issue if one doesn't already exist. - - Include a clear description of the problem. + - Include a clear description of the issue. - Provide steps to reproduce the issue. 
- Mention the version of the project you're using. - Include screenshots or logs if possible. @@ -79,21 +84,22 @@ Request a new feature ===================== Do you have an idea for a new feature or an improvement? Your suggestions are -welcome. You can request a new feature by creating an issue in the `PyAnsys Heart issues`_ -board. +welcome. You can request a new feature by creating an issue on the `PyAnsys Heart Issues`_ +page. .. _test-a-new-feature: Test a new feature ================== -It is possible to test a new feature before it is officially released. To do -so, you can install PyAnsys Heart from the source code by following the steps below. +You can test a new feature before it is officially released. To do +so, you can install PyAnsys Heart from the source code by performing the +steps in the following child topics. Clone the repository -------------------- -Clone and install the latest version of PyAnsys Heart by running this code: +Clone and install the repository: .. code-block:: bash @@ -102,85 +108,84 @@ Clone and install the latest version of PyAnsys Heart by running this code: Install for users ----------------- -Installing the latest version of PyAnsys Heart allows you to test latest features as -they are being developed without having to wait for releases. - -Virtual environment -~~~~~~~~~~~~~~~~~~~ +Install the latest version of PyAnsys Heart to test the latest features as +they are being developed, without having to wait for releases. -Start by navigating to the project's root directory by running: +Set up a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. code-block:: +#. Navigate to the project's root directory: - cd pyansys-heart + .. code-block:: -Then, create a new virtual environment named ``.venv`` to isolate your system's -Python environment by running: + cd pyansys-heart -.. code-block:: text +#. Create a new virtual environment named ``.venv`` to isolate your system's + Python environment: - python -m venv .venv + .. 
code-block:: text -Finally, activate this environment by running: + python -m venv .venv -.. tab-set:: +3. Activate this environment: - .. tab-item:: Windows + .. tab-set:: - .. tab-set:: + .. tab-item:: Windows - .. tab-item:: CMD + .. tab-set:: - .. code-block:: text + .. tab-item:: CMD - .venv\Scripts\activate.bat + .. code-block:: text - .. tab-item:: PowerShell + .venv\Scripts\activate.bat - .. code-block:: text + .. tab-item:: PowerShell - .venv\Scripts\Activate.ps1 + .. code-block:: text - .. tab-item:: macOS/Linux/UNIX + .venv\Scripts\Activate.ps1 - .. code-block:: text + .. tab-item:: macOS/Linux/UNIX - source .venv/bin/activate + .. code-block:: text -Latest version installation -~~~~~~~~~~~~~~~~~~~~~~~~~~~ + source .venv/bin/activate -Now, install PyAnsys Heart in editable mode by running: +Install the latest version +~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. code-block:: text +#. Install PyAnsys Heart in editable mode: - python -m pip install . + .. code-block:: text -Verify the installation by checking the version of the library: + python -m pip install . +2. Verify the installation by checking the version of the library: -.. code-block:: python + .. code-block:: python - from ansys.heart import __version__ + from ansys.heart import __version__ - print(f"PyAnsys Heart version is {__version__}") + print(f"PyAnsys Heart version is {__version__}.") -.. jinja:: + .. jinja:: - .. code-block:: text + .. code-block:: text - >>> PyAnsys Heart version is {{ PYANSYS_HEART_VERSION }} + >>> PyAnsys Heart version is {{ PYANSYS_HEART_VERSION }}. .. _start-a-discussion: Start a discussion ================== -Complex topics may require a discussion. Whether you want to know how to use +Complex topics might require a discussion. Whether you want to know how to use PyAnsys Heart for solving your specific problem or you have a suggestion for a new feature, a discussion is a good place to start. You can open a new discussion -in the `PyAnsys Heart discussions`_ section. 
+on the `PyAnsys Heart Discussions`_ page. .. _answer-questions: @@ -198,8 +203,8 @@ Share your work =============== If you have used PyAnsys Heart to create something interesting, share it with the rest -of the community. You can share your work in the `PyAnsys Heart discussions`_. Include -a brief description of your work and any relevant links that others may find +of the community. You can share your work on the `PyAnsys Heart discussions`_ page. Include +a brief description of your work and any relevant links that others might find useful. .. _view-documentation: diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index 0e25b39e4..5f2762e95 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -1,19 +1,24 @@ .. _contributing: -Contributing -############ +Contribute +########## -Overall guidance on contributing to a PyAnsys repository appears in -`Contribute `_ -in the *PyAnsys Developer's Guide*. Ensure that you are thoroughly familiar -with this guide before attempting to contribute to PyAnsys Heart. +Thank you for your interest in contributing to PyHeart. Contributions for +making the project better can include fixing bugs, adding new features, and +improving the documentation. .. important:: This project adheres to the `Contributor Covenant Code of Conduct`_. By - participating, you agree to uphold this code. + participating, you agree to uphold this code of conduct. + +Overall guidance on contributing to a PyAnsys repository appears in +`Contribute `_ +in the *PyAnsys developer's guide*. Ensure that you are thoroughly familiar +with this guide before attempting to contribute to PyAnsys Heart. -The following contribution information is specific to PyAnsys Heart. Start by selecting your role in the project: +The following contribution information is specific to PyAnsys Heart. Start by +selecting your role in the project: .. 
grid:: 1 2 3 3 :padding: 2 2 2 2 @@ -22,13 +27,13 @@ The following contribution information is specific to PyAnsys Heart. Start by se :link: contribute/user :link-type: doc - Report bugs, suggesting features, and ask questions. + Report bugs, suggest features, and ask questions. .. grid-item-card:: :fa:`book` Documentarian :link: contribute/documentarian :link-type: doc - Improve the documentation and write new guides. + Improve the documentation and write new content. .. grid-item-card:: :fa:`laptop-code` Developer :link: contribute/developer diff --git a/doc/source/getting-started.rst b/doc/source/getting-started.rst index 05b353750..06824fe45 100644 --- a/doc/source/getting-started.rst +++ b/doc/source/getting-started.rst @@ -1,7 +1,7 @@ Getting started =============== -This guide helps you get started with PyAnsys Heart. It provides information on how to install the package, run tests, and check the code style. +This section helps you get started with PyAnsys Heart. It provides information on how to install the package, run tests, and check the code style. .. grid:: 1 2 3 3 :gutter: 1 2 3 3 diff --git a/doc/source/getting-started/installation.rst b/doc/source/getting-started/installation.rst index 9e97d911a..bd079ef9f 100644 --- a/doc/source/getting-started/installation.rst +++ b/doc/source/getting-started/installation.rst @@ -1,37 +1,38 @@ Installation ============ -This guide helps you install PyAnsys Heart. It provides information on how to install -the package from PyPI, from a wheel file, or from the source code. +This page explains how to install PyAnsys Heart from PyPI, the GitHub source code, or a wheel file. -.. Note:: +.. note:: - If you do not have access to *PyAnsys Heart* you can follow the instructions under *Install from a wheel file*. You may need to request the wheel files from your Ansys contact. + If you do not have access to PyAnsys Heart, follow the instructions in :ref:`install_from_wheel`. 
+ You might need to request the wheel file from your Ansys contact. -.. Warning:: +.. warning:: - Consider installing using a virtual environment to avoid conflicts with other packages. + Consider installing using a virtual environment to avoid conflicts with other packages. For more information, + see `Creation of virtual environments `_ in the Python documentation. -PyPI ----- +Install from PyPI +----------------- -Before installing PyAnsys Heart ensure that you have the latest version -of the `pip`_ -package manager, run the following command: +Before you install PyAnsys Heart, ensure that you have the latest version +of the `pip`_ package manager: .. code:: bash python -m pip install --upgrade pip -Then, to install PyAnsys Heart, run the following command: +Then, install PyAnsys Heart: .. code:: bash python -m pip install pyansys-heart -GitHub ------- -To install the latest version of PyAnsys Heart from the source code, +Install from GitHub source code +------------------------------- + +To install the latest version of PyAnsys Heart from the GitHub source code, clone the repository: .. code:: bash @@ -40,24 +41,25 @@ clone the repository: cd pyansys-heart pip install -e . -to verify the installation, run the following command: +Then, verify the installation: .. code:: bash python -m pip install tox tox +.. _install_from_wheel: + Install from a wheel file ------------------------- -if you lack the internet connection, you can install PyAnsys Heart from a wheel file. -You should install PyAnsys Heart by downloading the wheelhouse archive for your -corresponding machine architecture from the repository’s -`PyAnsys Heart release page`_. +If you do not have an internet connection, you can install PyAnsys Heart from a wheel file. +Download the wheelhouse archive for your corresponding machine architecture +from the `repository’s Releases page `_. Each release contains a wheel file for the corresponding Python version and machine architecture. 
For example, to install the wheel file for -Python 3.10 on a Windows machine, run the following command: +Python 3.10 on a Windows machine, run the following commands: .. code:: bash @@ -65,8 +67,4 @@ Python 3.10 on a Windows machine, run the following command: pip install pyansys-heart -f wheelhouse --no-index --upgrade --ignore-installed If you are on Windows with Python 3.12, unzip the wheelhouse archive to a wheelhouse -directory and then install using the same pip install command as in the preceding example. - -Consider installing using a virtual environment to avoid conflicts with other packages. For more information, -refer to the `Python documentation`_. - +directory and then install using the same ``pip install`` command as in the preceding example. diff --git a/doc/source/getting-started/overview.rst b/doc/source/getting-started/overview.rst index fba0f3386..03a13a58f 100644 --- a/doc/source/getting-started/overview.rst +++ b/doc/source/getting-started/overview.rst @@ -1,120 +1,112 @@ PyAnsys Heart overview ----------------------- +====================== -This section provides a brief review of key techniques in cardiac modeling. A list of references is also provided. - -It is assumed that the reader has a basic knowledge on LS-DYNA usage. However, it is encouraged to read the `LS-DYNA manuals`_ for a comprehensive understanding of the modeling process. +This overview describes key techniques in cardiac modeling and provides a list of references. +It assumes that you have a basic knowledge of LS-DYNA usage. For a comprehensive understanding +of the modeling process, see the `LS-DYNA manuals `_. Anatomy generation -^^^^^^^^^^^^^^^^^^ -Currently *PyAnsys Heart* supports generating electrophysiology, electro-mechanical and mechanical models from 24 pathological (`Strocchi et al.`_) and 20 healthy (`Rodero et al.`_) hearts. 
+------------------ + +PyAnsys Heart supports generating electrophysiology, electro-mechanical, and mechanical models from 24 pathological (`Strocchi et al.`_) and 20 healthy (`Rodero et al.`_) hearts. .. note:: - Input .case and .vtk files for both repositories can be found here: + Input CASE and VTK files for both repositories are available in these publications on the Zenodo website: - * https://zenodo.org/records/3890034 - * https://zenodo.org/records/4590294 + * `A Publicly Available Virtual Cohort of Four-chamber Heart Meshes for Cardiac Electro-mechanics Simulations `_ + * `Virtual cohort of adult healthy four-chamber heart meshes from CT images `_ -These .vtk and .case files are processed into a compatible input format where the naming and ids of the surfaces are inferred from the part ids and consequently written to an input .vtp file and .json file. -These input files are then further processed into a HeartModel that contains the various relevant anatomical features (left ventricle, right ventricle, endo- and epicardium, and cavities). This -HeartModel is a Python object that is then further processed (add physics) and exported as a LS-DYNA model. +These VTK and CASE files are processed into a compatible input format where the naming and IDs of the surfaces are inferred from the part IDs and consequently written to an input VTP file and JSON file. These input files are then further processed into a HeartModel that contains the various relevant anatomical features (left ventricle, right ventricle, endo- and epicardium, and cavities). This HeartModel is a Python object that then has physics added processed before being exported as an LS-DYNA model. +Some anatomical details are difficult to capture with medical imaging techniques but are crucial for physical simulations. Fortunately, some rule-based methods proposed in the literature are included in PyAnsys Heart. 
-Brief theory -^^^^^^^^^^^^ -Some anatomical details are difficult to capture with medical imaging techniques but are crucial for physical simulations. Fortunately, some rule-based methods proposed in the literature are included in *PyAnsys Heart*. +- **Conduction System** + Atrial fibers can be generated by a rule-based method published in `Piersanti et al.`_. Unlike ventricular fibers, which rely on the keyword ``*EM_EP_CREATEFIBERORIENTATION`` in LS-DYNA, this algorithm is implemented in PyAnsys Heart by solving multiple (thermal) Laplace's equations in LS-DYNA. -- **Conduction System:** - Atrial fibers can be generated by a rule-based method published in `Piersanti et al.`_. Unlike ventricular fibers, which rely on the keyword ``*EM_EP_CREATEFIBERORIENTATION`` in LS-DYNA, this algorithm is implemented in *PyAnsys Heart* by solving multiple (thermal) Laplace's equations in LS-DYNA. +- **UHC** + A consistent UHC (Universal Heart Coordinates) system can be convenient for landmark determination, data transferring, and more. A universal ventricular coordinate system presented in `Bayer et al.2`_ is implemented. A universal coordinate system for atria, as presented in `Roney et al.`_, is to be implemented. -- **UHC:** - A consistent Universal Heart Coordinates (UHC) system can be convenient for landmark determination, data transferring, etc. A universal ventricular coordinate system presented in `Bayer et al.2`_ is implemented. A universal coordinate system for atria, as presented in `Roney et al.`_, is to be implemented. 
+Electrophysiology +----------------- +PyAnsys Heart provides three options for cardiac electrophysiology modeling (electrical propagation) in the +``simulator.settings.settings.epanalysis.solvertype`` API: +- ``Monodomain`` (``*EM_CONTROL`` with EMSOL=11 in LS-DYNA) +- ``Eikonal`` (``*EM_CONTROL`` with EMSOL=14 in LS-DYNA) +- ``ReactionEikonal`` model (``*EM_CONTROL`` with EMSOL=15 in LS-DYNA) -Electrophysiology -^^^^^^^^^^^^^^^^^ +General descriptions follow for these and other models: -This section introduces cardiac electrophysiology modeling in *PyAnsys Heart*. -Three options are available to model electrical propagation in *PyAnsys Heart* (see `simulator.settings.settings.epanalysis.solvertype`): `Monodomain` (``*EM_CONTROL`` with EMSOL=11 in LS-DYNA), `Eikonal` (``*EM_CONTROL`` with EMSOL=14 in LS-DYNA) and `ReactionEikonal` model (``*EM_CONTROL`` with EMSOL=15 in LS-DYNA). - -- Monodomain: - The Monodomain model is a reaction-diffusion model and is a simplification of the Bidomain model `Potse et al.`_. In LS-DYNA, the 'passive' electrical material properties (electrical conductivity, membrane capacitance, surface/volume ratio) corresponding to the Monodomain model are set in ``*EM_MAT_003`` for the myocardium and ``*EM_MAT_001`` for the beams of the conduction system. These are to be completed with 'active' properties using a cell model (see 'cell model' section). +- **Monodomain** + The Monodomain model, a reaction-diffusion model, is a simplification of the Bidomain model `Potse et al.`_. In LS-DYNA, the *passive* electrical material properties (electrical conductivity, membrane capacitance, and surface/volume ratio) corresponding to the Monodomain model are set in ``*EM_MAT_003`` for the myocardium and ``*EM_MAT_001`` for the beams of the conduction system. These are to be completed with *active* properties using a cell model. For more information, see the "cell model" entry. .. 
Note:: - LS-DYNA offers the possibility of using either the Bidomain, Monodomain, or a mix of - both models but only the Monodomain is exposed in *PyAnsys Heart* for now. + LS-DYNA offers the possibility of using the Bidomain model, Monodomain model, or a mix of + both of these models. However, PyAnsys Heart exposes only the Monodomain model currently. -- Eikonal: - In this case only the activation time is computed, no cell model is used. - Here, the 'passive' electrical material properties are also set with ``*EM_MAT_003`` for 3D tissue and ``*EM_MAT_001`` for the beams of the conduction system. +- **Eikonal** + The Eikonal model does not use a cell model but only computes the activation time. + The *passive* electrical material properties are set with ``*EM_MAT_003`` for 3D tissue and ``*EM_MAT_001`` for the beams of the conduction system. -- Reaction Eikonal: - The Reaction Eikonal model first computes the activation time on each node, then it assigns action potential curves to each node with a time delay that corresponds to the activation time. Passive properties are the same as those described in the pure Eikonal model. +- **Reaction Eikonal** + The Reaction Eikonal model first computes the activation time on each node, and then it assigns action potential curves to each node with a time delay that corresponds to the activation time. Passive properties are the same as those for the pure Eikonal model. -- cell model: - The cell model used *PyAnsys Heart* is the `TenTusscher et al.`_ model, other models are to be added in the future. - When UHCs are computed, the transmural coordinate is used to distinguish between endo-, epi- and mid- myocardium layers using the corresponding version of the TenTusscher model. +- **Cell model** + The cell model used in PyAnsys Heart is the `TenTusscher et al.`_ model. Other models are to be added in the future. 
+ When UHCs are computed, the transmural coordinate is used to distinguish between endo-, epi-, and mid- myocardium layers using the corresponding version of the TenTusscher model. .. Note:: - LS-DYNA supports other cell models and user defined models, see the `*EM_EP` collection of keywords in `LS-DYNA manuals`_. + LS-DYNA supports other cell models and user-defined models. For more information, see the ``*EM_EP`` collection of keywords in the `LS-DYNA manuals `_. -- Stimulation: - Tissue stimulation is set by default on the SA node in a four-chamber model and in the left and right apex in case of a left ventricle or biventricular model. However, users can define their own stimulation origin and profile (see the `Stimulation definition example`). +- **Stimulation** + Tissue stimulation is set by default on the SA node in a four-chamber model and in the left and right apex in case of a left ventricle or biventricular model. However, you can define your own stimulation origin and profile. For more information, see :ref:`stimulation_definition_example`. Mechanics --------- -This section explains the key elements in cardiac mechanical models and their default options in *PyAnsys Heart*. - +Descriptions follow for key elements in cardiac mechanical models, along with their default options in PyAnsys Heart. -- Material: - Cardiac tissue mechanics is modeled using `MAT_295`_, which consists of two components: passive and active. By default, the passive component is represented by the `Holzapfel`-type model for both isotropic and anisotropic properties. For the active component, the `Guccione` model (ACTYPE=1) is used for mechanical models, while the `Hunter` model (ACTYPE=3) is employed for electromechanical models. +- **Material** + Cardiac tissue mechanics is modeled using `MAT_295 `_, which consists of two components: passive and active. By default, the passive component is represented by the `Holzapfel`-type model for both isotropic and anisotropic properties. 
For the active component, the `Guccione` model (ACTYPE=1) is used for mechanical models, while the `Hunter` model (ACTYPE=3) is employed for electromechanical models. -- Boundary conditions: +- **Boundary conditions** Boundary conditions are considered following the approach presented in `Strocchi et al.`_. Robin-type conditions are applied at the heart's valve regions, depending on the specific model constructed. Additionally, the pericardium's effect is accounted for by adding springs and dampers at the epicardium. For the ventricles, the stiffness of springs is scaled from different locations to constrain the motion primarily at the apex region. - -- Circulation model - Many papers have described the coupling between 3D heart models and 0D circulation models, such as those by `Agustin et al.`_. LS-DYNA uses ``CONTROL_VOLUME`` related keywords to achieve this coupling. By default, *PyAnsys Heart* provides a simple open-loop model. Specifically, a 2-element Windkessel model is applied to the left and right ventricle. If atria are present, atrioventricular valves are represented by a diode model, and a constant venous inflow is set for both atria. If no atrium is present, a constant venous pressure (preload) is set. +- **Circulation model** + Many papers have described the coupling between 3D heart models and 0D circulation models, such as those by `Agustin et al.`_. LS-DYNA uses ``CONTROL_VOLUME`` related keywords to achieve this coupling. By default, PyAnsys Heart provides a simple open-loop model. Specifically, a two-element Windkessel model is applied to the left and right ventricle. If atria are present, atrioventricular valves are represented by a diode model, and a constant venous inflow is set for both atria. If no atrium is present, a constant venous pressure (preload) is set. .. Figure(?) .. closed loop, twin builder ? -- Stress free configuration - It is assumed that the input geometry is in the state of end-diastole. 
To account for the initial stress from the end-diastolic pressure, the stress-free configuration is computed using the keyword `*CONTROL_REFERENCE_CONFIGURATION`. Then, the pressure is reapplied to the stress-free geometry, and a "virtual" end-diastolic mesh with the initial stress is exported. This mesh and initial stress is subsequently used in the final simulation. - - +- **Stress free configuration** + It is assumed that the input geometry is in the state of end-diastole. To account for the initial stress from the end-diastolic pressure, the stress-free configuration is computed using the keyword ``*CONTROL_REFERENCE_CONFIGURATION``. Then, the pressure is reapplied to the stress-free geometry, and a *virtual* end-diastolic mesh with the initial stress is exported. This mesh and initial stress is subsequently used in the final simulation. References ---------- -_`LS-DYNA manuals`: https://lsdyna.ansys.com/manuals/ - -_`MAT_295`: https://ftp.lstc.com/anonymous/outgoing/support/PAPERS/mat_295_formulation_public.pdf +_`Agustin et al.`: Augustin, Christoph M., et al. “A computationally efficient physiologically comprehensive 3D-0D closed-loop model of the heart and circulation.” Computer methods in applied mechanics and engineering 386 (2021): 114092. _`Bayer et al.`: Bayer, J.D., Blake, R. C., Plank, G., and Trayanova, N. A., “A novel rule-based algorithm for assigning myocardial fiber orientation to computational heart models,” Annals of biomedical engineering, 40(10), 2243-2254 (2012) -_`Costabal et al.`: Costabal, Francisco Sahli, Daniel E. Hurtado, and Ellen Kuhl. "Generating Purkinje networks in the human heart." Journal of biomechanics 49.12 (2016): 2455-2465. +_`Bayer et al.2`: Bayer, Jason, et al. “Universal ventricular coordinates: A generic framework for describing position within the heart and transferring data.” Medical image analysis 45 (2018): 83-93. -_`Strocchi et al.`: Strocchi, Marina, et al. 
"Simulating ventricular systolic motion in a four-chamber heart model with spatially varying robin boundary conditions to model the effect of the pericardium." Journal of Biomechanics 101 (2020): 109645. +_`Costabal et al.`: Costabal, Francisco Sahli, Daniel E. Hurtado, and Ellen Kuhl. "Generating Purkinje networks in the human heart." Journal of biomechanics 49.12 (2016): 2455-2465. _`Piersanti et al.`: Piersanti, Roberto, et al. "Modeling cardiac muscle fibers in ventricular and atrial electrophysiology simulations." Computer Methods in Applied Mechanics and Engineering 373 (2021): 113468. -_`Roney et al.`: Roney, Caroline H., et al., “Universal atrial coordinates applied to visualisation, registration and construction of patient specific meshes.” Medical image analysis 55 (2019): 65-75. +_`Potse et al.`: Potse, M., Dube, B., Richer, J., Vinet, A., Gulrajani, R.: A comparison of monodomain and bidomain reaction-diffusion models for action potential propagation in the human heart. IEEE Transactions on Biomedical Engineering 53(12), 2425- 2435 (dec 2006). -_`Bayer et al.2`: Bayer, Jason, et al. “Universal ventricular coordinates: A generic framework for describing position within the heart and transferring data.” Medical image analysis 45 (2018): 83-93. +_`Rodero et al.`: Rodero, C., et al. (2021). Virtual cohort of adult healthy four-chamber heart meshes from CT images. In PLOS Computational Biology (1.0.0). -_`Agustin et al.`: Augustin, Christoph M., et al. “A computationally efficient physiologically comprehensive 3D-0D closed-loop model of the heart and circulation.” Computer methods in applied mechanics and engineering 386 (2021): 114092. +_`Roney et al.`: Roney, Caroline H., et al., “Universal atrial coordinates applied to visualisation, registration and construction of patient specific meshes.” Medical image analysis 55 (2019): 65-75. 
-_`Potse et al.`: Potse, M., Dube, B., Richer, J., Vinet, A., Gulrajani, R.: A comparison of monodomain and bidomain reaction-diffusion models for action potential propagation in the human heart. IEEE Transactions on Biomedical Engineering 53(12), 2425- 2435 (dec 2006). +_`Strocchi et al.`: Strocchi, Marina, et al. "Simulating ventricular systolic motion in a four-chamber heart model with spatially varying robin boundary conditions to model the effect of the pericardium." Journal of Biomechanics 101 (2020): 109645. _`TenTusscher et al.`: Ten Tusscher, K. H., & Panfilov, A. V. (2006). Alternans and spiral breakup in a human ventricular tissue model. American Journal of Physiology-Heart and Circulatory Physiology, 291(3), H1088-H1100. -_`Rodero et al.`: Rodero, C., et al. (2021). Virtual cohort of adult healthy four-chamber heart meshes from CT images. In PLOS Computational Biology (1.0.0). - .. numerical damping from here .. TODO: atrial coordinate system \ No newline at end of file diff --git a/doc/source/getting-started/prerequisites.rst b/doc/source/getting-started/prerequisites.rst index 51a30b7c6..d3efefab5 100644 --- a/doc/source/getting-started/prerequisites.rst +++ b/doc/source/getting-started/prerequisites.rst @@ -5,20 +5,19 @@ Operating system ---------------- - Windows 10 -- Linux Ubuntu +- Linux Ansys tools ----------- -This framework was developed and tested under `Python310`_, `Python311`_ and `Python312`_ versions. -Before starting the -installation run ``python --version`` and check that it fits with the supported versions. +This framework was developed and tested under `Python 3.10 `_, `Python 3.11 `_, and `Python 3.12 `_. +Before starting the installation, run the ``python --version`` command and check that you are using one of the supported versions. Software -------- -.. list-table:: Required Ansys products +.. 
list-table:: **Required Ansys products** :widths: 200 300 200 400 :header-rows: 1 @@ -28,21 +27,21 @@ Software - Link to download * - Ansys Fluent - - R24R1, R24R2, R25R1 - - Pre-processor + - 2024 R1, 2024 R2, 2025 R1 + - Preprocessor - `Ansys Customer Portal`_ * - Ansys DPF Server - - 2024.1 (R24R1 install), 2024.1rc1, 2024.2rc0 - - Post-processor + - 2024.1 (comes with the 2024 R1 installation), 2024.1rc1, 2024.2rc0 + - Postprocessor - `Ansys Customer Portal`_ * - Ansys LS-DYNA - - R16.0 + - 16.0.0 IntelMPI double precision, 16.0.0 MSMPI double precision - Simulator - - Contact `PyAnsys Core team `_ to get more information + - `Ansys LSDYNA Product Space`_ or contact the `PyAnsys Core team `_ to get more information. -.. Note:: +.. note:: - Fluent is required for meshing. Also note that currently the postprocessor module is only compatible with Ansys DPF Servers 2024.1 (comes with R24R1 installation), 2024.1rc1 and 2024.2rc0. Later versions are currently not supported. Hence installing Ansys Fluent R24R1 is currently the most convenient. + Ansys Fluent is required for meshing. Also note that currently the postprocessor module is only compatible with Ansys DPF Servers 2024.1 (comes with the 2024 R1 installation), 2024.1rc1, and 2024.2rc0. Later versions are currently not supported. Hence, installing Ansys Fluent 2024 R1 is currently the most convenient. diff --git a/doc/source/index.rst b/doc/source/index.rst index bb3e9a79a..abbcb574f 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -36,7 +36,7 @@ PyAnsys Heart is a `Python`_ framework for heart modeling using Ansys tools. Explore examples to learn how to use PyAnsys Heart. - .. grid-item-card:: :material-regular:`people;1.25em` Contributing + .. 
grid-item-card:: :material-regular:`people;1.25em` Contribute :link: contributing :link-type: doc diff --git a/doc/source/links.rst b/doc/source/links.rst index 586981759..f2ca9b0b6 100644 --- a/doc/source/links.rst +++ b/doc/source/links.rst @@ -6,6 +6,7 @@ .. Ansys .. _Ansys Customer Portal: https://support.ansys.com/Home/HomePage +.. _Ansys LSDYNA Product Space: https://lsdyna.ansys.com/download-install-overview/ .. Python libraries @@ -21,15 +22,15 @@ .. _Markdown: https://www.markdownguide.org/ .. _Python documentation: https://docs.python.org/3/library/venv.html -.. PyAnsys Developer Guide +.. PyAnsys developer's guide -.. _PyAnsys Developer's Guide: https://dev.docs.pyansys.com/ +.. _PyAnsys developer's guide: https://dev.docs.pyansys.com/ .. PyAnsys Heart repository links .. _PyAnsys Heart repository: https://github.com/ansys/pyansys-heart -.. _PyAnsys Heart issues: https://github.com/ansys/pyansys-heart/issues -.. _PyAnsys Heart discussions: https://github.com/ansys/pyansys-heart/discussions +.. _PyAnsys Heart Issues: https://github.com/ansys/pyansys-heart/issues +.. _PyAnsys Heart Discussions: https://github.com/ansys/pyansys-heart/discussions .. _PyAnsys Heart documentation: https://heart.health.docs.pyansys.com/version/stable/index.html .. _PyAnsys Heart labels: https://github.com/ansys/pyansys-heart/labels .. _PyAnsys Heart release page: https://github.com/ansys/pyansys-heart/releases @@ -37,3 +38,4 @@ .. Other links .. _Contributor Covenant Code of Conduct: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ + diff --git a/doc/source/user-guide.rst b/doc/source/user-guide.rst index 9ae8993ad..f3cdef3a3 100644 --- a/doc/source/user-guide.rst +++ b/doc/source/user-guide.rst @@ -1,8 +1,8 @@ User guide ========== -This guide provides an overview of the capabilities of PyAnsys Heart, explaining the key concept of preprocessor, writer, simulator, -postprocessor, and more. 
+This section provides an overview of the capabilities of PyAnsys Heart, explaining key concepts of +the preprocessor, writer, simulator, and postprocessor. .. grid:: 1 2 3 3 :gutter: 1 2 3 3 diff --git a/doc/source/user-guide/postprocessor.rst b/doc/source/user-guide/postprocessor.rst index 964106d6e..155fc9610 100644 --- a/doc/source/user-guide/postprocessor.rst +++ b/doc/source/user-guide/postprocessor.rst @@ -1,8 +1,8 @@ .. _ref_postprocessor: -************* + Postprocessor -************* +============= This section is under development. diff --git a/doc/source/user-guide/preprocessor.rst b/doc/source/user-guide/preprocessor.rst index e7d92cc61..881b19700 100644 --- a/doc/source/user-guide/preprocessor.rst +++ b/doc/source/user-guide/preprocessor.rst @@ -5,4 +5,4 @@ Preprocessor ************ -This section provides an overview of the :attr:`HeartModel ` module. +This topic provides an overview of the :attr:`HeartModel ` module. diff --git a/doc/source/user-guide/simulator.rst b/doc/source/user-guide/simulator.rst index 7dcf65aae..4219ce6e8 100644 --- a/doc/source/user-guide/simulator.rst +++ b/doc/source/user-guide/simulator.rst @@ -1,27 +1,24 @@ - .. _ref_simulator: -********* Simulator -********* - -:attr:`Simulator ` is used to link up different simulation steps for cardiac modeling. For example, for electrophysiology simulations, fiber orientation :attr:`BaseSimulator.compute_fibers` and Purkinje network :attr:`EPSimulator.compute_purkinje` are computed before launching the physical simulation. In mechanical analysis, it is necessary to compute the stress free configuration :attr:`MechanicsSimulator.compute_stress_free_configuration` before running the simulation. +========= +The :attr:`Simulator ` module links different simulation steps for cardiac modeling. 
For example, in electrophysiology simulations, you compute fiber orientation and the Purkinje network using the :attr:`BaseSimulator.compute_fibers` and :attr:`EPSimulator.compute_purkinje` methods before you run the physical simulation. In mechanical simulations, you must compute the stress-free configuration using the :attr:`MechanicsSimulator.compute_stress_free_configuration` method before running the simulation. -Based on different applications, different simulators need to be created. +Different simulators must be created based on the application: - - :attr:`BaseSimulator`, parent class for all other Simulators, it holds general methods, like fiber generation. - - :attr:`EPSimulator`, used for running electrophysiology cardiac simulation - - :attr:`MechanicsSimulator`, used for running mechanical cardiac simulation - - :attr:`EPMechanicsSimulator`, used for running electrical-mechanical coupled cardiac simulation +- :attr:`BaseSimulator`: The parent class for all other simulators. It holds general methods, such as fiber generation. +- :attr:`EPSimulator`: Runs electrophysiology cardiac simulations. +- :attr:`MechanicsSimulator`: Runs mechanical cardiac simulations. +- :attr:`EPMechanicsSimulator`: Runs electrical-mechanical coupled cardiac simulations. 
-A simple usage example is given in the following: +Here is a simple code example: ->>> # Get a heart model +>>> # get a heart model >>> import ansys.health.heart.models as models >>> model = models.HeartModel.load_model("path_to_model") ->>> # Set up a LS-DYNA executable +>>> # set up an LS-DYNA executable >>> from ansys.heart.simulator.simulator import DynaSettings, MechanicsSimulator >>> dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, @@ -34,18 +31,19 @@ A simple usage example is given in the following: dyna_settings=dyna_settings, simulation_directory="output-path") -Default modeling parameters are saved :attr:`here `, you can load them to the simulator: +Default modeling parameters are saved to the :attr:`ansys.heart.simulator.settings.defaults` attribute. +You can load them into the simulator: .. code:: pycon >>> simulator.settings.load_defaults() - >>> # we can print settings + >>> # Print settings >>> print(simulator.settings.mechanics.analysis.end_time) 800 millisecond - >>> # let's change it to 1600 ms + >>> # Change it to 1600 ms >>> simulator.settings.mechanics.analysis.end_time = Quantity(1600, "ms") -Alternatively, settings can be load from a yaml file as follow +Alternatively, you can load settings from a YAML file: >>> simulator.settings.load("a-yaml-file") diff --git a/doc/source/user-guide/writer.rst b/doc/source/user-guide/writer.rst index cb24c7872..99107c35a 100644 --- a/doc/source/user-guide/writer.rst +++ b/doc/source/user-guide/writer.rst @@ -1,22 +1,21 @@ .. _ref_writer: -****** Writer -****** +====== -:attr:`DynaWriter ` is used to generate LS-DYNA input files for different simulations. +The :attr:`DynaWriter ` base class generates LS-DYNA input files for different simulations. -Based on different applications, different Writers need to be created. +Based on different applications, different writers must be created. - - :attr:`PurkinjeGenerationDynaWriter`, to generate a LS-DYNA input deck for creating Purkinje network. 
- - :attr:`FiberGenerationDynaWriter`, to generate a LS-DYNA input deck for creating fibers orientation vectors. - - :attr:`MechanicsDynaWriter`, to generate a LS-DYNA input deck for mechanical simulations - - :attr:`ZeroPressureMechanicsDynaWriter`, to generate a LS-DYNA input deck for stress free configuration simulations - - :attr:`ElectrophysiologyDynaWriter`, to generate a LS-DYNA input deck for electrophysiology simulations - - :attr:`ElectroMechanicsDynaWriter`, to generate a LS-DYNA input deck for electrical-mecahnical coupled simulations +- :attr:`PurkinjeGenerationDynaWriter`: Generates an LS-DYNA input deck for creating a Purkinje network. +- :attr:`FiberGenerationDynaWriter`: Generates an LS-DYNA input deck for creating fiber orientation vectors. +- :attr:`MechanicsDynaWriter`: Generates an LS-DYNA input deck for mechanical simulations. +- :attr:`ZeroPressureMechanicsDynaWriter`: Generates an LS-DYNA input deck for stress-free configuration simulations. +- :attr:`ElectrophysiologyDynaWriter`: Generates an LS-DYNA input deck for electrophysiology simulations. +- :attr:`ElectroMechanicsDynaWriter`: Generates an LS-DYNA input deck for electrical-mechanical coupled simulations. 
-A simple use example is given as the following: +Here is a simple code example: >>> # Get a heart model >>> import ansys.health.heart.models as models diff --git a/doc/styles/config/vocabularies/ANSYS/accept.txt b/doc/styles/config/vocabularies/ANSYS/accept.txt index 72bfb9237..5d27a329c 100644 --- a/doc/styles/config/vocabularies/ANSYS/accept.txt +++ b/doc/styles/config/vocabularies/ANSYS/accept.txt @@ -39,4 +39,9 @@ Alternans simulator.settings.settings.EPAnalysis.solvertype Rodero (?i)GitHub -PyPI \ No newline at end of file +PyPI +Tox +(?i)Zenodo +namespace +subpackages +venv diff --git a/examples/README.rst b/examples/README.rst index 39a3be1e2..395f5fb2a 100644 --- a/examples/README.rst +++ b/examples/README.rst @@ -1,4 +1,4 @@ Examples ======== -These examples show you how you can use the preprocessor and simulator modules -to preprocess, consume and run heart models. Examples include electrophysiology and mechanics. \ No newline at end of file +These examples show how to use the Preprocessor, Postprocessor, and Simulator modules +to preprocess, consume, and run heart models. Examples include electrophysiology and mechanics. \ No newline at end of file diff --git a/examples/postprocessor/README.rst b/examples/postprocessor/README.rst index 51fa3f985..4c8417be5 100644 --- a/examples/postprocessor/README.rst +++ b/examples/postprocessor/README.rst @@ -1,4 +1,4 @@ Postprocessor examples ====================== -These examples show you how you can use the postprocessor module +These examples show how to use the Postprocessor module to handle simulation data generated by LS-DYNA. 
\ No newline at end of file diff --git a/examples/postprocessor/doc_autopost_mechanical_simulation.py b/examples/postprocessor/doc_autopost_mechanical_simulation.py index 19f7bd3a8..4b2c6b408 100644 --- a/examples/postprocessor/doc_autopost_mechanical_simulation.py +++ b/examples/postprocessor/doc_autopost_mechanical_simulation.py @@ -22,18 +22,17 @@ """ -Post process mechanical simulation folder ------------------------------------------ -This example shows you how to use post process script after mechanical simulation. +Postprocess the mechanical simulation folder +-------------------------------------------- +This example shows how to use the postprocess script after a mechanical simulation. """ ############################################################################### -# Perform the required imports -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Import the required modules +# Perform required imports +# ~~~~~~~~~~~~~~~~~~~~~~~~ # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/thumbnails/pv.png' # sphinx_gallery_end_ignore import os @@ -66,7 +65,7 @@ ############################################################################### # Create PV loop # ~~~~~~~~~~~~~~ -# Pressure-volume loop figure is an important metric for heart function +# A PV (Pressure-Volume) loop is an important metric for heart function. system = SystemModelPost(meca_folder) fig = system.plot_pv_loop() plt.show() @@ -76,14 +75,14 @@ # :width: 300pt # :align: center -# You can generate a series of png by setting start and end time (in second) +# You can generate a series of PNG files by setting start and end times (in seconds). 
for it, tt in enumerate(np.linspace(0.001, 3, 60)): # assume heart beat once per 1s fig = system.plot_pv_loop(t_start=0, t_end=tt) fig.savefig("pv_{0:d}.png".format(it)) plt.close() ############################################################################### -# An animation can be created by +# You can create an animation. # `ffmpeg -f image2 -i pv_%d.png pv_loop.mp4` @@ -97,9 +96,9 @@ ############################################################################### -# Myocardium wall strain -# ~~~~~~~~~~~~~~~~~~~~~~ -# Compute left ventricle strain in longitudinal, radial, circumferential directions +# Compute myocardium wall strain +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute left ventricle strain in longitudinal, radial, circumferential directions. aha_evaluator = AhaStrainCalculator(model, d3plot_file=meca_folder / "d3plot") # get LRC strain at a given time and export a file named LRC_10.vtk @@ -130,7 +129,7 @@ # :width: 400pt # :align: center -# get strain for all simulation frames (this will take a while) +# get strain for all simulation frames, which takes a while strain_table = aha_evaluator.compute_aha_strain(out_dir=".", write_vtk=False) # plot @@ -152,14 +151,14 @@ ############################################################################### # Run with default process scripts # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# All above steps are encapsulated in one script: +# All of the preceding steps are encapsulated in one script: mech_post(meca_folder, model) ############################################################################### -# You can open Paraview and load the state file +# You can open Paraview, load the state file # :download:`post_main2.pvsm <../../_static/others/post_main2.pvsm>`, -# and specify the folder. +# and specify the directory. ############################################################################### # .. 
only:: html diff --git a/examples/postprocessor/doc_autopost_zerop_simulation.py b/examples/postprocessor/doc_autopost_zerop_simulation.py index b260f0467..0994ecca7 100644 --- a/examples/postprocessor/doc_autopost_zerop_simulation.py +++ b/examples/postprocessor/doc_autopost_zerop_simulation.py @@ -22,19 +22,18 @@ """ -Post process Zero pressure folder ---------------------------------- -This example shows you how to use post process script after Stress free configuration simulation, -and visualize them in Paraview. +Postprocess a zero pressure folder +---------------------------------- +This example shows how to use the postprocess script after stress-free configuration simulation +and view results in Paraview. """ ############################################################################### -# Perform the required imports -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Import the required modules +# Perform required imports +# ~~~~~~~~~~~~~~~~~~~~~~~~ # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we must put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/thumbnails/klotz.png' # sphinx_gallery_end_ignore import os @@ -60,22 +59,24 @@ ############################################################################### # Run default process scripts # ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# a folder "post" will be created with key simulation results (json, png, vtk...) +# A folder named ``post`` is created with key simulation results in various +# file formats, such as JSON, PNG, or VTK. + zerop_post(zerop_folder, model) ############################################################################### -# In this folder, you will be able to find +# Files in this folder include the Klotz curve and iteration information. 
############################################################################### # Klotz curve -# ~~~~~~~~~~~ +# ^^^^^^^^^^^ # .. image:: /_static/images/klotz.png # :width: 400pt # :align: center ############################################################################### -# Iteration info -# ~~~~~~~~~~~~~~ +# Iteration information +# ^^^^^^^^^^^^^^^^^^^^^ { "Simulation output time (ms)": [ 0.0, @@ -113,11 +114,11 @@ } ############################################################################### -# Visualization in Paraview -# ~~~~~~~~~~~~~~~~~~~~~~~~~ -# Open Paraview and load the state file +# View results in Paraview +# ~~~~~~~~~~~~~~~~~~~~~~~~ +# Open Paraview, load the state file # :download:`post_zerop2.pvsm <../../_static/others/post_zerop2.pvsm>`, -# and specify the folder +# and specify the directory. ############################################################################### # .. image:: /_static/images/load_zerop.png @@ -125,7 +126,7 @@ # :align: center ############################################################################### -# You can compare the end-of-diastolic geometry between input and after inflation +# You can compare the end-of-diastolic geometry between input and after inflation. ############################################################################### # .. image:: /_static/images/EDcavity_simu_real.png @@ -133,7 +134,7 @@ # :align: center ############################################################################### -# You can show and export inflation animation +# You can show and export inflation animation. ############################################################################### # .. only:: html diff --git a/examples/postprocessor/doc_postproc_ep_simulation.py b/examples/postprocessor/doc_postproc_ep_simulation.py index 58b54dded..837b5dae8 100644 --- a/examples/postprocessor/doc_postproc_ep_simulation.py +++ b/examples/postprocessor/doc_postproc_ep_simulation.py @@ -21,15 +21,14 @@ # SOFTWARE. 
""" -Post process EP simulation --------------------------- -This example shows you how to post process an EP simulation. +Postprocess an EP simulation +---------------------------- +This example shows how to postprocess an EP simulation. """ ############################################################################### -# Perform the required imports -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Import the required modules +# Perform required imports +# ~~~~~~~~~~~~~~~~~~~~~~~~ # sphinx_gallery_start_ignore # sphinx_gallery_thumbnail_path = '_static/images/ep_post_activationtime.png' @@ -52,17 +51,15 @@ / "d3plot" ) ############################################################################### -# Instantiate the Postprocessor +# Instantiate the postprocessor # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate Postprocessor postproc = EPpostprocessor(results_path=ep_folder) ############################################################################### -# 12-LEAD ECGs -# ~~~~~~~~~~~~~~~~ -# Plot 12-Lead ECGs +# Plot 12-LEAD ECGs +# ~~~~~~~~~~~~~~~~~ path_to_ecg_file = ep_folder.parent / "em_EKG_001.dat" @@ -77,9 +74,9 @@ # :align: center ############################################################################### -# Activation times -# ~~~~~~~~~~~~~~~~ -# Get activation times and plot the field +# Plot activation times +# ~~~~~~~~~~~~~~~~~~~~~ +# Get the field with activation times and plot them. activation_time_field = postproc.get_activation_times() activation_time_field.plot(show_edges=False) @@ -88,21 +85,26 @@ # :width: 300pt # :align: center +############################################################################### # Compute total activation time +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute the total activation time. 
+ activation_time_data = activation_time_field.data_as_list total_acctivation_time = max(activation_time_data) - min(activation_time_data) print("Total activation time: " + str(total_acctivation_time) + " ms") ############################################################################### -# Transmembrane potentials -# ~~~~~~~~~~~~~~~~~~~~~~~~ -# Get transmembrane potentials on list of nodes and plot +# Get transmembrane potentials +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Get transmembrane potentials on a list of nodes and plot. + vm, times = postproc.get_transmembrane_potential(node_id=[0, 1, 100], plot=True) ############################################################################### # .. image:: /_static/images/ep_tm.png # :width: 300pt # :align: center -# Animate and export in vtk format +# Animate and export in VTK format postproc.export_transmembrane_to_vtk() postproc.animate_transmembrane() diff --git a/examples/preprocessor/README.rst b/examples/preprocessor/README.rst index 13f40f5a8..de5a1f702 100644 --- a/examples/preprocessor/README.rst +++ b/examples/preprocessor/README.rst @@ -1,5 +1,5 @@ Preprocessor examples ===================== -These examples show you how you can use the preprocessor module -to generate different kinds of heart models. For instance, a -biventricular, four chamber, or full heart model. \ No newline at end of file +These examples show how to use the Preprocessor module +to generate different kinds of heart models, including a +biventricular, four-chamber heart model and a full-heart model. \ No newline at end of file diff --git a/examples/preprocessor/demo_material_pr.py b/examples/preprocessor/demo_material_pr.py index d8909d064..776017a41 100644 --- a/examples/preprocessor/demo_material_pr.py +++ b/examples/preprocessor/demo_material_pr.py @@ -24,7 +24,7 @@ Define materials ---------------- -This example show you how to create a mechanical material and assign it to a heart part. 
+This example shows how to create a mechanical material and assign it to a heart part. """ ############################################################################### @@ -52,30 +52,28 @@ ############################################################################### # .. note:: -# Unit system used for heart modeling in LS-DYNA is ["MPa", "mm", "N", "ms", "g"] - +# The unit system for heart modeling in LS-DYNA is ``["MPa", "mm", "N", "ms", "g"]``. ############################################################################### # Create a material # ~~~~~~~~~~~~~~~~~ - -# Neo-Hookean material can be created as following +# Create a Neo-Hookean material as follows. neo = NeoHookean(rho=0.001, c10=1, nu=0.499) ############################################################################### -## The recommended approach is to create a Neo-Hookean material by -# activating only the isotropic module in MAT295. +# The recommended way to create a Neo-Hookean material is by +# activating only the isotropic module in MAT_295. neo2 = Mat295(rho=0.001, iso=ISO(itype=1, beta=2, kappa=1, mu1=0.05, alpha1=2)) ############################################################################### # .. note:: -# Please refer to LS-DYNA manual for more details of MAT_295 +# For more information on MAT_295, see the `LS-DYNA manuals `_. -# More steps to create MAT295 which is used for myocardium +# Additional steps follow for creating MAT_295, which is used for myocardium. 
# step 1: create an isotropic module iso = ISO(k1=1, k2=1, kappa=100) -# step 2: create an anisotropoc moddule +# step 2: create an anisotropic module fiber = ANISO.HGOFiber(k1=1, k2=1) aniso1 = ANISO(fibers=[fiber]) @@ -93,10 +91,13 @@ # build active module active = ACTIVE(model=ac_model1, ca2_curve=ac_curve1) -## Active model 1 needs a constant ca2ion -# but the curve needs to cross threshold at every start of heart beat +## Active model 1 must have a constant ca2ion, +# but the curve must cross the threshold at every start of the heart beat. -# You can plot Ca2+ with threshold +############################################################################### +# Plot Ca2+ with threshold +# ~~~~~~~~~~~~~~~~~~~~~~~~ +# Plot Ca2+ with the threshold. fig = active.ca2_curve.plot_time_vs_ca2() plt.show() @@ -110,10 +111,10 @@ ############################################################################### # .. note:: -# With setting eta=0 is model 3, stress curve will be the active stress for all elements. -# If eta!=0, this is idealized active stress when fiber stretch stays to 1. +# When eta=0 in model 3, the stress curve is the active stress for all elements. +# If eta!=0, this is the idealized active stress when fiber stretch stays at 1. -# PyAnsys-Heart will convert the stress curve to Ca2+ curve (input of MAT_295) +# PyAnsys Heart converts the stress curve to Ca2+ curve (input of MAT_295) fig = ac_curve3.plot_time_vs_ca2() plt.show() @@ -121,13 +122,16 @@ active3 = ACTIVE(model=ac_model3, ca2_curve=ac_curve3) ############################################################################### -# Finally, MAT295 can be created with the above modules +# Create MAT_295 with modules +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Create MAT_295 with the preceding modules. 
iso_mat = Mat295(rho=1, iso=iso, aniso=None, active=None) passive_mat = Mat295(rho=1, iso=iso, aniso=aniso1, active=None) active_mat = Mat295(rho=1, iso=iso, aniso=aniso1, active=active) ############################################################################### -# EP materials can be created as follows +# Create EP materials +# ~~~~~~~~~~~~~~~~~~~ ep_mat_active = EPMaterial.Active( sigma_fiber=1, sigma_sheet=0.5, beta=140, cm=0.01, cell_model=CellModel.Tentusscher() ) @@ -138,45 +142,47 @@ # ############################################################################## # .. note:: -# Ca2+ curve will be ignored if the simulation is coupled with electrophysiology -# -# ############################################################################## -# Assign material to a part -# ~~~~~~~~~~~~~~~~~~~~~~~~~ -# Assign the materials to the heart model +# The Ca2+ curve is ignored if the simulation is coupled with electrophysiology. + +############################################################################### +# Assign materials to a part +# ~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Assign the materials to the heart model. ############################################################################### # Load a heart model +# ~~~~~~~~~~~~~~~~~~ + ############################################################################### # .. note:: -# You need to complete the full heart preprocessing example first. +# You must complete the full heart preprocessing example first. + import ansys.health.heart.examples as examples import ansys.health.heart.models as models heart_model_vtu, heart_model_partinfo, _ = examples.get_preprocessed_fullheart() workdir = str(Path.home() / "pyansys-heart" / "Rodero2021") -# load a full heart model. +# Load a full-heart model. 
heartmodel: models.FullHeart = models.FullHeart(working_directory=workdir) heartmodel.load_model_from_mesh(heart_model_vtu, heart_model_partinfo) -heartmodel.mesh.set_active_scalars("_volume-id") -heartmodel.mesh.plot() - -# Print default materials +# Print the default material. You should see that the material is empty. print(heartmodel.left_ventricle.meca_material) print(heartmodel.left_ventricle.ep_material) ############################################################################### # .. note:: -# If no material is set before writing k files, default material from ```settings``` -# will be set. +# If no material is set before writing k files, the default material +# from the ``settings`` object is used. -# Assign the material we just created +# Assign the material that you just created. heartmodel.left_ventricle.meca_material = active_mat heartmodel.left_ventricle.ep_material = ep_mat_active +# Print it. You should see the following: +# MAT295(rho=1, iso=ISO(itype=-3, beta=0.0, nu=0.499, k1=1, k2=1), aopt=2.0, aniso=ANISO(atype=-1, fibers=[ANISO.HGOFiber(k1=1, k2=1, a=0.0, b=1.0, _theta=0.0, _ftype=1, _fcid=0)], k1fs=None, k2fs=None, vec_a=(1.0, 0.0, 0.0), vec_d=(0.0, 1.0, 0.0), nf=1, intype=0), active=ActiveModel.Model1(t0=None, ca2ion=None, ca2ionm=4.35, n=2, taumax=0.125, stf=0.0, b=4.75, l0=1.58, l=1.85, dtmax=150, mr=1048.9, tr=-1629.0)) # noqa print(heartmodel.left_ventricle.meca_material) print(heartmodel.left_ventricle.ep_material) ############################################################################### diff --git a/examples/preprocessor/doc_example_stimulation.py b/examples/preprocessor/doc_example_stimulation.py index 4196bef30..91df37156 100644 --- a/examples/preprocessor/doc_example_stimulation.py +++ b/examples/preprocessor/doc_example_stimulation.py @@ -21,26 +21,22 @@ # SOFTWARE. """ +.. 
_stimulation_definition_example: -Stimulation definition example ---------------------------------- -This example shows you how to define an EP stimulation. It demonstrates how you -can load a pre-computed heart model, define a stimulation region based on a sphere -centered on the apex, and a sphere centered on a point chosen in Universal -Ventricular Coordinates (UVC). +Define an EP stimulation +------------------------ +This example shows how to define an EP stimulation. It loads a pre-computed heart +model and defines a stimulation region based on a sphere centered on the apex and +a sphere centered on a point chosen in UVCs (Universal Ventricular Coordinates). """ ############################################################################### -# Example setup -# ------------- -# Loading required modules and heart model. -# # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths. # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/stimulation.png' # sphinx_gallery_end_ignore @@ -54,7 +50,7 @@ from ansys.health.heart.settings.settings import SimulationSettings, Stimulation from ansys.health.heart.simulator import DynaSettings, EPSimulator -# accept dpf license agreement +# Accept the DPF license agreement. # https://dpf.docs.pyansys.com/version/stable/getting_started/licensing.html#ref-licensing os.environ["ANSYS_DPF_ACCEPT_LA"] = "Y" @@ -63,21 +59,21 @@ # sphinx_gallery_end_ignore -# specify the path to the working directory and heart model. The following path assumes -# that a preprocessed model is already available +# Specify the path to the working directory and heart model. The following path assumes +# that a preprocessed model is already available. 
workdir = Path.home() / "pyansys-heart" / "downloads" / "Strocchi2020" / "01" / "FourChamber" path_to_model = str(workdir / "heart_model.vtu") -# load your four chamber heart model with uvcs (see preprocessor examples to create -# a heart model from scratch) +# Load your four-chamber heart model with UVCs. (See the preprocessor examples to create +# a heart model from scratch.) model: models.FourChamber = models.FourChamber(working_directory=workdir) model.load_model_from_mesh(path_to_model) ############################################################################### # Define stimulation at the apex # ------------------------------ -# Select points inside sphere centered at the left apex. +# Select points inside the sphere centered at the left apex. apex_left = model.left_ventricle.apex_points[0].xyz sphere = pv.Sphere(center=(apex_left), radius=2) newdata = model.mesh.select_enclosed_points(sphere) @@ -89,14 +85,14 @@ pl.add_mesh(model.mesh, color="lightgrey", opacity=0.2) pl.show() -# Define stimulation and introduce it as simulation settings +# Define stimulation and introduce it as simulation settings. stim_apex = Stimulation(node_ids=list(node_ids), t_start=0, period=800, duration=2, amplitude=50) settings = SimulationSettings() settings.load_defaults() settings.electrophysiology.stimulation = {"stim_apex": stim_apex} -# Define auxiliary function to find a point in the model based on its UVC coordinates +# Define auxiliary function to find a point in the model based on its UVCs. def get_point_from_uvc( model: models.HeartModel, apicobasal: float, transmural: float, rotational: float ): @@ -121,10 +117,10 @@ def get_point_from_uvc( ############################################################################### -# Define stimulation based on UVC -# ------------------------------- -# Select points inside sphere centered at a chosen point based on UVC coordinates -# (if the model has UVC). 
+# Define stimulation using UVCs +# ----------------------------- +# Select points inside the sphere centered at a chosen point based on UVCs +# (if the model includes UVCs). if ( ("transmural" in model.mesh.point_data.keys()) and ("apico-basal" in model.mesh.point_data.keys()) @@ -146,11 +142,11 @@ def get_point_from_uvc( stim_uvc = Stimulation(node_ids=list(node_ids), t_start=0, period=800, duration=2, amplitude=50) settings.electrophysiology.stimulation["stim_uvc"] = stim_uvc -# specify LS-DYNA path +# Specify LS-DYNA path. lsdyna_path = r"ls-dyna_msmpi.exe" -# instantaiate dyna settings of choice +# Instantiate DYNA settings of choice. dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="intelmpi", num_cpus=4, platform="wsl" ) diff --git a/examples/preprocessor/doc_example_uhc.py b/examples/preprocessor/doc_example_uhc.py index 3b4dfe2ee..acc74b0a5 100644 --- a/examples/preprocessor/doc_example_uhc.py +++ b/examples/preprocessor/doc_example_uhc.py @@ -22,20 +22,20 @@ """ -UHC example --------------------- -This example shows how to compute universal heart coordinates (UHC) for +Compute UHCs for the ventricles +------------------------------- +This example shows how to compute UHCs (universal heart coordinates) for the ventricles. """ ############################################################################### -# Perform the required imports -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable (uses DEV-104373-g6d20c20aee). +# Import required modules +# ~~~~~~~~~~~~~~~~~~~~~~~ +# Import the necessary modules and set relevant paths, including the working +# directory, model, and LS-DYNA executable file. This example uses DEV-104373-g6d20c20aee. # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. 
+# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/thumbnails/uvc.png' # sphinx_gallery_end_ignore @@ -48,24 +48,24 @@ import ansys.health.heart.models as models from ansys.health.heart.simulator import BaseSimulator, DynaSettings -# specify the path to the working directory and heart model. The following path assumes -# that a preprocessed model is already available +# Specify the path to the working directory and heart model. The following path assumes +# that a preprocessed model is already available. workdir = Path.home() / "pyansys-heart" / "downloads" / "Strocchi2020" / "01" / "FourChamber" path_to_model = str(workdir / "heart_model.vtu") -# specify LS-DYNA path +# Specify LS-DYNA path lsdyna_path = r"ls-dyna_smp" -# load heart model. +# Load heart model model: models.FourChamber = models.FourChamber(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate simulator. Change options where necessary. +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate the simulator and modify options as needed. -# instantiate dyna settings of choice +# Instantiate DYNA settings of choice dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="intelmpi", @@ -79,18 +79,18 @@ ) ############################################################################### -# Compute UHC -# ~~~~~~~~~~~ -# Compute UHC using Laplace Dirichlet method. +# Compute UHCs +# ~~~~~~~~~~~~ +# Compute UHCs using the Laplace-Dirichlet Rule-Based (LDRB) method. simulator.compute_uhc() ############################################################################### # .. note:: -# There are several definitions for UHC (see https://github.com/KIT-IBT/Cobiveco). 
-# Here, a simple approach is taken and the -# Dirichlet conditions are shown below. At rotational direction, the start (pi), end (-pi) -# and middle (0) points are defined from four-cavity long axis cut view. +# Several definitions for UHC exist. (See https://github.com/KIT-IBT/Cobiveco.) +# This example uses a simple approach. The following image shows the +# Dirichlet conditions. For the rotational direction, the start (pi), end (-pi), +# and middle (0) points are defined from the four-cavity long axis cut view. ############################################################################### # .. image:: /_static/images/uvc_bc.png @@ -98,10 +98,10 @@ # :align: center ############################################################################### -# Visualization of UVCs -# ~~~~~~~~~~~~~~~~~~~~~ -# UVC is assigned back to the full model automatically -# Atrial points are padded with NaN's +# Visualize UHCs +# ~~~~~~~~~~~~~~ +# The simulator automatically assigns UHCs back to the full model. +# Atrial points are padded with NaNs. plotter = pv.Plotter(shape=(1, 3)) diff --git a/examples/preprocessor/doc_preprocess_fourchamber.py b/examples/preprocessor/doc_preprocess_fourchamber.py index 1e3af4cfe..897567489 100644 --- a/examples/preprocessor/doc_preprocess_fourchamber.py +++ b/examples/preprocessor/doc_preprocess_fourchamber.py @@ -22,19 +22,17 @@ """ -Create a four chamber heart model +Create a four-chamber heart model --------------------------------- -This example shows you how to process a case file from the Strocchi2020 database -and process that into a simulation-ready full heart model. +This example shows how to process a CASE file from the Strocchi 2020 database +and process that into a simulation-ready full-heart model. 
""" ############################################################################### -# Example setup -# ------------- # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory and generated model +# directory and generated model. # sphinx_gallery_start_ignore # sphinx_gallery_thumbnail_path = '_static/images/four_chamber_mesh.png' @@ -47,48 +45,50 @@ import ansys.health.heart.models as models from ansys.health.heart.pre.database_utils import get_compatible_input -# Use Fluent 24.1 for meshing. +# Use Fluent 2024 R1 for meshing. import ansys.health.heart.pre.mesher as mesher from ansys.health.heart.utils.download import download_case_from_zenodo, unpack_case mesher._fluent_version = "24.1" -# Download and unpack the case in a dedicated folder, in this case the home directory. +# Download and unpack the CASE file in a dedicated directory. This example uses the home directory. download_folder = Path.home() / "pyansys-heart" / "downloads" tar_file = download_case_from_zenodo("Strocchi2020", 1, download_folder, overwrite=False) case_file = unpack_case(tar_file) -# specify a working directory. Here we use the same directory as the case file. +# Specify a working directory. This example uses the same directory as the CASE file. workdir = os.path.join(os.path.dirname(case_file), "FourChamber") if not os.path.isdir(workdir): os.makedirs(workdir) -# specify paths to the model, input and part definitions. +# Specify paths to the model, input, and part definitions. path_to_model = os.path.join(workdir, "heart_model.vtu") path_to_input = os.path.join(workdir, "input_model.vtp") path_to_part_definitions = os.path.join(workdir, "part_definitions.json") ############################################################################### # .. note:: -# You may need to (manually) download the .case or .vtk files from the Strocchi2020 -# and Rodero2021 databases first. 
See: +# You can also manually download the CASE or VTK files from the Strocchi 2020 +# and Rodero 2021 databases. For more information, see: # -# - https://zenodo.org/records/3890034 -# - https://zenodo.org/records/4590294 +# - `A Publicly Available Virtual Cohort of Four-chamber Heart Meshes for Cardiac +# Electro-mechanics Simulations `_ +# - `Virtual cohort of adult healthy four-chamber heart meshes from CT images `_ # -# Alternatively you can make use of the download -# module instead. See the download module. +# Alternatively, you can simply click one of the buttons at the bottom of this page +# to download a CASE file for the Rodero 2021 database in an IPYNB, PY, or ZIP format. ############################################################################### -# Convert the .vtk file into compatible input -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Convert the VTK file to a compatible input format +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Convert the VTK file to a compatible input format, such as a JSON file. input_geom, part_definitions = get_compatible_input( case_file, model_type="FourChamber", database="Strocchi2020" ) -# Note that the input model and part definitions can be used for later use. -# save input geometry and part definitions: +# Note that the input model and part definitions can be saved for later use. +# Save input geometry and part definitions. input_geom.save(path_to_input) with open(path_to_part_definitions, "w") as f: json.dump(part_definitions, f, indent=True) @@ -96,42 +96,42 @@ ############################################################################### # Create a heart model # ~~~~~~~~~~~~~~~~~~~~ -# Initialize the desired heart model by giving a working directory. +# Create the desired heart model by giving a working directory. -# initialize a four chamber heart model +# Initialize a four-chamber heart model model = models.FourChamber(working_directory=workdir) -# load input model generated in an earlier step. 
+# Load input model generated in an earlier step. model.load_input(input_geom, part_definitions, "surface-id") -# mesh the volume of all structural parts. +# Mesh the volume of all structural parts. model.mesh_volume(use_wrapper=True, global_mesh_size=1.5) -# update the model and extract the required (anatomical) features +# Update the model and extract the required anatomical features. model._update_parts() -# dump the model to disk +# Dump the model to disk. model.save_model(path_to_model) -# Optionally save the simulation mesh as a vtk object for "offline" inspection +# Optionally save the simulation mesh as a VTK object for "offline" inspection. model.mesh.save(os.path.join(model.workdir, "simulation-mesh.vtu")) model.save_model(os.path.join(model.workdir, "heart_model.vtu")) -# print some info about the processed model. +# Print some information about the processed model. print(model) -# print part names +# Print part names. print(model.part_names) ############################################################################### # Visualize results # ~~~~~~~~~~~~~~~~~ # You can visualize and inspect the components of the model by accessing -# various properties/attributes and invoke methods. +# various properties or attributes and invoking methods. print(f"Volume of LV cavity: {model.left_ventricle.cavity.volume} mm^3") print(f"Volume of LV cavity: {model.left_atrium.cavity.volume} mm^3") -# plot the remeshed model +# Plot the remeshed model. model.plot_mesh(show_edges=False) ############################################################################### @@ -139,7 +139,7 @@ # :width: 400pt # :align: center -# plot the endocardial surface of the left ventricle. +# Plot the endocardial surface of the left ventricle. 
model.left_ventricle.endocardium.plot(show_edges=True, color="r") ############################################################################### @@ -147,7 +147,7 @@ # :width: 400pt # :align: center -# loop over all cavities and plot these in a single window. +# Loop over all cavities and plot them in a single window with PyVista. import pyvista as pv cavities = pv.PolyData() @@ -161,7 +161,7 @@ # :align: center # sphinx_gallery_start_ignore -# Generate static images for docs. +# Generate static images for the documentation. # from pathlib import Path diff --git a/examples/preprocessor/doc_preprocess_fullheart_rodero_01.py b/examples/preprocessor/doc_preprocess_fullheart_rodero_01.py index 9918d9e82..dadc2d035 100644 --- a/examples/preprocessor/doc_preprocess_fullheart_rodero_01.py +++ b/examples/preprocessor/doc_preprocess_fullheart_rodero_01.py @@ -22,19 +22,17 @@ """ -Create a full heart model ---------------------------------- -This example shows you how to process a case from Rodero et al (2021) into +Create a full-heart model +------------------------- +This example shows how to process a case from `Rodero et al.`_ (2021) into a simulation-ready heart model. """ ############################################################################### -# Example setup -# ------------- # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory and generated model +# directory and generated model. # sphinx_gallery_start_ignore # sphinx_gallery_thumbnail_path = '/_static/images/full_heart_mesh.png' @@ -47,51 +45,52 @@ import ansys.health.heart.models as models from ansys.health.heart.pre.database_utils import get_compatible_input -# Use Fluent 24.1 for meshing. 
+# Use Fluent 2024 R1 for meshing import ansys.health.heart.pre.mesher as mesher from ansys.health.heart.utils.download import download_case_from_zenodo, unpack_case mesher._fluent_version = "24.1" -# specify a download directory. +# specify a download directory download_folder = Path.home() / "pyansys-heart" / "downloads" -# download a compatible case from the Zenodo database. +# Download a compatible case from the Zenodo database. tar_file = download_case_from_zenodo("Rodero2021", 1, download_folder, overwrite=False) -# unpack the case to get the unput .case/.vtk file. +# Unpack the case to get the input CASE or VTK file. case_file = unpack_case(tar_file) -# specify working directory. Here we use the directory of the case file. +# Specify the working directory. This code uses the directory of the CASE file. workdir = os.path.join(os.path.dirname(case_file), "FullHeart") if not os.path.isdir(workdir): os.makedirs(workdir) -# specify paths to the model, input, and part definitions. +# Specify paths to the model, input, and part definitions. path_to_model = os.path.join(workdir, "heart_model.vtu") path_to_input = os.path.join(workdir, "input_model.vtp") path_to_part_definitions = os.path.join(workdir, "part_definitions.json") ############################################################################### # .. note:: -# You may need to (manually) download the .case or .vtk files from the Strocchi2020 -# and Rodero2021 databases first. See: +# You can also manually download the CASE or VTK files from the Strocchi 2020 +# and Rodero 2021 databases. For more information, see: # -# - https://zenodo.org/records/3890034 -# - https://zenodo.org/records/4590294 +# - `A Publicly Available Virtual Cohort of Four-chamber Heart Meshes for +# Cardiac Electro-mechanics Simulations `_ +# - `Virtual cohort of adult healthy four-chamber heart meshes from CT images `_ # -# Alternatively you can make use of the download -# module instead. See the download example. 
+# Alternatively, you can simply click one of the buttons at the bottom of this page +# to download a CASE file for the Rodero 2021 database in an IPYNB, PY, or ZIP format. ############################################################################### -# Convert the .vtk file into compatible input format -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Convert the VTK file to a compatible input format +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input_geom, part_definitions = get_compatible_input( case_file, model_type="FullHeart", database="Rodero2021" ) -# Note that the input model and part definitions can be used for later use. -# save input geometry and part definitions: +# Note that the input model and part definitions can be saved for later use. +# Save input geometry and part definitions. input_geom.save(path_to_input) with open(path_to_part_definitions, "w") as f: json.dump(part_definitions, f, indent=True) @@ -99,37 +98,37 @@ ############################################################################### # Create a heart model # ~~~~~~~~~~~~~~~~~~~~ -# initialize a full heart model +# Create a full-heart model. model = models.FullHeart(working_directory=workdir) -# load input model generated in an earlier step. +# Load input model generated in an earlier step. model.load_input(input_geom, part_definitions, "surface-id") -# mesh the volume of all structural parts. +# Mesh the volume of all structural parts. model.mesh_volume(use_wrapper=True, global_mesh_size=1.5) -# update the model and extract the required (anatomical) features +# Update the model and extract the required anatomical features. model._update_parts() -# Optionally save the simulation mesh as a vtk object for "offline" inspection +# Optionally save the simulation mesh as a VTK object for "offline" inspection. 
model.mesh.save(os.path.join(model.workdir, "simulation-mesh.vtu")) model.save_model(os.path.join(model.workdir, "heart_model.vtu")) -# print some info about the processed model. +# Print some information about the processed model. print(model) -# print part names +# Print part names. print(model.part_names) ############################################################################### # Visualize results # ~~~~~~~~~~~~~~~~~ -# You can visualize and inspect the components of the model by accessing -# various properties/attributes and invoke methods. +# Visualize and inspect the components of the model by accessing +# various properties or attributes and invoking methods. print(f"Volume of LV cavity: {model.left_ventricle.cavity.volume} mm^3") print(f"Volume of LV cavity: {model.left_atrium.cavity.volume} mm^3") -# plot the remeshed model +# Plot the remeshed model. model.plot_mesh(show_edges=False) ############################################################################### @@ -137,7 +136,7 @@ # :width: 400pt # :align: center -# plot the endocardial surface of the left ventricle. +# Plot the endocardial surface of the left ventricle. model.left_ventricle.endocardium.plot(show_edges=True, color="r") ############################################################################### @@ -145,7 +144,7 @@ # :width: 400pt # :align: center -# loop over all cavities and plot these in a single window with pyvista. +# Loop over all cavities and plot them in a single window with PyVista. import pyvista as pv cavities = pv.PolyData() @@ -159,7 +158,7 @@ # :align: center # sphinx_gallery_start_ignore -# Generate static images for docs. +# Generate static images for the documentation. 
# docs_images_folder = Path(Path(__file__).resolve().parents[2], "doc", "source", "_static", "images") diff --git a/examples/preprocessor/download_case_pr.py b/examples/preprocessor/download_case_pr.py index 10f1396f2..6105aac0d 100644 --- a/examples/preprocessor/download_case_pr.py +++ b/examples/preprocessor/download_case_pr.py @@ -22,19 +22,20 @@ """ -Download a PyAnsys - Heart compatible case from Zenodo. -------------------------------------------------------- -This example shows you how to download a Strocchi 2020 or Rodero 2021 case from the Zenodo +Download a PyAnsys Heart-compatible case from Zenodo +---------------------------------------------------- +This example shows how to download a Strocchi 2020 or Rodero 2021 case from the Zenodo database. """ ############################################################################### # .. note:: -# You can also manually download the .case or .vtk files from the Strocchi2020 -# and Rodero2021 databases first. See: +# You can also manually download the CASE or VTK files from the Strocchi 2020 +# and Rodero 2021 databases. For more information, see: # -# - https://zenodo.org/records/3890034 -# - https://zenodo.org/records/4590294 +# - `A Publicly Available Virtual Cohort of Four-chamber Heart Meshes for +# Cardiac Electro-mechanics Simulations `_ +# - `Virtual cohort of adult healthy four-chamber heart meshes from CT images `_ # # Alternatively you can make use of the download # module instead. See the example below. diff --git a/examples/preprocessor/example_atrial_fiber_pr.py b/examples/preprocessor/example_atrial_fiber_pr.py index 1ae087110..61ef49045 100644 --- a/examples/preprocessor/example_atrial_fiber_pr.py +++ b/examples/preprocessor/example_atrial_fiber_pr.py @@ -21,19 +21,20 @@ # SOFTWARE. """ -Atrial fiber ------------- -This examples shows how to generate fibers with the Laplace-Dirichlet-Rule-Based-Method. 
+Generate atrial fibers +---------------------- +This example shows how to generate atrial fibers using the Laplace-Dirichlet Rule-Based +(LDRB) method. """ ############################################################################### # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable (uses DEV-104373-g6d20c20aee). +# directory, model, and LS-DYNA executable file. This example uses DEV-104373-g6d20c20aee. # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/thumbnails/atrial_fiber.png' # sphinx_gallery_end_ignore @@ -47,30 +48,30 @@ import ansys.health.heart.models as models from ansys.health.heart.simulator import BaseSimulator, DynaSettings -# specify the path to the working directory and heart model. The following path assumes +# Specify the path to the working directory and heart model. The following path assumes # that a preprocessed model is already available workdir = Path.home() / "pyansys-heart" / "downloads" / "Rodero2021" / "01" / "FullHeart" path_to_model, path_to_partinfo, _ = get_preprocessed_fullheart() -# specify LS-DYNA path +# Specify LS-DYNA path lsdyna_path = r"ls-dyna_smp" -# load heart model. +# Load heart model model: models.FourChamber = models.FourChamber(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate simulator. Change options where necessary. +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate the simulator and modify options as needed. 
############################################################################### # .. note:: -# The DynaSettings object supports several LS-DYNA versions and platforms. -# Including: "smp", "intelmpi", "msmpi", "windows", "linux", or "wsl" Choose -# the one that is appropriate for you. +# The ``DynaSettings`` object supports several LS-DYNA versions and platforms, +# including ``smp``, ``intelmpi``, ``msmpi``, ``windows``, ``linux``, and ``wsl``. +# Choose the one that works for your setup. # instantiate LS-DYNA settings of choice dyna_settings = DynaSettings( @@ -85,7 +86,7 @@ simulator.settings.load_defaults() -# remove fiber/sheet information if already exists +# remove fiber/sheet information if it already exists model.mesh.cell_data["fiber"] = np.zeros((model.mesh.n_cells, 3)) model.mesh.cell_data["sheet"] = np.zeros((model.mesh.n_cells, 3)) @@ -93,7 +94,7 @@ # Compute atrial fibers # ~~~~~~~~~~~~~~~~~~~~~ -# Compute left atrium fiber +# Compute left atrium fiber. la = simulator.compute_left_atrial_fiber() # Appendage apex point should be manually given to compute right atrium fiber @@ -102,8 +103,8 @@ ############################################################################### # .. note:: -# You may need to define an appropriate point for the right atrial appendage -# the list defines the x, y, and z coordinates close to the appendage. +# You might need to define an appropriate point for the right atrial appendage. +# The list specifies the x, y, and z coordinates close to the appendage.
############################################################################### # Plot bundle selection results diff --git a/examples/preprocessor/preprocess_truncated_LV_pr.py b/examples/preprocessor/preprocess_truncated_LV_pr.py index 0642fcfc5..5988d7376 100644 --- a/examples/preprocessor/preprocess_truncated_LV_pr.py +++ b/examples/preprocessor/preprocess_truncated_LV_pr.py @@ -24,18 +24,15 @@ Create a truncated ellipsoid model ---------------------------------- -This example shows you how to build a basic ellipsoidal model -from primitive shapes. Shape based on -`Land et al (2015) `_. +This example shows how to build a basic ellipsoidal model from primitive shapes +based on `Land et al (2015) `_. """ ############################################################################### -# Example setup -# ------------- # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory and generated model +# directory and generated model. import os from pathlib import Path @@ -44,14 +41,14 @@ import ansys.health.heart.models as models -# Use Fluent 24.1 for meshing. +# Use Fluent 2024 R1 for meshing. import ansys.health.heart.pre.mesher as mesher from ansys.health.heart.utils.misc import clean_directory mesher._fluent_version = "24.1" ############################################################################### -# Create a truncated ellipsoid using pyvista +# Create a truncated ellipsoid using PyVista # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ workdir = Path.home() / "pyansys-heart" / "simplified-geometries" / "truncated_LV" workdir = str(workdir.resolve().absolute()) @@ -66,7 +63,7 @@ ellips_endo = ellips_endo.clip(normal="z", origin=[0, 0, z_truncate]) ellips_epi = ellips_epi.clip(normal="z", origin=[0, 0, z_truncate]) -# compute x and y radius to create a closing disc. 
+# compute x and y radius to create a closing disc endo_bounds = ellips_endo.extract_feature_edges().bounds epi_bounds = ellips_epi.extract_feature_edges().bounds @@ -79,15 +76,14 @@ ellips_endo.cell_data["surface-id"] = 1 ellips_epi.cell_data["surface-id"] = 2 -# combine into single poly data object. +# combine into single polydata object heart: pv.PolyData = ellips_endo + ellips_epi + base heart.plot(show_edges=True) ############################################################################### # Convert the input to a HeartModel # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -# construct part definition dictionary +# Construct the part definition dictionary. part_definitions = { "Left ventricle": { "id": 1, @@ -99,17 +95,17 @@ } } -# use the combined polydata `heart` as input, where "surface-id" identifies each +# Use the combined polydata `heart` as input, where "surface-id" identifies each # of the relevant regions. # part definitions is used to map the remeshed model to the HeartModel parts/boundaries -# initialize left-ventricular heart model +# Initialize left-ventricular heart model. model = models.LeftVentricle(working_directory=workdir) -# clean working directory +# Clean working directory. clean_directory(workdir, [".stl", ".msh.h5", ".pickle"]) -# load input model +# Load input model. model.load_input(heart, part_definitions, "surface-id") ############################################################################### @@ -117,7 +113,8 @@ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # .. note:: -# The individual surfaces in the combined PolyData object are +# +# The individual surfaces in the combined ``PolyData`` object are # unconnected. Using the wrapper automatically fixes any small gaps # and ensures proper connectivity. 
diff --git a/examples/simulator/README.rst b/examples/simulator/README.rst index 87012c08e..8d535c79d 100644 --- a/examples/simulator/README.rst +++ b/examples/simulator/README.rst @@ -1,5 +1,4 @@ Simulator examples ================== -These examples show you how you can use the simulator module -to consume preprocessed models and consequently use these to run various -simulations. \ No newline at end of file +These examples show how to use the Simulator module +to consume preprocessed models, consequently using them to run various simulations. \ No newline at end of file diff --git a/examples/simulator/doc_EP-mechanics_simulator_fullheart_rodero.py b/examples/simulator/doc_EP-mechanics_simulator_fullheart_rodero.py index a3fe64929..ac6db801a 100644 --- a/examples/simulator/doc_EP-mechanics_simulator_fullheart_rodero.py +++ b/examples/simulator/doc_EP-mechanics_simulator_fullheart_rodero.py @@ -22,25 +22,20 @@ """ -Full heart EP-mechanics ------------------------ -This example shows you how to consume a full heart model and +Run a full-heart EP mechanics simulation +---------------------------------------- +This example shows how to consume a full-heart model and set it up for a coupled electromechanical simulation. """ ############################################################################### -# Example setup -# ------------- -# before computing the fiber orientation, purkinje network we need to load -# the required modules, load a heart model and set up the simulator. -# # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable. +# directory, heart model, and LS-DYNA executable file. # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. 
# sphinx_gallery_thumbnail_path = '_static/images/thumbnails/fh_epmeca.png' # sphinx_gallery_end_ignore @@ -54,97 +49,81 @@ from ansys.health.heart.settings.material.material import ISO, Mat295 from ansys.health.heart.simulator import DynaSettings, EPMechanicsSimulator -############################################################################### -# Example setup -# ------------- -# Perform the required imports -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Import the required modules and set relevant paths, including that of the working -# directory and generated model - -# sphinx_gallery_start_ignore -# sphinx_gallery_thumbnail_path = '/_static/images/full_heart_mesh.png' -# sphinx_gallery_end_ignore - -# accept dpf license agreement +# Accept the DPF license agreement. # https://dpf.docs.pyansys.com/version/stable/getting_started/licensing.html#ref-licensing os.environ["ANSYS_DPF_ACCEPT_LA"] = "Y" -# set working directory and path to model. Note that we assume here that that there is a -# preprocessed model called "heart_model.vtu" available in the working directory. +# Set the working directory and path to the model. This example assumes that there is a +# preprocessed model named ``heart_model.vtu`` in the working directory. workdir = Path.home() / "pyansys-heart" / "downloads" / "Rodero2021" / "01" / "FullHeart" path_to_model = str(workdir / "heart_model.vtu") ############################################################################### -# Load the full heart model +# Load the full-heart model # ~~~~~~~~~~~~~~~~~~~~~~~~~ - -# instantiate a four chamber model +# Load the full-heart model. model: models.FullHeart = models.FullHeart(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) - ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate the simulator and settings appropriately. 
- -# instantaiate dyna settings of choice +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate DYNA settings. lsdyna_path = r"your_dyna_exe" # tested with DEV-111820 dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="intelmpi", platform="wsl", num_cpus=6 ) -# instantiate simulator object +# Instantiate the simulator. simulator = EPMechanicsSimulator( model=model, dyna_settings=dyna_settings, simulation_directory=os.path.join(workdir, "ep-mechanics"), ) -# load default simulation settings +# Load default simulation settings. simulator.settings.load_defaults() -# compute fiber orientation in the ventricles and atria +# Compute fiber orientation in the ventricles and atria. simulator.compute_fibers() simulator.compute_left_atrial_fiber() simulator.compute_right_atrial_fiber(appendage=[39, 29, 98]) -# switch atria to active +# Switch the atria to active. simulator.model.left_atrium.fiber = True simulator.model.left_atrium.active = True simulator.model.right_atrium.fiber = True simulator.model.right_atrium.active = True -## Optionally, we can create more anatomical details. -## Sometimes, it's in favor of convergence rate of mechanical solve +## Optionally, you can create more anatomical details. +## Sometimes this improves the convergence rate of the mechanical solve. -# Extract elements around atrial caps and assign as a passive material +# Extract elements around atrial caps and assign as a passive material. ring = simulator.model.create_atrial_stiff_ring(radius=5) -# material is stiff and value is arbitrarily chosen +# Material is stiff, and the value is arbitrarily chosen. stiff_iso = Mat295(rho=0.001, iso=ISO(itype=-1, beta=2, kappa=10, mu1=0.1, alpha1=2)) ring.meca_material = stiff_iso -# assign default EP material as for atrial +# Assign the default EP material, as for the atria. ring.ep_material = EPMaterial.Active() -# Compute universal coordinates: +# Compute UHCs (Universal Heart Coordinates).
simulator.compute_uhc() -# Extract elements around atrialvenricular valves and assign as a passive material +# Extract elements around atrioventricular valves and assign as a passive material. simulator.model.create_stiff_ventricle_base(stiff_material=stiff_iso) -# Estimate the stress-free-configuration +# Estimate the stress-free configuration. simulator.compute_stress_free_configuration() -# Compute the conduction system +# Compute the conduction system. simulator.compute_purkinje() simulator.compute_conduction_system() ############################################################################### -# Start main simulation -# ~~~~~~~~~~~~~~~~~~~~~ +# Start the main simulation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ simulator.settings.mechanics.analysis.end_time = Quantity(800, "ms") simulator.settings.mechanics.analysis.dt_d3plot = Quantity(10, "ms") @@ -155,12 +134,13 @@ # A constant pressure is prescribed to the atria. # No circulation system is coupled with the atria. -# start main simulation +# Start main simulation. simulator.dyna_settings.num_cpus = 10 simulator.simulate() ############################################################################### -# Result in LS-PrePost +# View results in LS-PrePost +# ~~~~~~~~~~~~~~~~~~~~~~~~~~ ############################################################################### # .. only:: html diff --git a/examples/simulator/doc_EP_simulator_FullHeart_rodero.py b/examples/simulator/doc_EP_simulator_FullHeart_rodero.py index 3763f46b8..6efb99735 100644 --- a/examples/simulator/doc_EP_simulator_FullHeart_rodero.py +++ b/examples/simulator/doc_EP_simulator_FullHeart_rodero.py @@ -22,24 +22,19 @@ """ -Full-heart EP-simulator example -------------------------------- -This example shows you how to consume a full-heart model and -set it up for the main electropysiology simulation.
This examples demonstrates how -you can load a pre-computed heart model, compute the fiber direction, compute the -purkinje network and conduction system and finally simulate the electrophysiology. +Run a full-heart EP simulation +------------------------------ +This example shows how to consume a full-heart model and set it up for the +main EP (electrophysiology) simulation. It loads a pre-computed heart model +and computes the fiber orientation, Purkinje network, and conduction system. It +then simulates the electrophysiology. """ ############################################################################### -# Example setup -# ------------- -# before computing the fiber orientation, purkinje network we need to load -# the required modules, load a heart model and set up the simulator. -# # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable. +# directory, heart model, and LS-DYNA executable file. import os from pathlib import Path @@ -48,37 +43,39 @@ from ansys.health.heart.objects import Point from ansys.health.heart.simulator import DynaSettings, EPSimulator -# accept dpf license agreement +# Accept the DPF license agreement. # https://dpf.docs.pyansys.com/version/stable/getting_started/licensing.html#ref-licensing os.environ["ANSYS_DPF_ACCEPT_LA"] = "Y" -# set working directory and path to model. Note that we assume here that that there is a -# preprocessed model called "heart_model.vtu" available in the working directory. +# Set the working directory and path to the model. This example assumes that there is a +# preprocessed model named ``heart_model.vtu`` in the working directory. workdir = Path.home() / "pyansys-heart" / "downloads" / "Rodero2021" / "01" / "FullHeart" path_to_model = str(workdir / "heart_model.vtu") -# load four chamber heart model. 
+############################################################################### +# Load the full-heart model +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Load the full-heart model. model: models.FullHeart = models.FullHeart(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) - -# save model. +# Save the model. model.mesh.save(os.path.join(model.workdir, "simulation_model.vtu")) ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate the simulator and settings appropriately. +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate the simulator and define settings. -# specify LS-DYNA path (last tested working versions is intelmpi-linux-DEV-106117) +# Specify the LS-DYNA path. (The last tested working version is ``intelmpi-linux-DEV-106117``.) lsdyna_path = r"ls-dyna_msmpi.exe" -# instantaiate dyna settings of choice +# Instantiate DYNA settings. dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="intelmpi", num_cpus=6, platform="wsl" ) -# instantiate simulator. Change options where necessary. +# Instantiate the simulator, modifying options as necessary. simulator = EPSimulator( model=model, dyna_settings=dyna_settings, @@ -88,9 +85,7 @@ ############################################################################### # Load simulation settings # ~~~~~~~~~~~~~~~~~~~~~~~~ -# Here we load the default settings. - -# Define electrode positions and add them to model +# Define electrode positions and add them to the model. electrodes = [ Point(name="V1", xyz=[76.53798632905277, 167.67667039945263, 384.3139099410445]), Point(name="V2", xyz=[64.97540262482013, 134.94983038904573, 330.4783062379255]), @@ -105,21 +100,23 @@ ] model.electrodes = electrodes +# Load the default settings. 
simulator.settings.load_defaults() ############################################################################### -# Compute the fiber orientation -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Compute fiber orientation and plot the computed fibers on the entire model. +# Compute fiber orientation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute fiber orientation and plot the fibers on the entire model. ############################################################################### # .. warning:: -# Atrial fiber orientation is approximated by apex-base direction in this model +# The atrial fiber orientation is approximated by the apex-base direction. +# Development is ongoing. -# compute ventricular fibers +# Compute ventricular fibers. simulator.compute_fibers() -# compute atrial fibers +# Compute atrial fibers. simulator.model.right_atrium.active = True simulator.model.left_atrium.active = True simulator.model.right_atrium.fiber = True @@ -134,16 +131,15 @@ # :align: center ############################################################################### -# Compute conduction system -# ~~~~~~~~~~~~~~~~~~~~~~~~~ -# Compute conduction system and purkinje network and visualize. -# The action potential will propagate faster through this system -# compared to the rest of the model. +# Compute the conduction system +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute the conduction system and Purkinje network, and then visualize the results. +# The action potential propagates faster through this system compared to the rest of the model. simulator.compute_purkinje() -# by calling this method, stimulation will be at the atrioventricular node -# if you skip it, the two apex regions of the ventricles will be stimulated +# By calling this method, stimulation is at the atrioventricular node. +# If you do not call this method, the two apex regions of the ventricles are stimulated. 
simulator.compute_conduction_system() simulator.model.plot_purkinje() @@ -154,19 +150,19 @@ # :align: center ############################################################################### -# Start main simulation -# ~~~~~~~~~~~~~~~~~~~~~ +# Start the main simulation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ # Start the main EP simulation. This uses the previously computed fiber orientation -# and purkinje network to set up and run the LS-DYNA model. +# and Purkinje network to set up and run the LS-DYNA model. -# simulate using the default EP solver type (Monodomain) +# Simulate using the default EP solver type (Monodomain). simulator.simulate() -# switch to Eikonal +# Switch to Eikonal. simulator.settings.electrophysiology.analysis.solvertype = "Eikonal" simulator.simulate(folder_name="main-ep-Eikonal") -# switch to ReactionEikonal +# Switch to ReactionEikonal. simulator.settings.electrophysiology.analysis.solvertype = "ReactionEikonal" simulator.simulate(folder_name="main-ep-ReactionEikonal") diff --git a/examples/simulator/doc_EP_simulator_fourchamber.py b/examples/simulator/doc_EP_simulator_fourchamber.py index 0f313cf88..d17fabc16 100644 --- a/examples/simulator/doc_EP_simulator_fourchamber.py +++ b/examples/simulator/doc_EP_simulator_fourchamber.py @@ -22,27 +22,22 @@ """ -Four-chamber EP-simulator example --------------------------------- -This example shows you how to consume a four-cavity heart model and -set it up for the main electropysiology simulation. This examples demonstrates how -you can load a pre-computed heart model, compute the fiber direction, compute the -purkinje network and conduction system and finally simulate the electrophysiology. +Run a four-chamber heart EP simulation +-------------------------------------- +This example shows how to consume a four-chamber heart model and +set it up for the main EP (electrophysiology) simulation. It loads a pre-computed +heart model and computes the fiber direction, Purkinje network, and conduction system.
+It then simulates the electrophysiology. """ ############################################################################### -# Example setup -# ------------- -# before computing the fiber orientation, purkinje network we need to load -# the required modules, load a heart model and set up the simulator. -# # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable. +# directory, heart model, and LS-DYNA executable file. # sphinx_gallery_start_ignore -# Note that we need to put the thumbnail here to avoid weird rendering in the html page. +# Note that we need to put the thumbnail here to avoid weird rendering on the HTML page. # sphinx_gallery_thumbnail_path = '_static/images/purkinje.png' # sphinx_gallery_end_ignore @@ -54,23 +49,23 @@ from ansys.health.heart.settings.settings import DynaSettings from ansys.health.heart.simulator import EPSimulator -# accept dpf license agreement +# Accept the DPF license agreement. # https://dpf.docs.pyansys.com/version/stable/getting_started/licensing.html#ref-licensing os.environ["ANSYS_DPF_ACCEPT_LA"] = "Y" -# set working directory and path to model. Note that we expect a pre-processed model -# stored as "heart_model.vtu" in this folder. +# Set the working directory and path to the model. This example assumes that there is a +# preprocessed model named ``heart_model.vtu`` in the working directory. workdir = Path.home() / "pyansys-heart" / "downloads" / "Strocchi2020" / "01" / "FourChamber" path_to_model = str(workdir / "heart_model.vtu") -# specify LS-DYNA path (last tested working versions is intelmpi-linux-DEV-106117) +# Specify the LS-DYNA path. (The last tested working version is ``intelmpi-linux-DEV-106117``.) lsdyna_path = r"ls-dyna_msmpi.exe" -# load four chamber heart model. +# Load the four-chamber heart model. 
model: models.FourChamber = models.FourChamber(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) -# Define electrode positions and add them to model (correspond to patient 01 only) +# Define electrode positions and add them to the model. (Positions are for patient 01 only.) # Positions were defined using a template torso geometry. electrodes = [ Point(name="V1", xyz=[-29.893285751342773, 27.112899780273438, 373.30865478515625]), @@ -94,16 +89,16 @@ model.workdir = str(workdir) ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate the simulator and settings appropriately. +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate the simulator and define settings. -# instantaiate dyna settings of choice +# Instantiate DYNA settings. dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="smp", num_cpus=4, platform="windows" ) -# instantiate simulator. Change options where necessary. +# Instantiate simulator and modify options as needed. simulator = EPSimulator( model=model, dyna_settings=dyna_settings, @@ -113,14 +108,15 @@ ############################################################################### # Load simulation settings # ~~~~~~~~~~~~~~~~~~~~~~~~ -# Here we load the default settings. +# Load the default simulation settings. simulator.settings.load_defaults() ############################################################################### -# Compute Universal Ventricular Coordinates -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# The transmural coordinate is used to define the endo, mid and epi layers. +# Compute UVCs +# ~~~~~~~~~~~~ +# Compute UVCs (Universal Ventricular Coordinates). The transmural coordinate +# is used to define the endo, mid, and epi layers.
############################################################################### @@ -133,20 +129,21 @@ ############################################################################### # .. warning:: -# Atrial fiber orientation is approximated by apex-base direction, the development is undergoing. +# The atrial fiber orientation is approximated by the apex-base direction. +# Development is ongoing. -# compute ventricular fibers +# Compute ventricular fibers. simulator.compute_fibers() -# compute atrial fibers +# Compute atrial fibers. simulator.model.right_atrium.active = True simulator.model.left_atrium.active = True simulator.model.right_atrium.fiber = True simulator.model.left_atrium.fiber = True -# Strocchi/Rodero data has marked left atrium appendage point +# Strocchi/Rodero data has a marked left atrium appendage point. simulator.compute_left_atrial_fiber() -# need to manually select the right atrium appendage point +# Select the right atrium appendage point. simulator.compute_right_atrial_fiber(appendage=[-33, 82, 417]) simulator.model.plot_fibers(n_seed_points=2000) @@ -157,16 +154,15 @@ # :align: center ############################################################################### -# Compute conduction system -# ~~~~~~~~~~~~~~~~~~~~~~~~~ -# Compute conduction system and purkinje network and visualize. -# The action potential will propagate faster through this system -# compared to the rest of the model. +# Compute the conduction system +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute the conduction system and Purkinje network, and then visualize the results. +# The action potential propagates faster through this system compared to the rest of the model. simulator.compute_purkinje() -# by calling this method, stimulation will at Atrioventricular node -# if you skip it, stimulation will at apex nodes of two ventricles +# By calling this method, stimulation occurs at the Atrioventricular node. 
+# If you do not call this method, stimulation occurs at the apex nodes of the two ventricles. simulator.compute_conduction_system() simulator.model.plot_purkinje() @@ -177,11 +173,11 @@ # :align: center ############################################################################### -# Start main simulation -# ~~~~~~~~~~~~~~~~~~~~~ +# Start the main simulation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ # Start the main EP simulation. This uses the previously computed fiber orientation -# and purkinje network to set up and run the LS-DYNA model using different solver -# options +# and Purkinje network to set up and run the LS-DYNA model with different solver +# options. simulator.simulate() # The two following solves only work with LS-DYNA DEV-110013 or later @@ -192,7 +188,7 @@ ############################################################################### -# We can plot transmembrane potential in LS-PrePost +# View a plot of the transmembrane potential in LS-PrePost. ############################################################################### # .. only:: html diff --git a/examples/simulator/doc_mechanics_simulator_fullheart_rodero_01.py b/examples/simulator/doc_mechanics_simulator_fullheart_rodero_01.py index 2cfe5378c..80c25a9ad 100644 --- a/examples/simulator/doc_mechanics_simulator_fullheart_rodero_01.py +++ b/examples/simulator/doc_mechanics_simulator_fullheart_rodero_01.py @@ -22,25 +22,19 @@ """ -Full-heart mechanics --------------------- -This example shows you how to consume a preprocessed full heart model and -set it up for the main mechanical simulation. This examples demonstrates how -you can load a pre-computed heart model, compute the fiber direction, compute the -stress free configuration, and finally simulate the mechanical model. +Run a full-heart mechanics simulation +------------------------------------- +This example shows how to consume a preprocessed full-heart model and +set it up for the main mechanical simulation. 
It loads a pre-computed heart model +and computes the fiber orientation and stress-free configuration. It then runs the +simulation. """ ############################################################################### -# Example setup -# ------------- -# before computing the fiber orientation, and stress free configuration we -# need to load the required modules, load a heart model and configure the -# mechanical simulator. -# # Perform the required imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Import the required modules and set relevant paths, including that of the working -# directory, model, and ls-dyna executable. +# directory, heart model, and LS-DYNA executable file. import os from pathlib import Path @@ -48,36 +42,39 @@ import ansys.health.heart.models as models from ansys.health.heart.simulator import DynaSettings, MechanicsSimulator -# accept dpf license agreement +# Accept the DPF license agreement. # https://dpf.docs.pyansys.com/version/stable/getting_started/licensing.html#ref-licensing os.environ["ANSYS_DPF_ACCEPT_LA"] = "Y" -# set working directory and path to model. Note that we assume here that that there is a -# preprocessed model called "heart_model.vtu" available in the working directory. +# Set the working directory and path to the model. This example assumes that there is a +# preprocessed model named ``heart_model.vtu`` in the working directory. workdir = Path.home() / "pyansys-heart" / "downloads" / "Rodero2021" / "01" / "FullHeart" path_to_model = str(workdir / "heart_model.vtu") -# load the full heart. +############################################################################### +# Load the full-heart model +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Load the full-heart model.
model: models.FullHeart = models.FullHeart(working_directory=workdir) model.load_model_from_mesh(path_to_model, path_to_model.replace(".vtu", ".partinfo.json")) ############################################################################### -# Instantiate the simulator object -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# instantiate your DynaSettings and Simulator objects. -# Change options where necessary. Note that you may need to configure your environment -# variables if you choose to use a `mpi` version of LS-DYNA. +# Instantiate the simulator +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Instantiate objects for the DYNA settings and simulator, modifying options as necessary. +# Note that you might need to configure your environment +# variables if you choose to use an ``mpi`` version of LS-DYNA. -# instantiate dyna settings object +# Set LS-DYNA path. lsdyna_path = "lsdyna_intelmpi" -# instantiate dyna settings object +# Instantiate DYNA settings object. dyna_settings = DynaSettings( lsdyna_path=lsdyna_path, dynatype="intelmpi", num_cpus=8, ) -# instantiate simulator object +# Instantiate simulator object. simulator = MechanicsSimulator( model=model, dyna_settings=dyna_settings, @@ -87,18 +84,18 @@ ############################################################################### # Load simulation settings # ~~~~~~~~~~~~~~~~~~~~~~~~ -# Here we load the default settings. +# Load the default simulation settings. simulator.settings.load_defaults() ############################################################################### -# Compute the fiber orientation -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Compute fiber orientation and plot the computed fibers on the entire model. +# Compute fiber orientation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ +# Compute the fiber orientation and plot the fibers on the entire model. simulator.compute_fibers() -# # Plot the resulting fiber orientation +# Plot the resulting fiber orientation. 
simulator.model.plot_fibers(n_seed_points=2000) ############################################################################### @@ -107,25 +104,24 @@ # :align: center ############################################################################### -# Compute the stress free configuration +# Compute the stress-free configuration # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Compute the stress free configuration. That is, when imaged under diastole -# we need to approximate the initial stress at `t=0`. The stress free configuration -# is computed through Rausch' method. +# Compute the stress-free configuration using the Rausch method. When imaged under diastole, +# this approximates the initial stress at ``t=0``. simulator.compute_stress_free_configuration() ############################################################################### -# Start main simulation -# ~~~~~~~~~~~~~~~~~~~~~ +# Start the main simulation +# ~~~~~~~~~~~~~~~~~~~~~~~~~ # Start the main mechanical simulation. This uses the previously computed fiber orientation -# and stress free configuration and runs the final LS-DYNA heart model. +# and stress-free configuration and runs the final LS-DYNA heart model. simulator.simulate() # sphinx_gallery_start_ignore -# Generate static images for docs. +# Generate static images for documentation. # from pathlib import Path diff --git a/src/ansys/health/heart/__init__.py b/src/ansys/health/heart/__init__.py index d0b859da6..76125b0bd 100644 --- a/src/ansys/health/heart/__init__.py +++ b/src/ansys/health/heart/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-"""PyAnsys Heart is a Python framework for heart modeling using ANSYS tools.""" +"""PyAnsys Heart is a Python framework for heart modeling using Ansys tools.""" import os @@ -44,4 +44,4 @@ pass __version__ = importlib_metadata.version("ansys-health-heart") -"""The version of pyansys-heart.""" +"""Version of PyAnsys Heart.""" diff --git a/src/ansys/health/heart/exceptions.py b/src/ansys/health/heart/exceptions.py index 33fb41043..7a9cce1ee 100644 --- a/src/ansys/health/heart/exceptions.py +++ b/src/ansys/health/heart/exceptions.py @@ -20,11 +20,11 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""Custom exceptions for the PyAnsys-Heart package.""" +"""Custom exceptions for PyAnsys Heart.""" class LSDYNATerminationError(BaseException): - """Exception raised when `Normal Termination` is not found in the LS-DYNA logs.""" + """Exception raised when ``Normal Termination`` is not found in the LS-DYNA logs.""" def __init__(self): super().__init__("The LS-DYNA process did not terminate as expected.") @@ -57,4 +57,4 @@ class InvalidHeartModelError(Exception): class LSDYNANotFoundError(FileNotFoundError): - """LSDYNA executable not found.""" + """Exception raised when the LS-DYNA executable file is not found.""" diff --git a/src/ansys/health/heart/logger.py b/src/ansys/health/heart/logger.py index dc615c28e..6819a7e81 100644 --- a/src/ansys/health/heart/logger.py +++ b/src/ansys/health/heart/logger.py @@ -22,14 +22,14 @@ """Logging module. -The logging module supplies a general framework for logging in PyAnsys Heart-lib. -This module is built upon `logging `_ library -and it does not intend to replace it rather provide a way to interact between -``logging`` and PyAnsys Heart. +The logging module supplies a general framework for logging in PyAnsys Heart. +This module is built on the Python `logging `_ +library.
It does not intend to replace it but rather provide a way to interact between +the Python ``logging`` library and PyAnsys Heart. -The loggers used in the module include the name of the instance which +The loggers used in the module include the name of the instance, which is intended to be unique. This name is printed in all the active -outputs and it is used to track the different PyAnsys Heart modules. +outputs and is used to track the different PyAnsys Heart modules. Usage @@ -37,23 +37,23 @@ Global logger ~~~~~~~~~~~~~ -There is a global logger named ``PyAnsys Heart_global`` which is created at -``ansys.health.heart.__init__``. If you want to use this global logger, -you must call at the top of your module: +There is a global logger named ``PyAnsys Heart_global`` that is created when +``ansys.health.heart.__init__`` is called. If you want to use this global logger, +you must call it at the top of your module: .. code:: python from ansys.health.heart import LOG -You could also rename it to avoid conflicts with other loggers (if any): +You can rename this logger to avoid conflicts with other loggers (if any): .. code:: python from ansys.health.heart import LOG as logger -It should be noticed that the default logging level of ``LOG`` is ``ERROR``. -To change this and output lower level messages you can use the next snippet: +The default logging level of ``LOG`` is ``ERROR``. +You can change this level and output lower-level messages with this code: .. code:: python @@ -62,16 +62,15 @@ LOG.std_out_handler.setLevel("DEBUG") # If present. -Alternatively: +Alternatively, you can ensure all the handlers are set to the input log level +with this code: .. code:: python LOG.setLevel("DEBUG") -This way ensures all the handlers are set to the input log level. - -By default, this logger does not log to a file. If you wish to do so, -you can add a file handler using: +This logger does not log to a file by default. If you want, you can +add a file handler with this code: ..
code:: python @@ -80,12 +79,12 @@ file_path = os.path.join(os.getcwd(), "pymapdl.log") LOG.log_to_file(file_path) -This sets the logger to be redirected also to that file. If you wish +This also sets the logger to be redirected to this file. If you want to change the characteristics of this global logger from the beginning -of the execution, you must edit the file ``__init__`` in the directory -``ansys.health.heart``. +of the execution, you must edit the file ``__init__`` file in the +``ansys.health.heart`` directory. -To log using this logger, just call the desired method as a normal logger. +To log using this logger, call the desired method as a normal logger: .. code:: pycon @@ -98,8 +97,8 @@ Other loggers ~~~~~~~~~~~~~ -You can create your own loggers using python ``logging`` library as -you would do in any other script. There shall no be conflicts between +You can create your own loggers using the Python ``logging`` library as +you would do in any other script. There would be no conflicts between these loggers. """ @@ -157,18 +156,14 @@ class PyAnsysHeartCustomAdapter(logging.LoggerAdapter): - """Custom logging adapter for PyAnsys Heart. - - Notes - ----- - This is key to keep the reference to the PyAnsys Heart instance name dynamic. + """Keeps the reference to the PyAnsys Heart service instance dynamic. - If we use the standard approach which is supplying ``extra`` input - to the logger, we would need to keep inputting PyAnsys Heart instances - every time we do a log. + If you use the standard approach, which is supplying *extra* input + to the logger, you must input PyAnsys Heart service instances + each time that you log a message. - Using adapters we just need to specify the PyAnsys Heart instance we refer - to once. + Using adapters, you only need to specify the PyAnsys Heart service instance + that you are referring to once. 
""" level = ( @@ -194,28 +189,28 @@ def process(self, msg: str, kwargs: MutableMapping[str, Dict[str, str]]): def log_to_file( self, filename: str = FILE_NAME, level: LOG_LEVEL_TYPE = LOG_LEVEL_FILE ) -> None: - """Add file handler to logger. + """Add a file handler to the logger. Parameters ---------- - filename : str, optional - Name of the file where the logs are recorded. By default FILE_NAME - level : str or int, optional - Level of logging. E.x. 'DEBUG'. By default LOG_LEVEL + filename : str, default: FILE_NAME + Name of the file to record logs to. + level : str or int, default: LOG_LEVEL + Level of logging, such as ``DEBUG``. """ addfile_handler(self.logger, filename=filename, level=level, write_headers=True) self.file_handler = self.logger.file_handler def log_to_stdout(self, level: LOG_LEVEL_TYPE = LOG_LEVEL_STDOUT) -> None: - """Add standard output handler to the logger. + """Add a standard output handler to the logger. Parameters ---------- - level : str or int, optional - Level of logging record. By default LOG_LEVEL + level : str or int, default: LOG_LEVEL + Level of the logging record. """ if self.std_out_handler: - raise Exception("Stdout logger already defined.") + raise Exception("Stdout logger is already defined.") add_stdout_handler(self.logger, level=level) self.std_out_handler = self.logger.std_out_handler @@ -242,11 +237,11 @@ def _format(self, record) -> str: else: values = record.__dict__ - # We can do here any changes we want in record, for example adding a key. + # We can make any changes that we want in the record here. For example, adding a key. - # We could create an if here if we want conditional formatting, and even + # We could create an ``if`` here if we want conditional formatting, and even # change the record.__dict__. - # Since now we don't want to create conditional fields, it is fine to keep + # Because we don't want to create conditional fields now, it is fine to keep # the same MSG_FORMAT for all of them. 
# For the case of logging exceptions to the logger. @@ -256,7 +251,7 @@ def _format(self, record) -> str: class _PyAnsysHeartFormatter(logging.Formatter): - """Customized ``Formatter`` class used to overwrite the defaults format styles.""" + """Provides a ``Formatter`` class for overwriting default format styles.""" def __init__( self, @@ -275,7 +270,7 @@ def __init__( class InstanceFilter(logging.Filter): - """Ensure that instance_name record always exists.""" + """Ensures that the ``instance_name`` record always exists.""" def filter(self, record: logging.LogRecord): """Filter record.""" @@ -287,31 +282,26 @@ def filter(self, record: logging.LogRecord): class Logger: - """Logger used for each PyAnsys Heart session. + """Provides the logger used for each PyAnsys Heart session. - Notes - ----- - This class allows you to add handlers to the logger to output to a file or - standard output. + This class lets you add handlers to the logger to output messages to a file or + to the standard output (stdout). Parameters ---------- - level : int, optional + level : int, default: logging.DEBUG Logging level to filter the message severity allowed in the logger. - The default is ``logging.DEBUG``. - to_file : bool, optional - Write log messages to a file. The default is ``False``. - to_stdout : bool, optional - Write log messages into the standard output. The default is - ``True``. - filename : str, optional - Name of the file where log messages are written to. - The default is ``FILE_NAME``. + to_file : bool, default: False + Whether to write log messages to a file. + to_stdout : bool, default: True + Whether to write the log messages to the standard output. + filename : str, default: FILE_NAME + Name of the file to write log messages to. Examples -------- - Demonstrate logger usage from a PyAnsys Heart instance. This is automatically - created when creating an PyAnsys Heart instance. 
+ Demonstrate logger usage from a PyAnsys Heart instance, which is automatically + created when a PyAnsys Heart instance is created. Import the global PyAnsys Heart logger and add a file output handler. @@ -319,7 +309,6 @@ class Logger: >>> from ansys.health.heart import LOG >>> file_path = os.path.join(os.getcwd(), "PyAnsys Heart.log") >>> LOG.log_to_file(file_path) - """ file_handler: Optional[logging.FileHandler] = None @@ -334,19 +323,19 @@ def __init__( to_stdout: bool = True, filename: str = FILE_NAME, ): - """Initialize main logger class for PyAnsys Heart. + """Initialize the main logger class for PyAnsys Heart. Parameters ---------- - level : str or int, optional - Level of logging as defined in the package ``logging``. By default 'DEBUG'. - to_file : bool, optional - To record the logs in a file, by default ``False``. - to_stdout : bool, optional - To output the logs to the standard output, which is the - command line. By default ``True``. - filename : str, optional - Name of the output file. By default ``PyAnsys Heart.log``. + level : str or int, default: logging.DEBUG + Level of logging as defined in the ``logging`` package. + to_file : bool, default: False + Whether to write log messages to a file. + to_stdout : bool, default: True + Whether to write log messages to the standard output (stdout), which is the + command line. + filename : str, default: FILE_NAME + Name of the output file, which is ``'PyAnsys Heart.log'`` by default. """ # create default main logger self.logger: logging.Logger = logging.getLogger("PyAnsys Heart_global") @@ -382,27 +371,25 @@ def log_to_file( level: LOG_LEVEL_TYPE = LOG_LEVEL_FILE, remove_other_file_handlers: bool = False, ) -> None: - """Add file handler to logger. + """Add a file handler to the logger. Parameters ---------- - filename : str, optional - Name of the file where the logs are recorded. By default - ``'PyAnsys Heart.log'``. - level : str or int, optional - Level of logging. By default ``'DEBUG'``. 
- remove_other_file_handlers : bool, optional - Flag indicating whether to remove all other file handlers, by default False + filename : str, default: FILE_NAME + Name of the file to record logs to, which is ``'PyAnsys Heart.log'`` by default. + level : str or int, default: LOG_LEVEL_FILE + Level of logging, which is ``'DEBUG'`` by default. + remove_other_file_handlers : bool, default: False + Whether to remove all other file handlers. Examples -------- - Write to ``PyAnsys Heart.log`` in the current working directory. + Write to the ``PyAnsys Heart.log`` file in the current working directory. >>> from ansys.health.heart import LOG >>> import os >>> file_path = os.path.join(os.getcwd(), "PyAnsys Heart.log") >>> LOG.log_to_file(file_path) - """ if remove_other_file_handlers: _clear_all_file_handlers(self) @@ -410,17 +397,23 @@ def log_to_file( addfile_handler(self, filename=filename, level=level, write_headers=True) def log_to_stdout(self, level: LOG_LEVEL_TYPE = LOG_LEVEL_STDOUT): - """Add standard output handler to the logger. + """Add a standard output handler to the logger. Parameters ---------- - level : str or int, optional - Level of logging record. By default ``'DEBUG'``. + level : str or int, default: LOG_LEVEL_STDOUT + Level of logging record, which is ``'DEBUG'`` by default. """ add_stdout_handler(self, level=level) def setLevel(self, level: LOG_LEVEL_TYPE = "DEBUG"): # noqa: N802 - """Change the log level of the object and the attached handlers.""" + """Set the log level for the logger and its handlers. + + Parameters + ---------- + level : str or int, default: "DEBUG" + The logging level to set. + """ if isinstance(level, str): level = string_to_loglevel[cast(LOG_LEVEL_STRING_TYPE, level.upper())] self.logger.setLevel(level) @@ -431,8 +424,8 @@ def setLevel(self, level: LOG_LEVEL_TYPE = "DEBUG"): # noqa: N802 def _make_child_logger(self, suffix: str, level: Optional[LOG_LEVEL_TYPE]) -> logging.Logger: """Create a child logger.
- Uses ``getChild`` or copying attributes between ``pymapdl_global`` - logger and the new one. + This method uses the ``getChild()``method or copies attributes between the + ``pymapdl_global`` logger and the new one. """ logger = logging.getLogger(suffix) logger.std_out_handler = None @@ -447,7 +440,7 @@ def _make_child_logger(self, suffix: str, level: Optional[LOG_LEVEL_TYPE]) -> lo logger.std_out_handler = new_handler if level: - # The logger handlers are copied and changed the loglevel is + # The logger handlers are copied and changed. The loglevel is # the specified log level is lower than the one of the # global. if isinstance(level, str): @@ -476,18 +469,22 @@ def _make_child_logger(self, suffix: str, level: Optional[LOG_LEVEL_TYPE]) -> lo def add_child_logger(self, suffix: str, level: Optional[LOG_LEVEL_TYPE] = None): """Add a child logger to the main logger. - This logger is more general than an instance logger which is designed to - track the state of the PyAnsys Heart instances. + This logger is more general than an instance logger, which is designed to + track the state of PyAnsys Heart instances. + + If the logging level is in the arguments, a new logger with a reference + to the ``_global`` logger handlers is created instead of a child logger. + If the logging level is in the arguments, a new logger with a reference - to the ``_global`` logger handlers is created instead of a child. + to the ``_global`` logger handlers is created instead of a child logger. Parameters ---------- suffix : str Name of the logger. - level : str or int, optional - Level of logging + level : str or int, default: None + Level of logging. Returns ------- @@ -506,7 +503,13 @@ def __getitem__(self, key): raise KeyError(f"There is no instances with name {key}.") def add_handling_uncaught_expections(self, logger: logging.Logger): - """Redirect the output of an exception to the logger.""" + """Redirect the output of an exception to a logger. 
+ + Parameters + ---------- + logger : str + Name of the logger. + """ def handle_exception( exc_type: Type[BaseException], @@ -532,13 +535,13 @@ def addfile_handler(logger, filename=FILE_NAME, level=LOG_LEVEL_STDOUT, write_he ---------- logger : logging.Logger Logger to add the file handler to. - filename : str, default: "pyconv-de.log" - Name of the output file. + filename : str, default: FILE_NAME + Name of the output file, which is ``'pyconv-de.log'`` by default. level : int, default: 10 Level of logging. The default is ``10``, in which case the ``logging.DEBUG`` level is used. write_headers : bool, default: False - Whether to write the headers to the file. + Whether to write headers to the file. Returns ------- @@ -570,7 +573,7 @@ def _clear_all_file_handlers(logger: Logger) -> Logger: Parameters ---------- logger : Logger - Logger from which to clear the file handlers. + Logger to clear file handlers from. Returns ------- @@ -593,8 +596,8 @@ def add_stdout_handler(logger, level=LOG_LEVEL_STDOUT, write_headers=False): Parameters ---------- logger : logging.Logger - Logger to add the file handler to. - level : int, default: 10 + Logger to add the stdout handler to. + level : int, default: ``10`` Level of logging. The default is ``10``, in which case the ``logging.DEBUG`` level is used. 
write_headers : bool, default: False diff --git a/src/ansys/health/heart/models.py b/src/ansys/health/heart/models.py index 95fe146e9..0e56c0a2c 100644 --- a/src/ansys/health/heart/models.py +++ b/src/ansys/health/heart/models.py @@ -61,14 +61,14 @@ def _get_axis_from_field_data( mesh: Mesh | pv.UnstructuredGrid, axis_name: Literal["l4cv_axis", "l2cv_axis", "short_axis"] ) -> dict: - """Get the axis from mesh field data.""" + """Get axis from the mesh field data.""" try: return { "center": mesh.field_data[axis_name][0], "normal": mesh.field_data[axis_name][1], } except KeyError: - LOGGER.info(f"Failed to retrieve {axis_name} from mesh field data") + LOGGER.info(f"Failed to retrieve {axis_name} from the mesh field data") return None @@ -83,24 +83,24 @@ def _set_field_data_from_axis( return None data = np.array([value for value in axis.values()]) if data.shape != (2, 3): - LOGGER.info("Data has wrong shape, expecting (2,3) shaped data.") + LOGGER.info("Data has wrong shape. Expecting (2,3) shaped data.") return None mesh.field_data[axis_name] = data return mesh def _read_purkinje_from_kfile(filename: pathlib.Path): - """Read purkinje from k file. + """Read Purkinje from a k file. Parameters ---------- filename : pathlib.Path - Purkinje filename. + Full path to the K file. Returns ------- _type_ - Beam data extracted from file: beam_nodes,edges,mask,pid + Beam data extracted from file: beam_nodes, edges, mask, pid """ # Open file and import beams and created nodes with open(filename, "r") as file: @@ -143,8 +143,8 @@ def _set_workdir(workdir: pathlib.Path | str = None) -> str: Parameters ---------- - workdir : pathlib.Path | str, optional - Path to desired working directory, by default None + workdir : pathlib.Path | str, default: None + Path to the desired working directory. 
Returns ------- @@ -172,7 +172,7 @@ class HeartModel: @property def parts(self) -> List[Part]: - """Return list of parts.""" + """List of parts.""" parts = [] for key, value in self.__dict__.items(): attribute = getattr(self, key) @@ -182,7 +182,7 @@ def parts(self) -> List[Part]: @property def part_names(self) -> List[str]: - """Return list of part names.""" + """List of part names.""" part_names = [] for part in self.parts: part_names.append(part.name) @@ -190,47 +190,47 @@ def part_names(self) -> List[str]: @property def part_ids(self) -> List[int]: - """Return list of used part ids.""" + """List of used part IDs.""" return [part.pid for part in self.parts] @property def surfaces(self) -> List[SurfaceMesh]: - """Return list of all defined surfaces.""" + """List of all defined surfaces.""" return [s for p in self.parts for s in p.surfaces] @property def surface_names(self) -> List[str]: - """Return list of all defined surface names.""" + """List of all defined surface names.""" return [s.name for s in self.surfaces] @property def surface_ids(self) -> List[str]: - """Return list of all defined surface names.""" + """List of all defined surface IDs.""" return [s.id for s in self.surfaces] @property def cavities(self) -> List[Cavity]: - """Return list of cavities in the model.""" + """List of all cavities in the model.""" return [part.cavity for part in self.parts if part.cavity] @property def part_name_to_part_id(self) -> dict: - """Dictionary that maps the part name to the part id.""" + """Dictionary that maps the part name to the part ID.""" return {p.name: p.pid for p in self.parts} @property def part_id_to_part_name(self) -> dict: - """Dictionary that maps part id to part name.""" + """Dictionary that maps the part ID to the part name.""" return {p.pid: p.name for p in self.parts} @property def surface_name_to_surface_id(self) -> dict: - """Dictionary that maps surface name to surface id.""" + """Dictionary that maps the surface name to the surface ID.""" 
return {s.name: s.id for p in self.parts for s in p.surfaces} @property def surface_id_to_surface_name(self) -> dict: - """Dictionary that maps surface name to surface id.""" + """Dictionary that maps the surface ID to the surface name.""" return {s.id: s.name for p in self.parts for s in p.surfaces} @property @@ -265,7 +265,7 @@ def short_axis(self, axis: dict): @property def cap_centroids(self): - """Return list of cap centroids.""" + """List of cap centroids.""" return [ Point(name=c.name + "_center", xyz=c.centroid, node_id=c.global_centroid_id) for p in self.parts @@ -273,17 +273,16 @@ def cap_centroids(self): ] def __init__(self, working_directory: pathlib.Path | str = None) -> None: - """Initialize the HeartModel. + """Initialize the heart model. Parameters ---------- - working_directory : pathlib.Path | str, optional - Path to desired working directory, by default None + working_directory : pathlib.Path | str, default: None + Path to desired working directory. Notes ----- - Note that if no working directory is specified it will default to the current - working directory. + If no working directory is specified, the current working directory is used. """ self.workdir = _set_workdir(working_directory) """Working directory.""" @@ -296,7 +295,7 @@ def __init__(self, working_directory: pathlib.Path | str = None) -> None: #! TODO: non-functional flag. Remove or replace. self._add_blood_pool: bool = False - """Flag indicating whether to add a blood pool mesh (Experimental).""" + """Flag indicating whether a blood pool mesh is added.
(Experimental)""" self._input: _InputModel = None """Input model.""" @@ -305,7 +304,7 @@ def __init__(self, working_directory: pathlib.Path | str = None) -> None: """Add any subparts.""" self._set_part_ids() - """Set incremental part ids.""" + """Set incremental part IDs.""" self.electrodes: List[Point] = [] """Electrodes positions for ECG computing.""" @@ -317,7 +316,7 @@ def __init__(self, working_directory: pathlib.Path | str = None) -> None: """Electrodes positions for ECG computing.""" self._part_info = {} - """Information about all the parts in the model.""" + """Information about all parts in the model.""" self._short_axis: dict = None """Short axis.""" @@ -334,28 +333,28 @@ def __str__(self): # TODO: There is overlap with the input module. def _get_parts_info(self): - """Get the id to model map that allows reconstructing the model from a mesh object.""" + """Get the ID to the model map that allows reconstructing the model from a mesh object.""" for part in self.parts: self._part_info.update(part._get_info()) return self._part_info def create_part_by_ids(self, eids: List[int], name: str) -> Union[None, Part]: - """Create a new part by element ids. + """Create a part by element IDs. Parameters ---------- eids : List[int] - element id list + List of element IDs. name : str - part name + Part name. Returns ------- Union[None, Part] - return the part if succeed + Part if successful """ if len(eids) == 0: - LOGGER.error(f"Failed to create {name}. Element list is empty") + LOGGER.error(f"Failed to create {name}. 
Element list is empty.") return None if name in [p.name for p in self.parts]: @@ -366,7 +365,7 @@ def create_part_by_ids(self, eids: List[int], name: str) -> Union[None, Part]: try: part.element_ids = np.setdiff1d(part.element_ids, eids) except ValueError: - LOGGER.error(f"Failed to create part {name}") + LOGGER.error(f"Failed to create part {name}.") return None self.add_part(name) @@ -377,14 +376,14 @@ def create_part_by_ids(self, eids: List[int], name: str) -> Union[None, Part]: return new_part def add_purkinje_from_kfile(self, filename: pathlib.Path, name: str) -> None: - """Read an LS-DYNA file containing purkinje beams and nodes. + """Read an LS-DYNA file containing Purkinje beams and nodes. Parameters ---------- filename : pathlib.Path - + Full path to the LS-DYNA file. name : str - beamnet name + Beamnet name. """ beam_nodes, edges, mask, pid = _read_purkinje_from_kfile(filename) @@ -394,7 +393,7 @@ def add_purkinje_from_kfile(self, filename: pathlib.Path, name: str) -> None: connectivity = np.empty_like(edges) np.copyto(connectivity, edges) - # create ids of solid points and fill connectivity + # create IDs of solid points and fill connectivity _, _, inverse_indices = np.unique( connectivity[np.logical_not(mask)], return_index=True, return_inverse=True ) @@ -419,7 +418,7 @@ def load_input(self, input_vtp: pv.PolyData, part_definitions: dict, scalar: str Parameters ---------- input_vtp : pv.PolyData - The input surface mesh, represented by a VTK PolyData object. + Input surface mesh represented by a VTK ``PolyData`` object. part_definitions : dict Part definitions of the input model. Each part is enclosed by N number of boundaries. scalar : str @@ -431,7 +430,7 @@ def load_input(self, input_vtp: pv.PolyData, part_definitions: dict, scalar: str scalar=scalar, ) if self._input is None: - LOGGER.error("Failed to initialize input model. Please check input arguments.") + LOGGER.error("Failed to initialize input model. 
Check the input arguments.") exit() return @@ -450,22 +449,20 @@ def mesh_volume( Parameters ---------- - use_wrapper : bool, optional - Flag for switch to non-manifold mesher, by default False - overwrite_existing_mesh : bool, optional - Flag indicating whether to overwrite the existing .msh.h5 mesh, by default True - global_mesh_size : float, optional - Global mesh size used for the generated mesh, by default 1.5 - path_to_fluent_mesh : str, optional - Path to the generated Fluent .msh.h5 mesh, by default None - mesh_size_per_part : dict, optional - Dictionary specifying the target mesh size for each part, by default None. - _global_wrap_size : float, optional - Global size used for setting up the size-field for the shrink-wrap algorithm, - by default None - _wrap_size_per_part : dict, optional - Per part size used for setting up the size-field for the shrink-wrap algorithm, - by default None + use_wrapper : bool, default: False + Whether to use the non-manifold mesher. + overwrite_existing_mesh : bool, default: True + Whether to overwrite the existing MSH.H5 mesh. + global_mesh_size : float, default: 1.5 + Global mesh size for the generated mesh. + path_to_fluent_mesh : str, default: None + Path to the generated Fluent MSH.H5 mesh. + mesh_size_per_part : dict, default: None + Dictionary specifying the target mesh size for each part. + _global_wrap_size : float, default: 1.5 + Global size for setting up the size-field for the shrink-wrap algorithm. + _wrap_size_per_part : dict, default: None + Per part size for setting up the size-field for the shrink-wrap algorithm. Examples -------- @@ -482,14 +479,15 @@ def mesh_volume( Notes ----- - When the input surfaces are non-manifold the wrapper tries + When the input surfaces are non-manifold, the wrapper tries to reconstruct the surface and parts. Inevitably this leads to reconstruction errors. Nevertheless, in many instances this approach is - robuster than meshing from a manifold surface. 
Moreover, any clear interface + more robust than meshing from a manifold surface. Moreover, any clear interface between parts is potentially lost. - When mesh_size_per_part is incomplete, remaining part sizes default to the + + When the ``mesh_size_per_part`` is incomplete, remaining part sizes default to the global mesh size. This is an experimental setting. Any wrap sizes given - as input argument are ignored when the wrapper is not used. + as input arguments are ignored when the wrapper is not used. """ if not path_to_fluent_mesh: path_to_fluent_mesh = os.path.join(self.workdir, "simulation_mesh.msh.h5") @@ -525,8 +523,8 @@ def _mesh_fluid_volume(self, remesh_caps: bool = True): Parameters ---------- - remesh_caps : bool, optional - Flag indicating whether to remesh the caps of each cavity, by default True + remesh_caps : bool, default: True + Whether to remesh the caps of each cavity. """ # get all relevant boundaries for the fluid cavities: substrings_include = ["endocardium", "valve-plane", "septum"] @@ -546,11 +544,13 @@ def _mesh_fluid_volume(self, remesh_caps: bool = True): caps = [c._mesh for p in self.parts for c in p.caps] if len(boundaries_fluid) == 0: - LOGGER.debug("Meshing of fluid cavities not possible. No fluid surfaces detected.") + LOGGER.debug( + "Meshing of fluid cavities is not possible. No fluid surfaces are detected." + ) return if len(caps) == 0: - LOGGER.debug("Meshing of fluid cavities not possible. No caps detected.") + LOGGER.debug("Meshing of fluid cavities is not possible. 
No caps are detected.") return LOGGER.info("Meshing fluid cavities...") @@ -591,7 +591,7 @@ def _mesh_fluid_volume(self, remesh_caps: bool = True): return def get_part(self, name: str, by_substring: bool = False) -> Union[Part, None]: - """Get specific part based on part name.""" + """Get a specific part based on a part name.""" found = False for part in self.parts: if part.name == name: @@ -630,10 +630,10 @@ def plot_mesh(self, show_edges: bool = True, color_by: str = "_volume-id"): Parameters ---------- - show_edges : bool, optional - Whether to plot the edges, by default True - color_by : str, optional - Color by cell/point data, by default "_volume-id" + show_edges : bool, default: True + Whether to plot the edges. + color_by : str, default: ``'_volume-id'`` + Color by cell/point data. Examples -------- @@ -648,12 +648,12 @@ def plot_mesh(self, show_edges: bool = True, color_by: str = "_volume-id"): return def plot_part(self, part: Part): - """Plot a part in mesh. + """Plot a part in the mesh. Parameters ---------- part : Part - part to highlight in mesh + Part to highlight in the mesh. Examples -------- @@ -675,10 +675,11 @@ def plot_fibers(self, n_seed_points: int = 1000): Parameters ---------- - plot_raw_mesh : bool, optional - Flag indicating whether to plot the streamlines on the raw mesh, by default False - n_seed_points : int, optional - Number of seed points. Recommended to use 5000, by default 1000 + plot_raw_mesh : bool, default: False + Whether to plot the streamlines on the raw mesh. + n_seed_points : int, default: 1000 + Number of seed points. While the default is ``1000``, using ``5000`` + is recommended. Examples -------- @@ -690,13 +691,13 @@ def plot_fibers(self, n_seed_points: int = 1000): # fiber direction is stored in cell data, but the cell-to-point filter # leads to issues, where nan values in any non-volume cell may change - # the fiber direction in the target point(s). + # the fiber direction in the target points. 
mesh = self.mesh.extract_cells_by_type([pv.CellType.TETRA, pv.CellType.HEXAHEDRON]) mesh = mesh.ctp() streamlines = mesh.streamlines(vectors="fiber", source_radius=75, n_points=n_seed_points) if streamlines.n_cells == 0: LOGGER.error( - "Failed to generate streanlines with radius {source_radius} and {n_seed_points}" + "Failed to generate streamlines with radius {source_radius} and {n_seed_points}." ) return None tubes = streamlines.tube() @@ -706,7 +707,7 @@ def plot_fibers(self, n_seed_points: int = 1000): return plotter def plot_surfaces(self, show_edges: bool = True): - """Plot all the surfaces in the model. + """Plot all surfaces in the model. Examples -------- @@ -719,7 +720,7 @@ def plot_surfaces(self, show_edges: bool = True): try: import matplotlib as matplotlib except ImportError: - LOGGER.warning("matplotlib not found. Install matplotlib with: pip install matplotlib") + LOGGER.warning("Matplotlib is not found. Install with 'pip install matplotlib'.") return surfaces_to_plot = [s for p in self.parts for s in p.surfaces] @@ -747,7 +748,7 @@ def plot_surfaces(self, show_edges: bool = True): def plot_purkinje(self): """Plot the mesh and Purkinje network.""" if self.conduction_system is None or self.conduction_system.number_of_cells == 0: - LOGGER.info("No Conduction system to plot.") + LOGGER.info("No conduction system was found.") return try: @@ -758,21 +759,21 @@ def plot_purkinje(self): plotter.add_mesh(beams, line_width=2) plotter.show() except Exception: - LOGGER.warning("Failed to plot mesh.") + LOGGER.warning("Failed to plot the mesh.") return def save_model(self, filename: str): - """Save the model and necessary info to reconstruct. + """Save the model and necessary information to reconstruct. Parameters ---------- filename : str - Path to the model + Path to the model. 
Notes ----- - The mesh of the heart model will be saved as .vtu file, and - an additional partinfo.json file will be written to reconstruct + The mesh of the heart model is saved as a VTU file. An + additional ``partinfo.json`` file is written to reconstruct the heart model from the VTU file. Examples @@ -802,14 +803,14 @@ def save_model(self, filename: str): # TODO: Should consider to also reconstruct the parts that are not explicitly # TODO: defined in the class. def load_model_from_mesh(self, filename_mesh: str, filename_part_info: str): - """Load model from an existing VTU file and part info dictionary. + """Load a model from an existing VTU file and part information dictionary. Parameters ---------- filename_mesh : str Path to the VTU file containing the mesh. filename_part_info : str - Path to the JSON file that contains the part info to reconstruct the model. + Path to the JSON file that contains the part information for reconstructing the model. Examples -------- @@ -836,7 +837,7 @@ def load_model_from_mesh(self, filename_mesh: str, filename_part_info: str): try: list(part_info.keys()).index(part_1.name) except ValueError: - LOGGER.warning(f"{part_1.name} not in part info") + LOGGER.warning(f"{part_1.name} is not in the part information.") continue #! try to add surfaces to part by using the pre-defined surfaces @@ -856,7 +857,7 @@ def load_model_from_mesh(self, filename_mesh: str, filename_part_info: str): np.isin(self.mesh.cell_data["_volume-id"], part_1.pid) ).flatten() except Exception: - LOGGER.warning(f"Failed to set element ids for {part_1.name}") + LOGGER.warning(f"Failed to set element IDs for {part_1.name}.") pass # try to initialize cavity object. @@ -882,13 +883,13 @@ def load_model_from_mesh(self, filename_mesh: str, filename_part_info: str): LOGGER.warning("Failed to extract apex. 
Consider setting apex manually.") if any(v is None for v in [self.short_axis, self.l4cv_axis, self.l2cv_axis]): - LOGGER.warning("Heart not defined in the VTU file.") + LOGGER.warning("Heart is not defined in the VTU file.") try: LOGGER.warning("Computing heart axis...") self._define_anatomy_axis() except Exception: LOGGER.error( - "Failed to extract heart axis. Consider computing and setting them manually." + "Failed to extract heart axis. Consider computing and setting manually." ) else: LOGGER.info("Heart axis defined in the VTU file is reused...") @@ -896,7 +897,7 @@ def load_model_from_mesh(self, filename_mesh: str, filename_part_info: str): return def _set_part_ids(self): - """Populate part ids.""" + """Populate part IDs.""" c = 1 for p in self.parts: p.pid = c @@ -910,7 +911,7 @@ def _add_subparts(self) -> None: return def _get_used_element_ids(self) -> np.ndarray: - """Return array of used element ids.""" + """Get an array of used element IDs.""" element_ids = np.empty(0, dtype=int) for part in self.parts: element_ids = np.append(element_ids, part.element_ids) @@ -922,16 +923,16 @@ def _update_parts(self): Notes ----- - 1. Extracts septum - 2. Updates Parts to include element ids of the respective part - 3. Assign surfaces to each part - 4. Extracts the closing caps - 5. Creates cavities - 6. Extracts apical points - 7. Computes left-ventricle axis - 8. Computes left-ventricle 17 segments - 9. Adds nodal areas - 10. Adds surface normals to boundaries + 1. Extracts septum. + 2. Updates parts to include element IDs of the respective part. + 3. Assigns surfaces to each part. + 4. Extracts the closing caps. + 5. Creates cavities. + 6. Extracts apical points. + 7. Computes left-ventricle axis. + 8. Computes left-ventricle 17 segments. + 9. Adds nodal areas. + 10. Adds surface normals to boundaries. 
""" self._sync_input_parts_to_model_parts() @@ -969,7 +970,7 @@ def _sync_input_parts_to_model_parts(self): Notes ----- Checks: - overwrites the default part ids by those given by user. + Overwrites the default part IDs by those given by the user. """ # unassign any part ids. for p in self.parts: @@ -993,9 +994,7 @@ def _sync_input_parts_to_model_parts(self): def _extract_septum(self, num_layers_to_remove: int = 1) -> None: """Separate the septum elements from the left ventricle. - Notes - ----- - Uses the septum surface of the right ventricle + This method uses the septum surface of the right ventricle. """ if not isinstance(self, (BiVentricle, FourChamber, FullHeart)): LOGGER.warning("Model type: {0} Not extracting septum elements".format(type(self))) @@ -1012,7 +1011,7 @@ def _extract_septum(self, num_layers_to_remove: int = 1) -> None: "Expecting only one surface that contains string: 'septum'" ) if len(septum_name) == 0: - raise InvalidHeartModelError("No boundary with name: 'septum' found") + raise InvalidHeartModelError("No boundary found with name: 'septum'") surface_septum = self.mesh.get_surface_by_name(septum_name[0]) # extrude septum surface @@ -1046,7 +1045,7 @@ def _extract_septum(self, num_layers_to_remove: int = 1) -> None: self.mesh.cell_data["_volume-id"][element_ids_septum] = part.pid self.mesh._volume_id_to_name[int(part.pid)] = part.name - # remove these element ids from the left-ventricle + # remove these element ID from the left-ventricle part = next(part for part in self.parts if part.name == "Left ventricle") mask = np.isin(part.element_ids, element_ids_septum, invert=True) part.element_ids = part.element_ids[mask] @@ -1058,12 +1057,12 @@ def _extract_apex(self, check_edge: bool = True) -> None: Notes ----- - Apex is the defined as the point furthest from the mid-point between cap/valves. + Apex is defined as the point furthest from the mid-point between the cap/valves. 
Parameters ---------- - check_edge : bool, optional - Checks and corrects if the apical point is on the edge of a surface, by default True + check_edge : bool, default: True + Whether to check and correct if the apical point is on the edge of a surface. """ ventricles = [p for p in self.parts if "ventricle" in p.name] surface_substrings = ["endocardium", "epicardium"] @@ -1101,7 +1100,7 @@ def _extract_apex(self, check_edge: bool = True) -> None: ] LOGGER.warning( - f"Initial apical point is on edge of {surface.name}, the next closest point is used" # noqa: E501 + f"Initial apical point is on edge of {surface.name}. The next closest point is used." # noqa: E501 ) # assign apex point @@ -1116,13 +1115,13 @@ def _extract_apex(self, check_edge: bool = True) -> None: return def _assign_elements_to_parts(self) -> None: - """Get the element ids of each part and assign these to the Part objects.""" - # get element ids of each part. + """Get the element IDs of each part and assign these to the ``Part`` objects.""" + # get element IDs of each part. used_element_ids = self._get_used_element_ids() for part in self.parts: if len(part.element_ids) > 0: LOGGER.warning( - "Part {0} seems to already have elements assigned: skipping".format(part.name) + "Part {0} seems to already have elements assigned. Skipping.".format(part.name) ) continue # ! this is valid as long as no additional surfaces are added in self.mesh. @@ -1156,7 +1155,7 @@ def _assign_surfaces_to_parts(self) -> None: septum_candidates = [s for s in self.mesh.surface_names if "septum" in s] if len(septum_candidates) > 1: LOGGER.warning( - "Multiple candidate surfaces for septum found, using first one." + "Multiple candidate surfaces for septum exist. Using the first one." 
) boundary_surface = self.mesh.get_surface_by_name(septum_candidates[0]) except Exception: @@ -1193,14 +1192,14 @@ def _assign_cavities_to_parts(self) -> None: if "endocardium" in s.name and s.n_cells > 0 ] if len(surfaces) == 0: - LOGGER.warning(f"Skipping part {part.name}: only empty surfaces present.") + LOGGER.warning(f"Skipping part {part.name}. Only empty surfaces are present.") continue surface: SurfaceMesh = SurfaceMesh(pv.merge(surfaces)) surface.name = part.name + " cavity" # save this cavity mesh to the centralized mesh object - surface.id = int(np.sort(self.mesh.surface_ids)[-1] + 1) # get unique id. + surface.id = int(np.sort(self.mesh.surface_ids)[-1] + 1) # get unique ID. # Generate patches that close the surface. patches = vtk_utils.get_patches_with_centroid(surface) @@ -1263,7 +1262,7 @@ def _assign_cavities_to_parts(self) -> None: return def _update_cap_types(self): - """Try to update the cap types using names of connected boundaries.""" + """Try to update the cap types using the names of the connected boundaries.""" boundaries_to_check = [ s for s in self.mesh._surfaces if "valve" in s.name or "inlet" in s.name ] @@ -1295,7 +1294,7 @@ def _update_cap_types(self): return def _validate_cap_names(self): - """Validate that caps are attached to right part.""" + """Validate that caps are attached to the right part.""" for part in self.parts: cap_types = [c.type for c in part.caps] if part.name == "Left ventricle": @@ -1325,7 +1324,7 @@ def _validate_cap_names(self): unexpected_captypes = [ctype for ctype in cap_types if ctype not in expected_cap_types] if len(unexpected_captypes) > 0: LOGGER.error( - "Part: {0}. Cap types {1} not in expected cap types:{2}".format( + "Part: {0}. 
Cap types {1} are not in expected cap types:{2}".format( part.name, unexpected_captypes, expected_cap_types ) ) @@ -1340,7 +1339,7 @@ def _validate_surfaces(self): is_valid = True else: for invalid_s in invalid_surfaces: - LOGGER.error(f"Surface {invalid_s.name} is empty") + LOGGER.error(f"Surface {invalid_s.name} is empty.") is_valid = False self._sync_epicardium_with_part() @@ -1355,21 +1354,21 @@ def _validate_parts(self): is_valid = True else: for invalid_p in invalid_parts: - LOGGER.error(f"Part {invalid_p.name} is empty") + LOGGER.error(f"Part {invalid_p.name} is empty.") is_valid = False return is_valid def _sync_epicardium_with_part(self): - """Clean epicardial surfaces such that these use only nodes of part.""" + """Clean epicardial surfaces such that these use only nodes of the part.""" for part in self.parts: self.mesh._set_global_ids() global_node_ids_part = self.mesh.extract_cells(part.element_ids).point_data[ "_global-point-ids" ] - # ! The only info we use from surface here is the id, and not the mesh info - # ! we need to go back to the central mesh to obtain an updated copy of + # ! The only information we use from surface here is the ID, not the mesh information. + # ! We need to go back to the central mesh to obtain an updated copy of # ! the corresponding mesh. for surface in part.surfaces: if "epicardium" in surface.name: @@ -1382,7 +1381,7 @@ def _sync_epicardium_with_part(self): # do not use any faces that use a node not in the part. mask = np.all(np.isin(surface.triangles, np.argwhere(mask).flatten()), axis=1) - LOGGER.debug(f"Removing {np.sum(np.invert(mask))} faces from {surface.name}") + LOGGER.debug(f"Removing {np.sum(np.invert(mask))} faces from {surface.name}.") surface.triangles = surface.triangles[mask, :] # add updated mesh to global mesh. @@ -1428,21 +1427,21 @@ def get_apex_node_set( option: Literal["endocardium", "epicardium", "myocardium"] = "epicardium", radius: float = 3, ) -> np.ndarray: - """Get a node set around apex point. 
+ """Get a nodeset around the apex point. Parameters ---------- - part : left", "right"], optional - on which part, by default "left" - option : endocardium", "epicardium", "myocardium"], optional - on surface or in mesh, by default "epicardium" - radius : float, optional - search in radius, by default 3 + part : Literal["left", "right"], default: "left" + On which part. + option : Literal["endocardium", "epicardium", "myocardium"], default: "epicardium" + On surface or in mesh. + radius : float, default: 3 + Search in radius. Returns ------- np.ndarray - apex node set + Apex nodeset """ import scipy.spatial as spatial @@ -1469,11 +1468,11 @@ def get_apex_node_set( def _create_atrioventricular_isolation(self) -> Union[None, Part]: """ - Extract a layer of element to isolate between ventricles and atrium. + Extract a layer of element to isolate between the ventricles and atrium. Notes ----- - These elements are initially belong to atrium. + These elements initially belong to the atrium. Returns ------- @@ -1482,7 +1481,7 @@ def _create_atrioventricular_isolation(self) -> Union[None, Part]: """ # TODO: move this method to FourChamber class. if not isinstance(self, FourChamber): - LOGGER.error("This method is only for FourChamber model.") + LOGGER.error("This method is only for the four-chamber heart model.") return # find interface nodes between ventricles and atrial @@ -1535,7 +1534,7 @@ def _create_atrioventricular_isolation(self) -> Union[None, Part]: if interface_eids.shape[0] == 0: LOGGER.warning( """Atria and ventricles do not seem to be - connected, not generating a separate part for isolation.""" + connected. 
Not generating a separate part for isolation.""") return None @@ -1560,14 +1559,16 @@ def create_stiff_ventricle_base( Parameters ---------- - threshold_left_ventricle : float, optional - uvc_l larger than threshold will be set as stiff material, by default 0.9 - threshold_right_ventricle : float, optional - a uvc_l value larger than this threshold in the right ventricle will be set to a stiff - material, by default 0.95 - stiff_material : MechanicalMaterialModel, optional - material to assign, by default MAT295(rho=0.001, + threshold_left_ventricle : float, default: 0.9 + If the ``uvc_l`` value is larger than this threshold in the left ventricle, + it is set as stiff material. + threshold_right_ventricle : float, default: 0.95 + If the ``uvc_l`` value is larger than this threshold in the right ventricle, + it is set to a stiff + material. + stiff_material : MechanicalMaterialModel, default: MAT295(rho=0.001, iso=ISO(itype=1, beta=2, kappa=10, mu1=0.1, alpha1=2) + Material to assign. Returns ------- @@ -1577,8 +1578,8 @@ def create_stiff_ventricle_base( try: v = self.mesh.point_data_to_cell_data()["apico-basal"] except KeyError: - LOGGER.error("Array named 'apico-basal' cannot be found, cannot create base part.") - LOGGER.error("Please call simulator.compute_uhc() first.") + LOGGER.error("Array named 'apico-basal' is not found. Cannot create base part.") + LOGGER.error("Call simulator.compute_uhc() first.") return eids = np.intersect1d( @@ -1603,25 +1604,25 @@ def create_stiff_ventricle_base( return part def create_atrial_stiff_ring(self, radius: float = 2) -> None | Part: - """Create a part for solids close to atrial caps. + """Create a part for solids close to the atrial caps. Note ---- - Part will be passive and isotropic, material need to be defined + Part created is passive and isotropic. The material must be defined. Parameters ---------- - radius : foat, optional - Influence region, by default 2 + radius : float, default: 2 + Influence region.
Returns ------- Union[None, Part] Part of atrial rings if created. """ # TODO: @mhoeijm move this to FourChamber class if not isinstance(self, FourChamber): - LOGGER.error("This method is only for FourChamber model.") + LOGGER.error("This method is only for the four-chamber heart model.") return # get ring cells from cap node list @@ -1663,7 +1664,7 @@ def create_atrial_stiff_ring(self, radius: float = 2) -> None | Part: class LeftVentricle(HeartModel): - """Model of just the left ventricle.""" + """Model of only the left ventricle.""" def __init__(self, working_directory: pathlib.Path | str = None) -> None: self.left_ventricle: Part = Part(name="Left ventricle", part_type=PartType.VENTRICLE) @@ -1679,7 +1680,7 @@ def __init__(self, working_directory: pathlib.Path | str = None) -> None: class BiVentricle(HeartModel): - """Model of the left and right ventricle.""" + """Model of the left and right ventricles.""" def __init__(self, working_directory: pathlib.Path | str = None) -> None: self.left_ventricle: Part = Part(name="Left ventricle", part_type=PartType.VENTRICLE) @@ -1734,7 +1735,7 @@ def __init__(self, working_directory: pathlib.Path | str = None) -> None: class FullHeart(FourChamber): - """Model of both ventricles, both atria, aorta and pulmonary artery.""" + """Model of both ventricles, both atria, the aorta, and the pulmonary artery.""" def __init__(self, working_directory: pathlib.Path | str = None) -> None: self.left_ventricle: Part = Part(name="Left ventricle", part_type=PartType.VENTRICLE) diff --git a/src/ansys/health/heart/objects.py b/src/ansys/health/heart/objects.py index 8ab57eb92..534edcc70 100644 --- a/src/ansys/health/heart/objects.py +++ b/src/ansys/health/heart/objects.py @@ -87,19 +87,19 @@ def _get_fill_data( def _get_global_cell_ids(mesh: pv.UnstructuredGrid, celltype: pv.CellType) -> np.ndarray: - """Get the global cell ids of a particular cell type. + """Get the global cell IDs of a given cell type.
Parameters ---------- mesh : pv.UnstructuredGrid - Unstructured grid from which to obtain the global cell ids + Unstructured grid to obtain the global cell IDs from. celltype : pv.CellType - Cell type to get global cell ids of. + Cell type to get global cell IDs of. Returns ------- np.ndarray - Array with global cell ids. + Array with global cell IDs. """ return np.argwhere(np.isin(mesh.celltypes, celltype)).flatten() @@ -135,11 +135,11 @@ def __init__(self, name: str = None) -> None: self.type = None """Type of feature.""" self._node_set_id: int = None - """Node set id associated with feature.""" + """Nodeset ID associated with feature.""" self._seg_set_id: int = None - """Segment set id associated with feature.""" + """Segment set ID associated with feature.""" self.pid: int = None - """Part id associated with the feature.""" + """Part ID associated with feature.""" pass @@ -179,7 +179,7 @@ def nodes(self, array: np.ndarray): elif num_extra_points < 0: raise NotImplementedError( - "Assigning less nodes than the original, not implemented yet." + "Assigning less nodes than the original. Not implemented yet." ) except Exception as e: @@ -188,7 +188,7 @@ @property def triangles(self): - """Triangular faces of the surface num_faces x 3.""" + """Triangular faces of the surface ``num_faces`` x 3.""" faces = np.reshape(self.faces, (self.n_cells, 3 + 1))[:, 1:] return faces @@ -204,25 +204,25 @@ def triangles(self, value: np.ndarray): @property def triangles_global(self): - """Global triangle ids. + """Global triangle IDs. Returns ------- - Tries to use point_data["_global-point-ids"] to retrieve - triangle definitions in global ids. + Tries to use ``point_data["_global-point-ids"]`` to retrieve + triangle definitions in global IDs.
""" return self.point_data["_global-point-ids"][self.triangles] @property def boundary_edges(self): - """Get boundary edges of self.""" + """Boundary edges of self.""" boundary_edges = vtk_utils.get_boundary_edge_loops(self, remove_open_edge_loops=False) boundary_edges = np.vstack(list(boundary_edges.values())) return boundary_edges @property def boundary_edges_global(self): - """Global point ids of boundary edges.""" + """Global point IDs of boundary edges.""" return self.point_data["_global-point-ids"][self.boundary_edges] def __init__( @@ -248,38 +248,38 @@ def __init__( """Name of the surface.""" self.id: int = id - """ID of surface.""" + """ID of the surface.""" self.triangles = triangles - """Triangular faces of the surface num_faces x 3.""" + """Triangular faces of the surface ``num_faces`` x 3.""" self.nodes = nodes """Node coordinates.""" self._seg_set_id: int = None - """Segment set id.""" + """Segment set ID.""" self._node_set_id: int = None - """Node set id.""" + """Nodeset ID.""" @property def node_ids_triangles(self) -> np.ndarray: - """Local node ids - sorted by earliest occurrence.""" + """Local node IDs sorted by earliest occurrence.""" _, idx = np.unique(self.triangles.flatten(), return_index=True) node_ids = self.triangles.flatten()[np.sort(idx)] return node_ids @property def global_node_ids_triangles(self): - """Retrieve the global node ids from point data.""" + """Global node IDs from point data.""" return self.point_data["_global-point-ids"][self.node_ids_triangles] @property def _boundary_nodes(self) -> np.ndarray: - """Global node ids of nodes on the boundary of the mesh (if any).""" + """Global node IDs of nodes on the boundary of the mesh (if any).""" _, idx = np.unique(self.boundary_edges.flatten(), return_index=True) node_ids = self.boundary_edges.flatten()[np.sort(idx)] return node_ids def force_normals_inwards(self): - """Force the cell ordering of a the closed surface such that normals point inward.""" + """Force the cell ordering of 
the closed surface such that normals point inward.""" if not self.is_manifold: LOGGER.warning("Surface is non-manifold.") @@ -326,7 +326,7 @@ def nodes(self, array: np.ndarray): @property def edges(self): - """Tetrahedrons num_tetra x 4.""" + """Tetrahedrons ``num_tetra`` x 4.""" return self.cells_dict[pv.CellType.LINE] @edges.setter @@ -354,22 +354,22 @@ def __init__( Feature.__init__(self, name) self.edges = edges - """Beams edges.""" + """Beam edges.""" self.nodes = nodes """Node coordinates.""" self.pid = pid - """Part id associated with the network.""" + """Part ID associated with the network.""" self.nsid: int = nsid - """Surface id associated with the network.""" + """Surface ID associated with the network.""" self._all_beam_nodes: np.ndarray = np.empty((0, 3)) - """Temporary attribute to save all previously created beam nodes.""" + """Temporary attribute to save all previously created beam nodes to.""" self.ep_material: EPMaterial = EPMaterial.DummyMaterial() - """Initialize dummy ep material model.""" + """Initialize dummy EP material model.""" class Cavity(Feature): @@ -440,19 +440,19 @@ class Cap(Feature): @property def _local_node_ids_edge(self): - """Local node ids of cap edge.""" + """Local node IDs of the cap edge.""" edges = vtk_utils.get_boundary_edge_loops(self._mesh) edge_local_ids = np.unique(np.array([np.array(edge) for edge in edges.values()])) return edge_local_ids @property def global_node_ids_edge(self): - """Global node ids of the edge of the cap.""" + """Global node IDs of the edge of the cap.""" return self._mesh.point_data["_global-point-ids"][self._local_node_ids_edge] @property def _local_centroid_id(self): - """Local id of centroid.""" + """Local ID of the centroid.""" centroid_id = np.setdiff1d(np.arange(0, self._mesh.n_points), self._local_node_ids_edge) if len(centroid_id) != 1: LOGGER.error("Failed to identify single centroid node.") @@ -462,17 +462,17 @@ def _local_centroid_id(self): @property def global_centroid_id(self): - 
"""Global centroid id.""" + """Global centroid ID.""" return self._mesh.point_data["_global-point-ids"][self._local_centroid_id] @property def centroid(self): - """Centroid of cap.""" + """Centroid of the cap.""" return self._mesh.points[self._local_centroid_id, :] @property def cap_normal(self): - """Compute mean normal of cap.""" + """Compute mean normal of the cap.""" return np.mean(self._mesh.compute_normals().cell_data["Normals"], axis=0) def __init__( @@ -481,50 +481,50 @@ def __init__( cap_type: CapType = None, ) -> None: super().__init__(name) - """Centroid of cap ID (in case centroid node is created).""" + """Centroid of the cap ID (in case centroid node is created).""" self._mesh: SurfaceMesh = None if cap_type is None or isinstance(cap_type, CapType): self.type = cap_type else: - LOGGER.warning(f"Failed to set cap type for {name}, {cap_type}") + LOGGER.warning(f"Failed to set cap type for {name}, {cap_type}.") return class Point(Feature): - """Point class. Can be used to collect relevant points in the mesh.""" + """Point class, which can be used to collect relevant points in the mesh.""" def __init__(self, name: str = None, xyz: np.ndarray = None, node_id: int = None) -> None: super().__init__(name) self.xyz: np.ndarray = xyz - """XYZ Coordinates of point.""" + """XYZ coordinates of the point.""" self.node_id: int = node_id - """Global node id of point.""" + """Global node ID of the point.""" class Mesh(pv.UnstructuredGrid): - """Mesh class: inherits from pyvista UnstructuredGrid. + """Mesh class, which inherits from ``pyvista UnstructuredGrid``. Notes ----- - This class inherits from pyvista.UnstructuredGrid and adds additional - attributes and convenience methods for enhanced functionality. E.g. we use _volume_id, - _surface_id and _line_id cell arrays to keep track of "labeled" selections of - cells. _volume_id is used to group 3D volume cells together. - Any non 3D volume cell is labeled as numpy.nan. 
Similarly 2D and 1D cells are tracked - through _surface_id and _line_id respectively. + This class inherits from ``pyvista.UnstructuredGrid`` and adds additional + attributes and convenience methods for enhanced functionality. We use ``_volume_id``, + ``_surface_id``, and ``_line_id`` cell arrays to keep track of *labeled* selections of + cells. ``_volume_id`` is used to group 3D volume cells together. + Any non-3D volume cell is labeled as ``numpy.nan``. Similarly 2D and 1D cells are tracked + through ``_surface_id`` and ``_line_id`` respectively. """ @property def tetrahedrons(self): - """Tetrahedrons num_tetra x 4.""" + """Tetrahedrons ``num_tetra`` x 4.""" return self.cells_dict[pv.CellType.TETRA] @property def triangles(self): - """Get all triangles of the mesh.""" + """All triangles of the mesh.""" return self.cells_dict[pv.CellType.TRIANGLE] @property @@ -544,7 +544,7 @@ def _surfaces(self) -> List[SurfaceMesh]: try: surface.name = self._surface_id_to_name[sid] except KeyError as error: - LOGGER.debug(f"Failed to give surface with id {sid} a name. {error}") + LOGGER.debug(f"Failed to give surface with ID {sid} a name. {error}") surfaces.append(surface) return surfaces @@ -557,22 +557,22 @@ def _volumes(self): @property def _global_triangle_ids(self): - """Global ids of triangular cells.""" + """Global IDs of triangular cells.""" return _get_global_cell_ids(self, pv.CellType.TRIANGLE) @property def _global_tetrahedron_ids(self): - """Global ids of tetrahedral cells.""" + """Global IDs of tetrahedral cells.""" return _get_global_cell_ids(self, pv.CellType.TETRA) @property def surface_ids(self) -> np.ndarray: - """Unique surface ids. + """Unique surface IDs. Returns ------- np.ndarray - Array with unique surface ids + Array with unique surface IDs. 
""" try: mask = np.isin(self.celltypes, _SURFACE_CELL_TYPES) @@ -580,7 +580,7 @@ def surface_ids(self) -> np.ndarray: mask = np.all(np.vstack((mask, mask1)), axis=0) return np.unique(self.cell_data["_surface-id"][mask]) except KeyError: - LOGGER.debug(f"Failed to extract one of {_SURFACE_CELL_TYPES}") + LOGGER.debug(f"Failed to extract one of {_SURFACE_CELL_TYPES}.") return [] @property @@ -590,12 +590,12 @@ def surface_names(self) -> List[str]: @property def volume_ids(self) -> np.ndarray: - """Unique volume ids. + """Unique volume IDs. Returns ------- np.ndarray - Array with unique volume ids + Array with unique volume IDs. """ try: mask = np.isin(self.celltypes, _VOLUME_CELL_TYPES) @@ -603,7 +603,7 @@ def volume_ids(self) -> np.ndarray: mask = np.all(np.vstack((mask, mask1)), axis=0) return np.unique(self.cell_data["_volume-id"][mask]) except KeyError: - LOGGER.debug(f"Failed to extrect one of {_VOLUME_CELL_TYPES}") + LOGGER.debug(f"Failed to extract one of {_VOLUME_CELL_TYPES}.") return None @property @@ -613,12 +613,12 @@ def volume_names(self) -> List[str]: @property def line_ids(self) -> np.ndarray: - """Unique line ids. + """Unique line IDs. Returns ------- np.ndarray - Array with unique line ids + Array with unique line IDs. 
""" try: mask = self.celltypes == pv.CellType.LINE @@ -638,13 +638,13 @@ def _volume_name_to_id(self): @property def _global_cell_ids(self): - """Global cell ids.""" + """Global cell IDs.""" self._set_global_ids() return self.cell_data["_global-cell-ids"] @property def _global_point_ids(self): - """Global point ids.""" + """Global point IDs.""" self._set_global_ids() return self.point_data["_global-point-ids"] @@ -652,9 +652,9 @@ def __init__(self, *args): super().__init__(*args) self._surface_id_to_name: dict = {} - """Surface id to name map.""" + """Surface ID to name map.""" self._volume_id_to_name: dict = {} - """Volume id to name map.""" + """Volume ID to name map.""" pass def _add_mesh( @@ -668,14 +668,14 @@ def _add_mesh( Notes ----- - Adding the mesh is always in-place + Adding the mesh is always in place. Parameters ---------- mesh_input : pv.PolyData | pv.UnstructuredGrid - Mesh to add, either PolyData or UnstructuredGrid - keep_data : bool, optional - Flag specifying whether to try to keep mesh point/cell data, by default True + Mesh to add, either ``PolyData`` or ``UnstructuredGrid``. + keep_data : bool, default: True + Whether to try to keep mesh point/cell data. """ mesh = copy.copy(mesh_input) if keep_data: @@ -708,7 +708,7 @@ def _add_mesh( return self def _set_global_ids(self): - """Add global cell and point ids as cell and point data array.""" + """Add global cell and point IDs as cell and point data array.""" self.cell_data["_global-cell-ids"] = np.array(np.arange(0, self.n_cells), dtype=int) self.point_data["_global-point-ids"] = np.array(np.arange(0, self.n_points), dtype=int) return @@ -718,7 +718,7 @@ def _get_submesh( ) -> pv.UnstructuredGrid: # NOTE: extract_cells cleans the object, removing any unused points. 
if scalar not in self.cell_data.keys(): - LOGGER.debug(f"{scalar} does not exist in cell_data") + LOGGER.debug(f"{scalar} does not exist in 'cell_data'.") return None mask = np.isin(self.cell_data[scalar], sid) self._set_global_ids() @@ -756,14 +756,14 @@ def load_mesh(self, filename: Union[str, pathlib.Path]): Notes ----- - This tries to read a JSON file with the volume/surface id to name map - with extension .namemap.json in the same directory as the file. Alternatively, - you can read the name map manually by calling `._load_id_to_name_map(filename)` + This tries to read a JSON file with the volume/surface ID to name map + with extension ``.namemap.json`` in the same directory as the file. Alternatively, + you can read the name map manually by calling ``._load_id_to_name_map(filename)``. Parameters ---------- filename : Union[str, pathlib.Path] - Path to filename. + Full path to the mesh file. """ super(Mesh, self).__init__(filename) extension = pathlib.Path(filename).suffix @@ -773,19 +773,19 @@ def load_mesh(self, filename: Union[str, pathlib.Path]): except FileNotFoundError: if not os.path.isfile(filename_map): LOGGER.warning( - f"""{filename_map} not found. Please set id_to_name map manually by - mesh._load_id_to_name_map(filename)""" + f"""{filename_map} not found. Set 'id_to_name' map manually with + 'mesh._load_id_to_name_map(filename)'.""" ) else: LOGGER.error( - f"""Failed to read surface/volume id to name map from {filename_map}. - Please set id_to_name map manually by - mesh._load_id_to_name_map(filename)""" + f"""Failed to read surface/volume ID to name map from {filename_map}. + Set 'id_to_name' map manually with + 'mesh._load_id_to_name_map(filename)'.""" ) return def _save_id_to_name_map(self, filename: Union[str, pathlib.Path]): - """Save the id to name map. + """Save the ID to name map. 
Parameters ---------- @@ -800,12 +800,12 @@ def _save_id_to_name_map(self, filename: Union[str, pathlib.Path]): json.dump(id_to_name, f, indent=4) def _load_id_to_name_map(self, filename: Union[str, pathlib.Path]): - """Load the id to name map for volumes and surfaces. + """Load the ID to name map for volumes and surfaces. Parameters ---------- filename : Union[str, pathlib.Path] - Filename of the id to name map (JSON). + Filename of the ID to the name map (JSON). """ with open(filename, "r") as f: data = json.load( @@ -831,23 +831,23 @@ def validate_ids_to_name_map(self): duplicate_surface_names = self._get_duplicate_surface_names() if len(unmapped_volumes) > 0 or len(unmapped_surfaces) > 0: - LOGGER.debug(f"Volume ids {unmapped_volumes} not associated with a volume name.") - LOGGER.debug(f"Surface ids {unmapped_surfaces} not associated with a surface name.") + LOGGER.debug(f"Volume IDs {unmapped_volumes} are not associated with a volume name.") + LOGGER.debug(f"Surface IDs {unmapped_surfaces} are not associated with a surface name.") return False if len(duplicate_surface_names) > 0 or len(duplicate_volume_names) > 0: - LOGGER.debug(f"Volume names {duplicate_volume_names} occur more than once") + LOGGER.debug(f"Volume names {duplicate_volume_names} occur more than once.") LOGGER.debug(f"Surface names {duplicate_surface_names} occur more than once") return False else: return True def clean(self, ignore_nans_in_point_average: bool = False, **kwargs): - """Merge duplicate points and return cleaned copy. + """Merge duplicate points and return a cleaned copy. Parameters ---------- - ignore_nans_in_point_average : bool, optional - Flag indicating whether to ignore nan values when averaging point data, by default False + ignore_nans_in_point_average : bool, default: False + Whether to ignore nan values when averaging point data. 
Returns ------- @@ -880,19 +880,19 @@ def add_volume(self, volume: pv.UnstructuredGrid, id: int = None, name: str = No Parameters ---------- volume : pv.PolyData - PolyData representation of the volume to add + PolyData representation of the volume to add. id : int - ID of the volume to be added. This id will be tracked as "_volume-id" - name : str, optional - Name of the added volume, by default None (not tracked) + ID of the volume to add. This ID is tracked as ``_volume-id`` + name : str, default: None + Name of the added volume. The added value is not tracked by default. """ if not id: if "_volume-id" not in volume.cell_data.keys(): - LOGGER.debug("Failed to set _volume-id") + LOGGER.debug("Failed to set '_volume-id'.") return None else: if not isinstance(id, int): - LOGGER.debug("sid should by type int.") + LOGGER.debug("'sid' should be an integer.") return None volume.cell_data["_volume-id"] = np.ones(volume.n_cells, dtype=float) * id @@ -914,29 +914,28 @@ def add_surface( Parameters ---------- surface : pv.PolyData - PolyData representation of the surface to add + PolyData representation of the surface to add. sid : int - ID of the surface to be added. This id will be tracked as "_surface-id" - name : str, optional - Name of the added surface, by default None (not tracked) - overwrite_existing : bool, optional - Flag indicating whether to overwrite/append a surface with the same id, by default False + ID of the surface to add. This ID is tracked as ``_surface-id``. + name : str, default: None + Name of the added surface. The added surface is not tracked by default. + overwrite_existing : bool, default: False + Whether to overwrite a surface with the same ID. If ``False``, the added + surface is appended. 
""" if not id: if "_surface-id" not in surface.cell_data.keys(): - LOGGER.error("Failed to set _surface-id") + LOGGER.error("Failed to set '_surface-id'.") return None else: if not isinstance(id, int): - LOGGER.error("sid should by type int.") + LOGGER.error("'sid' should be an integer.") return None surface.cell_data["_surface-id"] = np.ones(surface.n_cells, dtype=float) * id if not overwrite_existing: if id in self.surface_ids: - LOGGER.error( - f"{id} already used. Please pick any id other than {self.surface_ids}." - ) + LOGGER.error(f"{id} is already used. Pick any ID other than {self.surface_ids}.") return None self_copy = self._add_mesh(surface, keep_data=True, fill_float=np.nan) @@ -952,17 +951,17 @@ def add_lines(self, lines: pv.PolyData, id: int = None): Parameters ---------- lines : pv.PolyData - PolyData representation of the lines to add + PolyData representation of the lines to add. id : int - ID of the surface to be added. This id will be tracked as "_line-id" + ID of the surface to add. This ID is tracked as ``_line-id``. 
""" if not id: if "_line-id" not in lines.cell_data.keys(): - LOGGER.error("Failed to set _surface-id") + LOGGER.error("Failed to set '_surface-id'.") return None else: if not isinstance(id, int): - LOGGER.error("sid should by type int.") + LOGGER.error("'sid' should be an integer.") return None lines.cell_data["_line-id"] = np.ones(lines.n_cells, dtype=float) * id @@ -970,24 +969,24 @@ def add_lines(self, lines: pv.PolyData, id: int = None): return self_copy def get_volume(self, sid: int) -> pv.UnstructuredGrid: - """Get a volume as a UnstructuredGrids object.""" + """Get a volume as an ``UnstructuredGrid`` object.""" return self._get_submesh(sid, scalar="_volume-id") def get_volume_by_name(self, name: str) -> pv.UnstructuredGrid: - """Get the surface associated with `name`.""" + """Get the surface associated with ``name``.""" if name not in list(self._volume_name_to_id.keys()): - LOGGER.error(f"No volume associated with {name}") + LOGGER.error(f"No volume is associated with {name}.") return None volume_id = self._volume_name_to_id[name] return self.get_volume(volume_id) def get_surface(self, sid: int) -> Union[pv.PolyData, SurfaceMesh]: # ?: Return SurfaceMesh instead of PolyData? - """Get a surface as PolyData object. + """Get a surface as a ``PolyData`` object. Notes ----- - Tries to return a SurfaceMesh object that also contains a name and id. + This method tries to return a ``SurfaceMesh`` object that also contains a name, ID, and additional convenience properties. """ if sid in list(self._surface_id_to_name.keys()): @@ -1001,53 +1000,53 @@ def get_surface(self, sid: int) -> Union[pv.PolyData, SurfaceMesh]: def get_surface_by_name(self, name: str) -> Union[pv.PolyData, SurfaceMesh]: # ?: Return SurfaceMesh instead of PolyData? 
- """Get the surface associated with `name`.""" + """Get the surface associated with ``name``.""" if name not in list(self._surface_name_to_id.keys()): - LOGGER.error(f"No surface associated with {name}") + LOGGER.error(f"No surface is associated with {name}.") return None surface_id = self._surface_name_to_id[name] return self.get_surface(surface_id) def get_lines(self, sid: int) -> pv.PolyData: - """Get lines as a PolyData object.""" + """Get lines as a ``PolyData`` object.""" return self._get_submesh(sid, scalar="_line-id").extract_surface() def remove_surface(self, sid: int): - """Remove a surface with id. + """Remove a surface with a given ID. Parameters ---------- sid : int - Id of surface to remove. + ID of the surface to remove. """ mask = self.cell_data["_surface-id"] == sid return self.remove_cells(mask, inplace=True) def remove_volume(self, vid: int): - """Remove a volume with id. + """Remove a volume with a given ID. Parameters ---------- vid : int - Id of volume to remove. + ID of the volume to remove. """ mask = self.cell_data["_volume-id"] == vid return self.remove_cells(mask, inplace=True) def remove_lines(self, lid: int): - """Remove a set of lines with id. + """Remove a set of lines with a given ID. Parameters ---------- lid : int - Id of lines to remove. + ID of the lines to remove. """ - mask = self.cell_data["_volume-id"] == lid + mask = self.cell_data["_line-id"] == lid return self.remove_cells(mask, inplace=True) class _ConductionType(Enum): - """Enum containing type of conduction system.""" + """Enum containing types of conduction systems.""" LEFT_PURKINJE = "Left-purkinje" """Left Purkinje network.""" @@ -1066,25 +1065,25 @@ class _ConductionType(Enum): class _BeamsMesh(Mesh): - """Mesh class: inherits from Mesh. + """Mesh class that inherits from ``Mesh``. Notes ----- - This class inherits from Mesh and adds additional + This class inherits from ``Mesh and adds additional attributes and convenience methods for enhanced functionality. 
Lines of the same component are - tracked as _line_id, connections to the volume mesh are tracked using the pointdata field - _is-connected. + tracked as ``_line_id``. Connections to the volume mesh are tracked using the pointdata field + ``_is-connected``. """ def __init__(self, *args): super().__init__(*args) self._line_id_to_name: dict = {} - """line id to name map.""" + """Line ID to name map.""" self.ep_material: dict = {} - """Ep material map.""" + """EP material map.""" self._line_id_to_pid: dict = {} - """line id to part id map.""" + """Line ID to part ID map.""" pass def _get_submesh( @@ -1092,7 +1091,7 @@ def _get_submesh( ) -> pv.PolyData: # NOTE: extract_cells cleans the object, removing any unused points. if scalar not in self.cell_data.keys(): - LOGGER.debug(f"{scalar} does not exist in cell_data") + LOGGER.debug(f"{scalar} does not exist in 'cell_data'.") return None mask = np.isin(self.cell_data[scalar], sid) self._set_global_ids() @@ -1109,14 +1108,14 @@ def _add_mesh( Notes ----- - Adding the mesh is always in-place + Adding the mesh is always in place. Parameters ---------- mesh_input : pv.PolyData | pv.UnstructuredGrid - Mesh to add, either PolyData or UnstructuredGrid - keep_data : bool, optional - Flag specifying whether to try to keep mesh point/cell data, by default True + Mesh to add, which is either ``PolyData`` or ``UnstructuredGrid``. + keep_data : bool, default: True + Whether to try to keep mesh point/cell data. """ mesh = copy.copy(mesh_input) if keep_data: @@ -1149,7 +1148,7 @@ def _add_mesh( return self def get_unique_lines_id(self) -> int: - """Get unique lines id.""" + """Get unique lines ID.""" new_id: int if "_line-id" not in self.cell_data.keys(): new_id = 1 @@ -1163,17 +1162,17 @@ def add_lines(self, lines: pv.PolyData, id: int = None, name: str = None): Parameters ---------- lines : pv.PolyData - PolyData representation of the lines to add + PolyData representation of the lines to add. 
id : int - ID of the surface to be added. This id will be tracked as "_line-id" + ID of the surface to add. This ID is tracked as ``_line-id``. name: str - Name of the lines to add + Name of the lines to add. """ if not id: return None else: if not isinstance(id, int): - LOGGER.debug("sid should by type int.") + LOGGER.debug("'sid' should be an integer.") return None lines.cell_data["_line-id"] = np.ones(lines.n_cells, dtype=float) * id if "_is-connected" not in lines.point_data.keys(): @@ -1185,14 +1184,14 @@ def add_lines(self, lines: pv.PolyData, id: int = None, name: str = None): return self_copy def get_line_id_from_name(self, name: str) -> int: - """Get line id from name using the `_line_id_to_name` attribute.""" + """Get the line ID from name using the ``_line_id_to_name`` attribute.""" position_in_list = list(self._line_id_to_name.values()).index(name) line_id = list(self._line_id_to_name.keys())[position_in_list] return line_id def get_lines_by_name(self, name: str) -> pv.PolyData: # ?: Return SurfaceMesh instead of PolyData? 
- """Get the lines associated with `name`.""" + """Get the lines associated with the ``name` attribute.""" if name not in list(self._line_id_to_name.values()): LOGGER.error(f"No lines associated with {name}") return None @@ -1200,7 +1199,7 @@ def get_lines_by_name(self, name: str) -> pv.PolyData: return self.get_lines(line_id) def get_lines(self, sid: int) -> pv.PolyData: - """Get lines as a PolyData object.""" + """Get lines as a ``PolyData`` object.""" return self._get_submesh(sid, scalar="_line-id").extract_surface() @@ -1220,7 +1219,7 @@ class Part: @property def surfaces(self) -> List[SurfaceMesh]: - """List of surfaces belonging to part.""" + """List of surfaces belonging to the part.""" surfaces = [] for key, value in self.__dict__.items(): if isinstance(value, SurfaceMesh): @@ -1229,7 +1228,7 @@ def surfaces(self) -> List[SurfaceMesh]: @property def surface_names(self) -> List[str]: - """List of surface names belonging to part.""" + """List of surface names belonging to the part.""" surface_names = [] for key, value in self.__dict__.items(): if isinstance(value, SurfaceMesh): @@ -1237,7 +1236,7 @@ def surface_names(self) -> List[str]: return surface_names def get_point(self, pointname: str) -> Point: - """Get point from part.""" + """Get point from the part.""" for point in self.points: if point.name == pointname: return point @@ -1246,15 +1245,15 @@ def get_point(self, pointname: str) -> Point: def __init__(self, name: str = None, part_type: PartType = PartType.UNDEFINED) -> None: self.name = name - """Name of the part.""" + """Part name.""" self.pid = None """Part ID.""" self.mid = None - """Material id associated with part.""" + """Material ID associated with the part.""" self.part_type: PartType = part_type """Type of the part.""" self.element_ids: np.ndarray = np.empty((0, 4), dtype=int) - """Array holding element ids that make up this part.""" + """Array holding element IDs that make up the part.""" self.points: List[Point] = [] """Points of interest 
belonging to the part.""" self.caps: List[Cap] = [] @@ -1262,20 +1261,20 @@ def __init__(self, name: str = None, part_type: PartType = PartType.UNDEFINED) - self.cavity: Cavity = None self.fiber: bool = False - """If this part has fiber/sheet data.""" + """Flag indicating if the part has fiber/sheet data.""" self.active: bool = False - """If active stress will be established.""" + """Flag indicating if active stress is established.""" self.meca_material: MechanicalMaterialModel = MechanicalMaterialModel.DummyMaterial() - """Material model will be assiggned in Simulator.""" + """Material model is to be assiggned in the simulator.""" self.ep_material: EPMaterial = EPMaterial.DummyMaterial() - """EP Material model will be assiggned in Simulator.""" + """EP material model is to be assigned in the simulator.""" """Cavity belonging to the part.""" if self.part_type in [PartType.VENTRICLE]: self.apex_points: List[Point] = [] - """Points on apex.""" + """Points on the apex.""" self._add_surfaces() @@ -1303,7 +1302,7 @@ def _add_septum_part(self): return def _get_info(self): - """Get part info in order to reconstruct from a mesh file.""" + """Get part information to reconstruct from a mesh file.""" info = { self.name: { "part-id": self.pid, diff --git a/src/ansys/health/heart/post/auto_process.py b/src/ansys/health/heart/post/auto_process.py index 417f6eef7..c81940314 100644 --- a/src/ansys/health/heart/post/auto_process.py +++ b/src/ansys/health/heart/post/auto_process.py @@ -40,21 +40,21 @@ def zerop_post(directory: str, model: HeartModel) -> tuple[dict, np.ndarray, np.ndarray]: - """Post-process zeropressure folder. + """Postprocess the zero-pressure folder. Parameters ---------- directory : str - Path to simulation folder + Path to the simulation folder. model : HeartModel - model to post-process + Model to postprocess. 
Returns ------- tuple[dict, np.ndarray, np.ndarray] - dictionary with convergence information - stress free configuration - computed end-of-diastolic configuration + Dictionary with convergence information, + stress free configuration, and + computed end-of-diastolic configuration. """ folder = "post" os.makedirs(os.path.join(directory, folder), exist_ok=True) @@ -124,7 +124,7 @@ def zerop_post(directory: str, model: HeartModel) -> tuple[dict, np.ndarray, np. # save left ventricle in json dct["Left ventricle EOD pressure (mmHg)"] = lv_pr_mmhg dct["True left ventricle volume (mm3)"] = true_lv_ed_volume - dct["Simulation Left ventricle volume (mm3)"] = lv_volumes + dct["Simulation left ventricle volume (mm3)"] = lv_volumes # Klotz curve information klotz = EDPVR(true_lv_ed_volume / 1000, lv_pr_mmhg) @@ -144,14 +144,14 @@ def zerop_post(directory: str, model: HeartModel) -> tuple[dict, np.ndarray, np. def mech_post(directory: str, model: HeartModel) -> None: - """Post-process mechanical simulation folder. + """Postprocess the mechanical simulation folder. Parameters ---------- directory : str - d3plot folder + Path to the d3plot folder. model : HeartModel - heart model + Heart model. """ last_cycle_duration = 800 folder = "post" diff --git a/src/ansys/health/heart/post/dpf_utils.py b/src/ansys/health/heart/post/dpf_utils.py index 80aedab6d..62f01fbe8 100644 --- a/src/ansys/health/heart/post/dpf_utils.py +++ b/src/ansys/health/heart/post/dpf_utils.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""D3plot parser using Ansys-dpf.""" +"""D3plot parser using Ansys DPF.""" import os from pathlib import Path @@ -36,7 +36,7 @@ from ansys.health.heart.models import HeartModel _SUPPORTED_DPF_SERVERS = ["2024.1", "2024.1rc1", "2024.2rc0"] -"""List of supported DPF Servers.""" +"""List of supported DPF servers.""" #! NOTE: #! 2024.1rc0: not supported due to missing ::tf operator #! 
=> 2024.2rc1: not supported due to bug in dpf server when reading d3plots mixed with EM results @@ -48,14 +48,14 @@ def _check_accept_dpf(): else: LOGGER.error( """DPF requires you to accept the license agreement. - Please set the environment variable "ANSYS_DPF_ACCEPT_LA" to "Y".""" + Set the environment variable "ANSYS_DPF_ACCEPT_LA" to "Y".""" ) exit() return class D3plotReader: - """Use DPF to parse d3plot.""" + """Use DPF to parse the d3plot.""" def __init__(self, path: Path): """ @@ -64,7 +64,7 @@ def __init__(self, path: Path): Parameters ---------- path : Path - d3plot file path. + Path to the d3plot file. """ _check_accept_dpf() @@ -77,13 +77,13 @@ def __init__(self, path: Path): for version, server in available_dpf_servers.items(): if version in _SUPPORTED_DPF_SERVERS: - LOGGER.info(f"Trying to launch DPF Server {version}") + LOGGER.info(f"Trying to launch DPF server {version}.") self._server = server() break if self._server is None: - mess = f"""Failed to launch supported DPF Server: - Please make sure one of {_SUPPORTED_DPF_SERVERS} is installed.""" + mess = f"""Failed to launch supported DPF server: + Make sure one of {_SUPPORTED_DPF_SERVERS} is installed.""" LOGGER.error(mess) raise SupportedDPFServerNotFoundError(mess) @@ -113,7 +113,7 @@ def get_ep_fields(self, at_step: int = None) -> dpf.FieldsContainer: # NOTE: to get time steps: # self.model.metadata.time_freq_support.time_frequencies.data_as_list - op = dpf.Operator("lsdyna::ms::results") # ls dyna EP operator + op = dpf.Operator("lsdyna::ms::results") # LS-DYNA EP operator op.inputs.data_sources(self.ds) op.inputs.time_scoping(time_scoping) fields = op.eval() @@ -175,24 +175,24 @@ def print_lsdyna_ms_results(self) -> None: return def get_displacement_at(self, time: float) -> np.ndarray: - """Get displacement field. + """Get the displacement field. Parameters ---------- time : float - at which time + Time at which to get the displacement field. 
Returns ------- np.ndarray - displacement + Displacement array. """ if time not in self.time: - LOGGER.warning("No data at given time, results are from interpolation.") + LOGGER.warning("No data available at given time. Results are from interpolation.") return self.model.results.displacement.on_time_scoping(float(time)).eval()[0].data def get_material_ids(self) -> np.ndarray: - """Get list of material id.""" + """Get a list of the material IDs.""" return self.model.metadata.meshed_region.elements.materials_field.data def get_history_variable( @@ -201,14 +201,14 @@ def get_history_variable( at_step: int = 0, ) -> np.ndarray: """ - Get history variables in d3plot. + Get history variables in the d3plot. Parameters ---------- hv_index: List[int] History variables index. - at_step: int, optional - At this frame, by default 0. + at_step: int, default: 0 + Step at which to get the history variables. Returns ------- @@ -217,8 +217,8 @@ def get_history_variable( Notes ----- - d3plot.get_history_variable(hv_index=list(range(9)), at_frame=at_frame) to - get Deformation gradient (column-wise storage),see MAT_295 in LS-DYNA manual. + d3plot.get_history_variable(hv_index=list(range(9)), at_frame=at_frame). To + get the deformation gradient (column-wise storage), see MAT_295 in the LS-DYNA manuals. """ if at_step > self.model.metadata.time_freq_support.n_sets: @@ -238,17 +238,17 @@ def get_history_variable( return np.array(res) def get_heatflux(self, step: int = 2) -> np.ndarray: - """Get nodal heat flux vector from d3plot. + """Get nodal heat flux vector from the d3plot. Parameters ---------- - step : int, optional - time step, by default 2 + step : int, default: 2 + Time step Returns ------- np.ndarray - heat flux + Heat flux. 
""" op = dpf.Operator("lsdyna::d3plot::TF") op.inputs.data_sources(self.ds) @@ -258,15 +258,15 @@ def get_heatflux(self, step: int = 2) -> np.ndarray: class ICVoutReader: - """Read control volume data from binout.""" + """Read control volume data from the binout file.""" def __init__(self, fn: str) -> None: - """Init reader. + """Initialize reader. Parameters ---------- fn : str - binout file path + Path to the binout file. """ _check_accept_dpf() self._ds = dpf.DataSources() @@ -278,7 +278,7 @@ def __init__(self, fn: str) -> None: exit() def _get_available_ids(self) -> np.ndarray: - """Get available CV ids and CVI ids.""" + """Get available CV IDs and CVI IDs.""" icvout_op = dpf.Operator("lsdyna::binout::ICV_ICVIID") icvout_op.inputs.data_sources(self._ds) fields1 = icvout_op.outputs.results() @@ -317,15 +317,15 @@ def get_pressure(self, icv_id: int) -> np.ndarray: Parameters ---------- icv_id : int - control volume id + Control volume ID. Returns ------- np.ndarray - pressure array + Pressure array. """ if icv_id not in self._icv_ids: - raise ValueError("icv_id not found.") + raise ValueError("'icv_id' is not found.") return self._get_field(icv_id, "ICV_P") @@ -335,15 +335,15 @@ def get_volume(self, icv_id: int) -> np.ndarray: Parameters ---------- icv_id : int - control volume id + Control volume ID. Returns ------- np.ndarray - volume array + Volume array. """ if icv_id not in self._icv_ids: - raise ValueError("icv_id not found.") + raise ValueError("'icv_id' not found.") v = self._get_field(icv_id, "ICV_V") # MPP bug: volume is zero at t0 @@ -358,15 +358,15 @@ def get_flowrate(self, icvi_id: int) -> np.ndarray: Parameters ---------- icvi_id : int - control volume interaction id + Control volume interaction ID. Returns ------- np.ndarray - flowrate array + Flow rate array. 
""" if icvi_id not in self._icvi_ids: - raise ValueError("icvi_id not found.") + raise ValueError("'icvi_id' is not found.") # area is obtained by 'ICVI_A' return self._get_field(icvi_id, "ICVI_FR") @@ -384,7 +384,7 @@ def _get_field(self, id: int, operator_name: str) -> np.ndarray: class EPpostprocessor: - """Postprocess Electrophysiology results.""" + """Postprocess EP (Electrophysiology) results.""" def __init__(self, results_path: Path, model: HeartModel = None): """Postprocess EP results. @@ -406,7 +406,7 @@ def load_ep_fields(self): self.fields = self.reader.get_ep_fields() def get_activation_times(self, at_step: int = None): - """Get activation times field.""" + """Get the field with activation times.""" step = ( self.reader.model.metadata.time_freq_support.time_frequencies.scoping.ids[-1] if at_step is None @@ -456,7 +456,7 @@ def _get_ep_field(self, variable_id: int, node_id=None, plot: bool = False): return phi, times def read_ep_nodout(self): - """Read Electrophysiology results.""" + """Read EP results.""" em_nodout_path = os.path.join(self.results_path, "em_nodout_EP_001.dat") with open(em_nodout_path, "r") as f: lines = f.readlines() @@ -490,7 +490,7 @@ def read_ep_nodout(self): self._assign_pointdata(pointdata=self.activation_time, node_ids=self.node_ids) def create_post_folder(self, path: Path = None): - """Create Postprocessing folder.""" + """Create postprocessing folder.""" if path is None: post_path = os.path.join(os.path.dirname(self.reader.ds.result_files[0]), "post") else: @@ -502,7 +502,7 @@ def create_post_folder(self, path: Path = None): return post_path def animate_transmembrane(self): - """Animate transmembrane potentials and export to vtk.""" + """Animate transmembrane potentials and export to VTK.""" vm, times = self.get_transmembrane_potential() # Creating scene and loading the mesh post_path = self.create_post_folder() @@ -520,7 +520,7 @@ def animate_transmembrane(self): return def export_transmembrane_to_vtk(self): - """Export 
transmembrane potentials to vtk.""" + """Export transmembrane potentials to VTK.""" vm, times = self.get_transmembrane_potential() post_path = self.create_post_folder() grid = self.reader.meshgrid.copy() @@ -567,7 +567,7 @@ def compute_ECGs(self, electrodes: np.ndarray): # noqa: N802 return ecgs, times def read_ECGs(self, path: Path): # noqa: N802 - """Read ECG text file produced by LS-DYNA simulation.""" + """Read ECG text file produced by the LS-DYNA simulation.""" data = np.loadtxt(path, skiprows=4) times = data[:, 0] ecgs = data[:, 1:11] @@ -586,16 +586,16 @@ def compute_12_lead_ECGs( # noqa: N802 ---------- ECGs : np.ndarray mxn array containing ECGs, where m is the number of time steps - and n the 10 electrodes in this order: - "V1" "V2" "V3" "V4" "V5" "V6" "RA" "LA" "RL" "LL" - plot : bool, optional - plot option, by default True + and n is the 10 electrodes in this order: + ``V1`` ``V2`` ``V3`` ``V4`` ``V5`` ``V6`` ``RA`` ``LA`` ``RL`` ``LL`` + plot : bool, default: True + Whether to plot. Returns ------- np.ndarray - 12-Lead ECGs in this order: - "I" "II" "III" "aVR" "aVL" "aVF" "V1" "V2" "V3" "V4" "V5" "V6" + 12-lead ECGs in this order: + ``I`` ``II`` ``III`` ``aVR`` ``aVL`` ``aVF`` ``V1`` ``V2`` ``V3`` ``V4`` ``V5`` ``V6`` """ right_arm = ECGs[:, 6] left_arm = ECGs[:, 7] @@ -682,42 +682,42 @@ def compute_12_lead_ECGs( # noqa: N802 return ecg_12lead def _assign_pointdata(self, pointdata: np.ndarray, node_ids: np.ndarray): - """Assign point data to mesh.""" + """Assign point data to the mesh.""" result = np.zeros(self.mesh.n_points) result[node_ids - 1] = pointdata self.mesh.point_data["activation_time"] = result class D3plotToVTKExporter: - """Read d3plot and save deformed mesh.""" + """Read d3plot and save the deformed mesh.""" def __init__(self, d3plot_file: str, t_to_keep: float = 10.0e10) -> None: - """Init. + """Initialize.
Parameters ---------- d3plot_file : str - d3plot file path - t_to_keep : float, optional - time to be converted, by default 10.0e10 + Path to the d3plot file. + t_to_keep : float, default: 10.0e10 + Time to convert. """ self.data = D3plotReader(d3plot_file) self.save_time = self.data.time[self.data.time >= self.data.time[-1] - t_to_keep] def convert_to_pvgrid_at_t(self, time: float, fname: str = None) -> pv.UnstructuredGrid: - """Convert d3plot data into pyvista UnstructuredGrid. + """Convert d3plot data into a PyVista ``UnstructuredGrid`` object. Parameters ---------- time : float - time to convert - fname : str - filename to be save save data, default is None + Time to convert. + fname : str, default: None + Name of the file to save data to. Returns ------- pv.UnstructuredGrid - result in pyvista object + Result in PyVista object. """ mesh = self.data.meshgrid.copy() i_frame = np.where(self.data.time == time)[0][0] diff --git a/src/ansys/health/heart/post/klotz_curve.py b/src/ansys/health/heart/post/klotz_curve.py index a7d6747c5..1f9361fbf 100644 --- a/src/ansys/health/heart/post/klotz_curve.py +++ b/src/ansys/health/heart/post/klotz_curve.py @@ -40,12 +40,12 @@ class EDPVR: Bn = 2.76 # mmHg def __init__(self, vm: float, pm: float): - """Init Klotz curve with End diastolic volume and pressure. + """Initialize Klotz curve with end diastolic volume and pressure. Parameters ---------- vm : float - Volume in mL + Volume in mL. pm : float Pressure in mmHg """ self.vm = vm @@ -73,12 +73,12 @@ def get_pressure(self, volume: float | np.ndarray) -> float | np.ndarray: Parameters ---------- volume : float | np.ndarray - volume in mL + Volume in mL. Returns ------- float| np.ndarray - pressure in mmHg + Pressure in mmHg. """ return self.Alpha * volume**self.Beta @@ -88,15 +88,15 @@ def get_volume(self, pressure: np.ndarray) -> np.ndarray: Parameters ---------- pressure : np.ndarray - pressure in mmHg + Pressure in mmHg. Returns ------- np.ndarray - volume in mmL + Volume in mL.
""" if not isinstance(pressure, np.ndarray): - raise TypeError("Input must be 1-dimensioanl np.array.") + raise TypeError("Input must be one-dimensional np.array.") volume = np.zeros(pressure.shape) for i, p in enumerate(pressure): volume[i] = (p / self.Alpha) ** (1 / self.Beta) @@ -106,17 +106,17 @@ def get_volume(self, pressure: np.ndarray) -> np.ndarray: return volume def plot_EDPVR(self, simulation_data: list = None) -> matplotlib.figure.Figure: # noqa: N802 - """Plot klotz curve, with simulation data if exists. + """Plot Klotz curve with simulation data if it exists. Parameters ---------- - simulation_data : list, optional - [volume, pressure] from simulation, by default None + simulation_data : list, default: None + ``[volume, pressure]`` from simulation. Returns ------- matplotlib.figure.Figure - figure + Figure. """ vv = np.linspace(0, 1.1 * self.vm, num=101) pp = self.get_pressure(vv) diff --git a/src/ansys/health/heart/post/laplace_post.py b/src/ansys/health/heart/post/laplace_post.py index 5aaa3b2ad..bd254c04f 100644 --- a/src/ansys/health/heart/post/laplace_post.py +++ b/src/ansys/health/heart/post/laplace_post.py @@ -41,14 +41,16 @@ def read_laplace_solution( Parameters ---------- directory : str - directory of d3plot files + Directory of d3plot files. field_list : list[str] - name of each d3plot file/field + Name of each d3plot file/field. + read_heatflux : bool, default: False + Whether to read heatflux. Returns ------- pv.UnstructuredGrid - grid with point data of each field + Grid with point data of each field. """ data = D3plotReader(os.path.join(directory, field_list[0] + ".d3plot")) grid: pv.UnstructuredGrid = data.model.metadata.meshed_region.grid @@ -60,7 +62,7 @@ def read_laplace_solution( t = t elif len(t) == 3 * grid.n_points: LOGGER.warning( - "DPF reads temperature as a vector field, but expecting a scalar field.\ + "DPF reads temperature as a vector field but is expecting a scalar field.\ Consider updating the DPF server." 
) t = t[::3] @@ -77,26 +79,26 @@ def read_laplace_solution( return grid.copy() -@deprecated(reason="transmural direction can be automatically read by d3plot heat flux.") +@deprecated(reason="Transmural direction can be automatically read by d3plot heat flux.") def update_transmural_by_normal(grid: pv.UnstructuredGrid, surface: pv.PolyData) -> np.ndarray: """Use surface normal for transmural direction. Note ---- - Assume mesh is coarse compared to the thinkness, solid cell normal - is interpolated from closest surface normal + Assume mesh is coarse compared to the thickness. Solid cell normal + is interpolated from closest surface normal. Parameters ---------- grid : pv.UnstructuredGrid - atrium grid + Atrium grid. surface : pv.PolyData - atrium endocardium surface + Atrium endocardium surface. Returns ------- np.ndarray - cell transmural direction vector + Cell transmural direction vector. """ surface_normals = surface.clean().compute_normals() @@ -120,21 +122,21 @@ def orthogonalization( Parameters ---------- grad_trans : np.ndarray - transmural vector + Transmural vector. k : np.ndarray - Bundle selection vector + Bundle selection vector. Returns ------- tuple[np.ndarray, np.ndarray, np.ndarray] - local coordinate system e_l,e_n,e_t + Local coordinate system ``e_l, e_n, e_t``. """ norm = np.linalg.norm(grad_trans, axis=1) bad_cells = np.argwhere(norm == 0).ravel() LOGGER.debug( f"{len(bad_cells)} cells have null gradient in transmural direction." - f" This should only be at valve regions and can be checked from the vtk file." + f" This should only be at valve regions and can be checked from the VTK file." ) norm = np.where(norm != 0, norm, 1) @@ -159,29 +161,30 @@ def compute_la_fiber_cs( Parameters ---------- directory : str - directory of d3plot files. + Directory of d3plot files. settings : AtrialFiber Atrial fiber settings. 
- endo_surface : pv.PolyData, optional - _description_, by default None - If given, normal direction will be updated by surface normal instead of Laplace solution. + endo_surface : pv.PolyData, default: None + _description_. If given, normal direction is updated by the surface + normal instead of the Laplace solution. Notes ----- - Method descrbed in https://doi.org/10.1016/j.cma.2020.113468 + This method is described in `Modeling cardiac muscle fibers in ventricular and + atrial electrophysiology simulations `_. Returns ------- pv.UnstructuredGrid - pv object with fiber coordinates system. + PV object with fiber coordinates system. """ def bundle_selection(grid): """Left atrium bundle selection. - Add two cell data to grid. - - 'k' is unit vector from different gradient fields. - - 'bundle' labels regions of selection. + Add two-cell data to grid. + - 'k' is the unit vector from different gradient fields. + - 'bundle' labels the regions of selection. """ # bundle selection @@ -237,29 +240,30 @@ def compute_ra_fiber_cs( Parameters ---------- directory : str - directory of d3plot files. + Directory of d3plot files. settings : AtrialFiber Atrial fiber settings. - endo_surface : pv.PolyData, optional - _description_, by default None - If given, normal direction will be updated by surface normal instead of Laplace solution. + endo_surface : pv.PolyData, default: None + _description_. If given, normal direction is updated by the surface normal + instead of the Laplace solution. Notes ----- - Method descrbed in https://doi.org/10.1016/j.cma.2020.113468 + This method is described in `Modeling cardiac muscle fibers in ventricular and + atrial electrophysiology simulations `_. Returns ------- pv.UnstructuredGrid - pv object with fiber coordinates system. + PV object with the fiber coordinates system. """ def bundle_selection(grid): """Right atrium bundle selection. - Add two cell data to grid. - - 'k' is unit vector from different gradient fields. 
- - 'bundle' labels regions of selection. + Add two cell data fields to the grid. + - 'k' is the unit vector from different gradient fields. + - 'bundle' labels the regions of selection. """ tao_tv = settings.tau_tv # 0.9 @@ -376,18 +380,18 @@ def set_rotation_bounds( Parameters ---------- w : np.ndarray - intra-ventricular interpolation weight if outflow_tracts is not None + Intra-ventricular interpolation weight if ``outflow_tracts`` is not ``None``. endo : float - rotation angle at endocardium + Rotation angle at endocardium. epi : float - rotation angle at epicardium - outflow_tracts : list[float, float], optional - rotation angle of enendocardium do and epicardium on outflow tract, by default None + Rotation angle at epicardium. + outflow_tracts : list[float, float], default: None + Rotation angle of endocardium and epicardium on outflow tract. Returns ------- tuple[np.ndarray, np.ndarray] - cell-wise rotation bounds for endocardium and epicardium + Cell-wise rotation bounds for endocardium and epicardium. """ def _sigmoid(z): @@ -420,22 +424,22 @@ def compute_rotation_angle( Parameters ---------- grid : pv.UnstructuredGrid - mesh grid + Mesh grid. w : np.ndarray - intral ventricular interpolation weight + Intra-ventricular interpolation weight. rotation : list[float, float] - rotation angles in degree at endocardium and epicardium - outflow_tracts : list[float, float], optional - rotation angle of enendocardium do and epicardium on outflow tract, by default None + Rotation angles in degrees at endocardium and epicardium. + outflow_tracts : list[float, float], default: None + Rotation angle of endocardium and epicardium on outflow tract. Returns ------- np.ndarray - cell-wise rotation angles + Cell-wise rotation angles. Note ---- - Compute for all cells, but filtered by left/right mask outside of this function. + Compute for all cells, but filter by left/right mask outside of this function. 
""" rot_endo, rot_epi = set_rotation_bounds(w, rotation[0], rotation[1], outflow_tracts) @@ -457,23 +461,27 @@ def compute_ventricle_fiber_by_drbm( }, left_only: bool = False, ) -> pv.UnstructuredGrid: - """D-RBM method described in https://doi.org/10.1016/j.cma.2020.113468. + """Compute the fiber coordinate system from Laplace solving. Parameters ---------- directory : str - directory of d3plot/tprint files. + Directory of d3plot/tprint files. settings : dict, optional - rotation angles, by default { "alpha_left": [-60, 60], "alpha_right": [-60, 60], - "alpha_ot": None, "beta_left": [-65, 25], "beta_right": [-65, 25], "beta_ot": None, } + Rotation angles. By default: ``{ "alpha_left": [-60, 60], "alpha_right": [-60, 60], + "alpha_ot": None, "beta_left": [-65, 25], "beta_right": [-65, 25], "beta_ot": None, }``. + left_only : bool, default: False + Whether to only compute fibers on the left ventricle. - left_only : bool, optional - only compute fibers on left ventricle, by default False + Notes + ----- + The D-RBM method is described in `Modeling cardiac muscle fibers in ventricular and + atrial electrophysiology simulations `_. Returns ------- pv.UnstructuredGrid - grid contains `fiber`,`cross-fiber`,`sheet` vectors + Grid contains ``fiber``, ``cross-fiber``, and ``sheet`` vectors. """ solutions = ["trans", "ab_l", "ot_l", "w_l"] if not left_only: diff --git a/src/ansys/health/heart/post/pvloop.py b/src/ansys/health/heart/post/pvloop.py index 42cc28e4a..fe8502540 100644 --- a/src/ansys/health/heart/post/pvloop.py +++ b/src/ansys/health/heart/post/pvloop.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-"""Get pressure-volume (PV) loop from LS-DYNA icvout file.""" +"""Get pressure-volume (PV) loop from the LS-DYNA ICVOUT file.""" import os @@ -29,21 +29,21 @@ from ansys.health.heart.post.dpf_utils import ICVoutReader -# NOTE: assume control volume is defined with in this order +# NOTE: Assume control volume is defined in this order: CV_NAME = ["LV", "RV", "LA", "RA"] def write_pvloop_pngs(pressure: np.ndarray, volume: np.ndarray, save_to: str) -> None: - """Write pv loop figures into png. + """Write PV loop figures to PNG file. Parameters ---------- pressure : np.ndarray - pressure array + Pressure array. volume : np.ndarray - volume array + Volume array. save_to : str - folder to save + Directory to save the file to. """ n_cv = pressure.shape[0] for iframe in range(pressure.shape[1]): @@ -70,16 +70,16 @@ def write_pvloop_pngs(pressure: np.ndarray, volume: np.ndarray, save_to: str) -> def generate_pvloop(f: str, out_dir: str, t_to_keep: float = 800) -> None: - """Generate pv loop figures from icvout. + """Generate PV loop figures from the ICVOUT file. Parameters ---------- f : str - binout path + Path to the binout file. out_dir : str - folder to save + Directory to save the file to. t_to_keep : float, optional - Time to keep from the end, by default 800 (last heart beat) + Time to keep from the end. The default is ``800``, which is the last heart beat. 
""" icvout = ICVoutReader(f) n_cv = len(icvout._icv_ids) diff --git a/src/ansys/health/heart/post/strain_calculator.py b/src/ansys/health/heart/post/strain_calculator.py index 899ece18c..77c8ce061 100644 --- a/src/ansys/health/heart/post/strain_calculator.py +++ b/src/ansys/health/heart/post/strain_calculator.py @@ -36,18 +36,18 @@ class AhaStrainCalculator: - """Compute Longitudinal, Radial, Circumferential strain for left ventricle.""" + """Compute longitudinal, radial, and circumferential strain for the left ventricle.""" def __init__(self, model: HeartModel, d3plot_file): """ - Initialize AHA strain calculator. + Initialize the AHA strain calculator. Parameters ---------- model: HeartModel Heart model object. d3plot_file: Path.Path - d3plot header file path. + Path to the d3plot header file. """ self.model = model @@ -62,17 +62,17 @@ def _compute_thickness_lines(self, time_array: np.ndarray | list = None) -> list Parameters ---------- time_array : np.ndarray | list, optional - time array to export, by default d3plot time + Time array to export. The d3plot time is exported by default. Returns ------- list[pv.PolyData] - Polydata that has lines from nodes on the endocardium to nodes on the epicardium + Polydata that has lines from nodes on the endocardium to nodes on the epicardium. Notes ----- - Endocardium surfaces are supposed to be smooth - Artifact may occur on base (close to valves) region + Endocardium surfaces are supposed to be smooth. + Artifact may occur on base (close to valves) region. """ if time_array is None: time_array = self.d3plot.time @@ -121,17 +121,17 @@ def _compute_thickness( Parameters ---------- - time_array : _type_ - time array to export - surface_endo : pv.PolyData] - endocardium surface - surface_epi : pv.PolyData] - epicardium surface + time_array : np.ndarray + Time array to export. + surface_endo : pv.PolyData + Endocardium surface. + surface_epi : pv.PolyData + Epicardium surface. 
Returns ------- list[pv.PolyData] - thickness lines + Thickness lines. """ res = [] # assumes that corresponding points don't change in time @@ -157,18 +157,18 @@ def compute_aha_strain( Parameters ---------- - out_dir : str, optional - output folder, by default None - write_vtk : bool, optional - write into vtk files, by default False - t_to_keep : float, optional - time to stop, by default 10e10 + out_dir : str, default: None + Output folder. + write_vtk : bool, default: False + Whether to write to VTK files. + t_to_keep : float, default: 10e10 + Time to stop. Returns ------- np.ndarray - array of N_time * (1+17*3), columns represent time and - longitudinal, radial, and circumferential strain averaged of each segment + Array of N_time * (1+17*3). Columns represent time and + longitudinal, radial, and circumferential strain averaged of each segment. """ save_time = self.d3plot.time[self.d3plot.time >= self.d3plot.time[-1] - t_to_keep] strain = np.zeros((len(save_time), 1 + 17 * 3)) @@ -203,19 +203,19 @@ def compute_aha_strain( def compute_aha_strain_at(self, frame: int = 0, out_dir: pathlib.Path = None) -> np.ndarray: """ - Export AHA strain and/or save vtk file for a given frame. + Export AHA strain and/or save a VTK file for a given frame. Parameters ---------- - frame: int - at this frame, by default 0. - out_dir: pathlib.Path - folder where vtk files are saved, by default not save. + frame: int, default: 0 + Frame number to compute strain. + out_dir: pathlib.Path, default: None + Directory to save VTK file to. No VTK file is saved by default. Returns ------- np.ndarry - AHA LRC strain matrix (17 * 3) + AHA LRC strain matrix (17 * 3). 
""" element_lrc, aha_lrc, element_lrc_averaged = self._compute_myocardial_strain(frame) @@ -251,9 +251,9 @@ def _compute_myocardial_strain( Returns ------- - return1: [nelem * 3] elemental LRC strain - return2: [17 * 3] AHA17 LRC strain - return3: [nelem * 3] elemental LRC strain averaged from AHA17 + return1: [nelem * 3] elemental LRC strain. + return2: [17 * 3] AHA17 LRC strain. + return3: [nelem * 3] elemental LRC strain averaged from AHA17. """ if reference is not None: raise NotImplementedError @@ -303,27 +303,27 @@ def _compute_myocardial_strain( @staticmethod def bullseye_plot(ax, data, seg_bold=None, cmap=None, norm=None) -> None: - """Bullseye representation for the left ventricle. + """Plot bullseye representation for the left ventricle. Parameters ---------- ax : axes data : list of int and float - The intensity values for each of the 17 segments - seg_bold : list of int, optional - A list with the segments to highlight - cmap : ColorMap or None, optional - Optional argument to set the desired colormap - norm : Normalize or None, optional - Optional argument to normalize data into the [0.0, 1.0] range + Intensity values for each of the 17 segments. + seg_bold : list of int, default: None + List with the segments to highlight. + cmap : ColorMap or None, default: None + Optional argument to set the desired colormap. + norm : Normalize or None, default: None + Optional argument to normalize data into the [0.0, 1.0] range. Notes ----- This function creates the 17 segment model for the left ventricle according to the American Heart Association (AHA) [1]_ - Based on: - https://matplotlib.org/stable/gallery/specialty_plots/leftventricle_bulleye.html + It is based on `Left ventricle bullseye `_ + in the Matplotlib examples. 
References ---------- diff --git a/src/ansys/health/heart/post/system_model_post.py b/src/ansys/health/heart/post/system_model_post.py index c75e506e1..58162190a 100644 --- a/src/ansys/health/heart/post/system_model_post.py +++ b/src/ansys/health/heart/post/system_model_post.py @@ -65,11 +65,11 @@ class Volume: @dataclass class SystemState: """ - System state including pressure, flow, volume. + System state including pressure, flow, and volume. Notes ----- - future development + Future development. """ pressure: Pressure @@ -86,11 +86,11 @@ def __init__(self, csv_path: str, ed_state: list[float, float], name: str = ""): Parameters ---------- csv_path : str - CSV file path + Path to the CSV file. ed_state : list[float,float] - End of Diastole pressure and volume - name : str, optional - Cavity name, by default "" + End of diastole pressure and volume. + name : str, default: "" + Cavity name. """ self.name = name self.ed = ed_state @@ -119,15 +119,14 @@ def __init__(self, csv_path: str, ed_state: list[float, float], name: str = ""): @staticmethod def _integrate_volume(v0: float, t: np.ndarray, q: np.ndarray) -> np.ndarray: - """Integrate cavity's volume. + """Integrate the cavity's volume. Notes ----- - Cavity's volume is not evaluated/saved in csv file, this is to ensure - volume is consistent with what's in icvout. - - This assumes that the implicit solver with gamma=0.6 was used. + Cavity's volume is not evaluated/saved in the CSV file. This is to ensure + that volume is consistent with what's in the ICVOUT file. + This assumes that the implicit solver with ``gamma=0.6`` was used. Parameters ---------- @@ -141,7 +140,7 @@ def _integrate_volume(v0: float, t: np.ndarray, q: np.ndarray) -> np.ndarray: Returns ------- np.ndarray - Cavity volume + Cavity volume. """ gamma = 0.6 @@ -155,20 +154,20 @@ def _integrate_volume(v0: float, t: np.ndarray, q: np.ndarray) -> np.ndarray: class SystemModelPost: """ - Class for post-processing system model. 
 + Postprocessing system model. Notes ----- - unit: ms, kPa, mL + The units are ms, kPa, and mL. """ def __init__(self, dir: str): - """Initialize SystemModelPost. + """Initialize ``SystemModelPost``. Parameters ---------- dir : str - Simulation directory + Simulation directory. """ self.dir = dir self.model_type = "LV" @@ -212,19 +211,19 @@ def __init__(self, dir: str): ) def get_ejection_fraction(self, t_start: float = 0, t_end: float = 10e10) -> float: - """Compute ejection fraction on given time interval. + """Compute ejection fraction on a given time interval. Parameters ---------- - t_start : float, optional - Start time, by default 0 - t_end : float, optional - End time, by default 10e10 + t_start : float, default: 0 + Start time. + t_end : float, default: 10e10 + End time. Returns ------- float - Ejection fraction + Ejection fraction. """ ef = [None, None] start = np.where(self.lv_system.time >= t_start)[0][0] @@ -252,19 +251,19 @@ def plot_pv_loop( Parameters ---------- - t_start : float, optional - Start time to plot, by default 0 - t_end : float, optional - End time to plot, by default 10e10 - show_ed : bool, optional - Whether to show the end of diastole state in zeroppressure, by default True - ef : list[float, float], optional - Show the ejection fraction in the legend, by default [None, None] + t_start : float, default: 0 + Start time to plot. + t_end : float, default: 10e10 + End time to plot. + show_ed : bool, default: True + Whether to show the end of the diastole state at zero pressure. + ef : list[float, float], default: [None, None] + Ejection fraction to show in the legend. Returns ------- plt.Figure - Figrue handle + Figure handle. 
""" start = np.where(self.lv_system.time >= t_start)[0][0] end = np.where(self.lv_system.time <= t_end)[0][-1] diff --git a/src/ansys/health/heart/pre/conduction_beam.py b/src/ansys/health/heart/pre/conduction_beam.py index 55573a5cb..078c63a8c 100644 --- a/src/ansys/health/heart/pre/conduction_beam.py +++ b/src/ansys/health/heart/pre/conduction_beam.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""Module containing class for creating conduxtion system.""" +"""Module containing class for creating conduction system.""" import networkx as nx import numpy as np @@ -80,7 +80,7 @@ def compute_sa_node(self, target_coord=None) -> Point: Compute SinoAtrial node. SinoAtrial node is defined on the endocardium of the right atrium and - between sup vena cava and inf vena cave. + between the sup vena cava and inf vena cava. """ if target_coord is None: sup_vcava_centroid = next( @@ -118,12 +118,12 @@ def compute_av_node(self, target_coord=None) -> Point: """ Compute Atrio-Ventricular node. - AtrioVentricular node is on right artrium endocardium surface and closest to septum. + Atrio-Ventricular node is on the right atrium endocardium surface and closest to the septum. Returns ------- Point - returns the AV node. + AV node. 
""" right_atrium_endo = self.m.mesh.get_surface(self.m.right_atrium.endocardium.id) @@ -152,19 +152,19 @@ def compute_av_node(self, target_coord=None) -> Point: return atrioventricular_point def compute_av_conduction(self) -> pv.PolyData: - """Compute Atrio-Ventricular conduction by means of beams following a geodesic path.""" + """Compute atrio-ventricular conduction by means of beams following a geodesic path.""" right_atrium_endo = self.m.mesh.get_surface(self.m.right_atrium.endocardium.id) try: sino_atrial_id = self.m.right_atrium.get_point("SA_node").node_id except AttributeError: - LOGGER.info("SA node is not defined, creating with default options.") + LOGGER.info("SA node is not defined. Creating with default options.") sino_atrial_id = self.m.compute_sa_node().node_id try: atrio_ventricular_id = self.m.right_atrium.get_point("AV_node").node_id except AttributeError: - LOGGER.info("AV node is not defined, creating with default options.") + LOGGER.info("AV node is not defined. Creating with default options.") atrio_ventricular_id = self.m.compute_av_node().node_id #! get local SA/AV ids. @@ -188,7 +188,7 @@ def _get_hisbundle_bifurcation(self) -> np.ndarray: """ Define start points of the bundle of His. - End point: create a point inside of septum part and close to AV node . + End point: Create a point inside of the septum part and close to the AV node. """ atrio_ventricular_node = self.m.right_atrium.get_point("AV_node") @@ -285,22 +285,22 @@ def compute_his_conduction(self, beam_length: float = 1.5) -> tuple[_BeamsMesh, def find_path( mesh: pv.UnstructuredGrid, start: np.ndarray, end: np.ndarray, return_segment=True ) -> np.ndarray | tuple[np.ndarray, np.ndarray]: - """Find shortest path between two nodes. + """Find the shortest path between two nodes. Notes ----- - Unlike geodesic, this method searches a path inside of a 3D mesh. + Unlike the geodesic method, this method searches a path inside of a 3D mesh. 
Parameters ---------- mesh : pv.UnstructuredGrid - Must be with tetra cells. + Mesh that must be with tetra cells. start : np.ndarray Start point coordinates. end : np.ndarray End point coordinates - return_segment : bool, optional - Return a segment set (list of triangles) on which the path relies, by default True + return_segment : bool, default: True + Whether to return the segment set (list of triangles) that the path relies on. """ #! mesh can now have multiple element types: TETRA, TRIANGLE, etc. mesh = mesh.extract_cells_by_type(pv.CellType.TETRA) @@ -391,7 +391,7 @@ def _create_his_side( return (side_his[-1], side_his, sgmt) def compute_left_right_bundle(self, start_coord, end_coord, side: str): - """Bundle branch.""" + """Compute the bundle branch.""" if side == _ConductionType.LEFT_BUNDLE_BRANCH.value: ventricle = self.m.left_ventricle endo_surface = self.m.mesh.get_surface(self.m.left_ventricle.endocardium.id) @@ -457,12 +457,12 @@ def _compute_bachman_bundle( return beam_net def _connect_to_solid(self, component_id: int, local_point_ids: np.ndarray) -> None: - """Connect conduction system component to solid through the "_is-connected" pointdata. + """Connect conduction system component to solid through the ``_is-connected`` pointdata. Parameters ---------- component_id : int - id of the beam mesh component + ID of the beam mesh component, local_point_ids : np.array _description_ """ diff --git a/src/ansys/health/heart/pre/database_utils.py b/src/ansys/health/heart/pre/database_utils.py index 2c2fd5c17..745d84a14 100644 --- a/src/ansys/health/heart/pre/database_utils.py +++ b/src/ansys/health/heart/pre/database_utils.py @@ -42,17 +42,17 @@ def _read_input_mesh(mesh_path: str, database: str) -> pv.UnstructuredGrid: mesh_path : str Path to the mesh file. database : str - Database name + Database name. Returns ------- pv.UnstructuredGrid - Unstructured grid + Unstructured grid. 
Raises ------ TypeError - If the mesh fails to be imported as an UnstructuredGrid + If the mesh fails to be imported as an UnstructuredGrid. """ mesh: pv.UnstructuredGrid = pv.read(mesh_path) if isinstance(mesh, pv.MultiBlock): @@ -71,17 +71,17 @@ def _read_input_mesh(mesh_path: str, database: str) -> pv.UnstructuredGrid: def _get_original_labels(database: str, case_num: int = None) -> dict: - """Import the original labels based on database name. + """Import the original labels based on a database name. Parameters ---------- database : str - Name of the database. + Database name. Returns ------- dict - Dictionary representing the label to id map. + Dictionary representing the label to ID map. """ match database: case "Strocchi2020": @@ -108,9 +108,9 @@ def _get_interface_surfaces( Parameters ---------- mesh : pv.UnstructuredGrid - Volume mesh + Volume mesh. labels : dict - Label dict to which to add the interface labels + Label dictionary to add the interface labels to. """ tetras = np.reshape(mesh.cells, (mesh.n_cells, 5))[:, 1:] faces, c0, c1 = face_tetra_connectivity(tetras) @@ -160,9 +160,9 @@ def _find_endo_epicardial_regions( Parameters ---------- geom_all : pv.PolyData - Entire heart model + Entire heart model. tag_to_label : dict - Dictionary that maps the tags to the corresponding labels + Dictionary that maps the tags to the corresponding labels. """ geom_all.cell_data["orig_ids"] = np.arange(0, geom_all.n_cells) @@ -228,15 +228,15 @@ def _get_part_definitions(original_labels: dict, boundary_label_to_boundary_id: Parameters ---------- original_labels : dict - Dictionary with the original labels + Dictionary with the original labels. boundary_label_to_boundary_id : dict - Dictionary of the boundary label to boundary id map + Dictionary of the boundary label to boundary ID map. Returns ------- dict - Dictionary with the part definitions. That is part id and corresponding - boundaries that enclose that part. 
+ Dictionary with the part definitions, which is the part ID and corresponding + boundaries that enclose that part. """ part_definitions = {} for original_label, original_tag in original_labels.items(): @@ -343,18 +343,18 @@ def _smooth_boundary_edges( Parameters ---------- surface_mesh : pv.PolyData - Input surface mesh + Input surface mesh. id_to_label_map : dict - ID to label map - sub_label_to_smooth : str, optional - Sub label to smooth, by default "endocardium" - window_size : int, optional - Window size of the smoothing method, by default 5 + ID to label map. + sub_label_to_smooth : str, default: ``'endocardium'`` + Sublabel to smooth. + window_size : int, default: 5 + Window size of the smoothing method. Returns ------- Tuple[pv.PolyData, dict] - Preprocessor compatible polydata object and dictionary with part definitions + Preprocessor-compatible polydata object and dictionary with part definitions. """ surfaces_to_smooth = [ id for id, label in id_to_label_map.items() if sub_label_to_smooth in label @@ -432,21 +432,22 @@ def get_compatible_input( model_type: Literal["FullHeart", "FourChamber", "BiVentricle", "LeftVentricle"] = "FullHeart", database: str = "Rodero2021", ) -> tuple[pv.PolyData, dict]: - """Extract a preprocessor compatible input surface. + """Extract a preprocessor-compatible input surface. Parameters ---------- mesh_path : str - Path to the input mesh (UnstructuredGrid or MultiBlock) - model_type : str, optional - Type of model to extract, by default "FullHeart" - database : str, optional - Database name, by default "Rodero2021" + Path to the input mesh (UnstructuredGrid or MultiBlock). + model_type : str, default: ``'FullHeart'`` + Type of model to extract. Options are ``'FullHeart'``, ``'FourChamber'``, + ``'BiVentricle'``, and ``'LeftVentricle'``. + database : str, default: ``'Rodero2021'`` + Database name. Options are ``'Rodero2021'`` and ``'Strocchi2020'``. 
 Returns ------- Tuple[pv.PolyData, dict] - Preprocessor compatible polydata object and dictionary with part definitions + Preprocessor-compatible polydata object and dictionary with part definitions. """ case_num = os.path.basename(mesh_path) case_num = int(case_num.replace(".case", "").replace(".vtk", "")) diff --git a/src/ansys/health/heart/pre/input.py b/src/ansys/health/heart/pre/input.py index 0994c85a5..45628a8cb 100644 --- a/src/ansys/health/heart/pre/input.py +++ b/src/ansys/health/heart/pre/input.py @@ -24,10 +24,10 @@ Notes ----- -This module manages the different types of input that can be handled and include: -1. User specified boundary mesh. This will require remeshing. +This module manages the different types of input that can be handled and includes +user-specified boundary meshes. This requires remeshing. -Methods are provided to validate the volume and boundary mesh objects (pyvista objects), +Methods are provided to validate the volume and boundary mesh objects (PyVista objects), and to get the necessary parts or boundaries for each respective model. 
""" @@ -99,7 +99,7 @@ "Pulmonary artery wall": {"id": 7, "enclosed_by_boundaries": {"pulmonary-artery-wall": 25}}, } -# the different types of "base" models supported +# different types of "base" models supported _HEART_MODELS = { "LeftVentricle": ["Left ventricle myocardium"], "BiVentricle": ["Left ventricle myocardium", "Right ventricle myocardium", "Septum"], @@ -142,9 +142,9 @@ def __init__( var_inp, faces, n_faces, lines, n_lines, strips, n_strips, deep, force_ext, force_float ) self.id = id - """ID of boundary.""" + """Boundary ID.""" self.name = name - """Name of boundary.""" + """Boundary name.""" def __repr__(self): return f"Name:{self.name}\nid:{self.id}\n{super().__repr__()}" @@ -155,9 +155,9 @@ def __init__(self, name="", id=None, boundaries: list[_InputBoundary] = []) -> N if not isinstance(boundaries, list): raise TypeError("Boundaries should be a list.") self.name = name - """Name of part.""" + """Part name.""" self.id = id - """id of part.""" + """Part ID.""" self.boundaries: list[_InputBoundary] = boundaries """list of boundaries that enclose the part.""" pass @@ -202,10 +202,10 @@ class _InputModel: Notes ----- Supported inputs include: - 1. [NotImplemented] Unstructured grid file or object with part-ids + 1. [NotImplemented] Unstructured grid file or object with part IDs 2. [NotImplemented] Multiblock VTK file or object with a single UnstructuredGrid block or with multiple PolyData objects - 3. PolyData file or object with boundary-ids + 3. PolyData file or object with boundary ID """ def __init__( @@ -225,7 +225,7 @@ def __init__( if isinstance(input, (Path, str)): LOGGER.info(f"Reading {input}...") if not os.path.isfile(input): - raise FileNotFoundError(f"File {input} not found.") + raise FileNotFoundError(f"File {input} is not found.") try: self.input_polydata = pv.PolyData(input) @@ -233,12 +233,10 @@ def __init__( raise NotImplementedError(f"Failed to load file {input}. 
{e}") if part_definitions is None: - LOGGER.error("Please specify part definitions.") + LOGGER.error("Specify part definitions.") return None if scalar is None: - LOGGER.error( - "Please specify a scalar that is used to identify the enclosing boundaries." - ) + LOGGER.error("Specify a scalar that is used to identify the enclosing boundaries.") return None if scalar != "boundary-id": @@ -262,7 +260,7 @@ def parts(self): @property def part_ids(self): - """List of part ids.""" + """List of part IDs.""" return [p.id for p in self._parts] @property @@ -282,7 +280,7 @@ def boundary_names(self): @property def boundary_ids(self): - """List of boundary ids.""" + """List of boundary IDs.""" return [b.id for b in self.boundaries] @property @@ -314,8 +312,8 @@ def _add_parts(self, part_definitions: dict) -> dict: Notes ----- - The part definitions only change if multiple ids are given for a single surface. - The first item in the list defines the new boundary id, and the others are merged. + The part definitions only change if multiple IDs are given for a single surface. + The first item in the list defines the new boundary ID, and the others are merged. 
""" is_visited = np.full(self.input_polydata.n_cells, False) @@ -359,7 +357,7 @@ def _validate_if_parts_manifold(self): return True def _validate_uniqueness(self): - """Validate whether there are any boundaries with duplicate ids or names.""" + """Validate whether there are any boundaries with duplicate IDs or names.""" is_valid = True # check id to name map mapper = {} @@ -369,7 +367,7 @@ def _validate_uniqueness(self): else: if b.name != mapper[b.id]: LOGGER.error( - "Boundary with id {0} has name {1} but expecting name {2}".format( + "Boundary with ID {0} has name {1} but expecting name {2}".format( b.id, b.name, mapper[b.id] ) ) @@ -382,19 +380,19 @@ def _validate_uniqueness(self): else: if b.id != mapper[b.name]: LOGGER.error( - "Boundary with name {0} has id {1} but expecting id {2}".format( + "Boundary with name {0} has ID {1} but expecting ID {2}".format( b.name, b.id, mapper[b.name] ) ) is_valid = False if not is_valid: - LOGGER.warning("Please specify unique boundary name/id combination.") + LOGGER.warning("Specify unique boundary name/ID combination.") return is_valid def _validate_input(self): """Validate whether the provided scalars or list of scalars yield non-empty meshes.""" if len(self.parts) == 0: - LOGGER.warning("No parts defined, nothing to validate.") + LOGGER.warning("No parts are defined. There is nothing to validate.") return None for part in self.parts: for b in part.boundaries: @@ -408,7 +406,7 @@ def plot(self, show_edges: bool = True): import matplotlib as mpl except ImportError as error: LOGGER.error( - f"Failed to import matplotlib. Install with pip install matplotlib. {error}" + f"Failed to import Matplotlib. Install with 'pip install matplotlib'. 
{error}" ) return import matplotlib as mpl @@ -463,7 +461,7 @@ def _invert_dict(d: dict) -> dict: def _get_required_parts(model_type: str) -> dict: - """Get a dict of required parts for the given model.""" + """Get a dictionary of required parts for the given model.""" try: part_names = _HEART_MODELS[model_type] except KeyError: @@ -479,7 +477,7 @@ def _get_required_parts(model_type: str) -> dict: def _get_part_name_to_part_id_map() -> dict: - """Get map that maps the part names to the part ids.""" + """Get the map that maps the part names to the part iIDs.""" mapper = {} for k, value in _BOUNDARIES_PER_HEART_PART.items(): mapper[k] = value["id"] @@ -487,12 +485,12 @@ def _get_part_name_to_part_id_map() -> dict: def _get_part_id_to_part_name_map() -> dict: - """Get map that maps the part ids to the part names.""" + """Get the map that maps the part IDs to the part names.""" return _invert_dict(_get_part_name_to_part_id_map()) def _get_boundary_name_to_boundary_id_map() -> dict: - """Get the map that maps the boundary name to the boundary id.""" + """Get the map that maps the boundary name to the boundary ID.""" mapper = {} for part_name, part_subdict in _BOUNDARIES_PER_HEART_PART.items(): mapper.update(part_subdict["enclosed_by_boundaries"]) @@ -500,12 +498,12 @@ def _get_boundary_name_to_boundary_id_map() -> dict: def _get_boundary_id_to_boundary_name_map() -> dict: - """Get the map that maps the boundary name to the boundary id.""" + """Get the map that maps the boundary IDs to the boundary names.""" return _invert_dict(_get_boundary_name_to_boundary_id_map()) def _get_required_boundaries(model_type: str) -> list[str]: - """Return a list of boundaries required for the given model.""" + """Get a list of boundaries required for a given model.""" parts = _get_required_parts(model_type) required_boundaries = [] for p in parts: diff --git a/src/ansys/health/heart/pre/mesher.py b/src/ansys/health/heart/pre/mesher.py index 4dd8e389e..e872c6a88 100644 --- 
a/src/ansys/health/heart/pre/mesher.py +++ b/src/ansys/health/heart/pre/mesher.py @@ -56,7 +56,7 @@ def _get_supported_fluent_version() -> str: - """Use pyfluent to get a supported Fluent version.""" + """Use PyFluent to get a supported Fluent version.""" if os.getenv("PYANSYS_HEART_FLUENT_VERSION", None): version = os.getenv("PYANSYS_HEART_FLUENT_VERSION") if version not in _supported_fluent_versions: @@ -76,8 +76,8 @@ def _get_supported_fluent_version() -> str: except Exception: pass raise SupportedFluentVersionNotFoundError( - f"""Did not find a supported Fluent version, - please install one of {_supported_fluent_versions}""" + f"""Did not find a supported Fluent version. + Install one of these versions: {_supported_fluent_versions}""" ) @@ -88,7 +88,7 @@ def _get_supported_fluent_version() -> str: def _get_face_zones_with_filter(pyfluent_session, prefixes: list) -> list[str]: - """Get list of available boundaries in Fluent session that use any of the prefixes.""" + """Get a list of available boundaries in a Fluent session that uses any of the prefixes.""" face_zones = [] # get unique prefixes prefixes = list(set(prefixes)) @@ -128,7 +128,7 @@ def _organize_connected_regions( if num_regions == 1: continue - LOGGER.debug(f"Found {num_regions - 1} unnconnected regions, find connected candidate.") + LOGGER.debug(f"Found {num_regions - 1} unnconnected regions. Find connected candidate.") # for each region, find to what "main" region it is connected. 
for region in np.unique(conn.cell_data["RegionId"])[1:]: orphan_cell_ids = conn.cell_data["orig-cell-ids"][conn.cell_data["RegionId"] == region] @@ -142,7 +142,7 @@ def _organize_connected_regions( grid.cell_data["part-id"][connected_cell_ids], return_counts=True ) if unique_ids.shape[0] > 1: - LOGGER.debug("More than 1 candidate.") + LOGGER.debug("More than one candidate.") grid.cell_data["part-id"][orphan_cell_ids] = unique_ids[np.argmax(counts)] @@ -152,15 +152,15 @@ def _organize_connected_regions( def _assign_part_id_to_orphan_cells( grid: pv.UnstructuredGrid, scalar="part-id" ) -> pv.UnstructuredGrid: - """Use closest point interpolation to assign part id to orphan cells.""" + """Use closest point interpolation to assign part ID to orphan cells.""" grid.cell_data["_original-cell-ids"] = np.arange(0, grid.n_cells) orphans = grid.extract_cells(grid.cell_data[scalar] == 0) if orphans.n_cells == 0: - LOGGER.debug("No orphan cells detected.") + LOGGER.debug("No orphan cells are detected.") return grid - LOGGER.debug(f"Assigning part ids to {orphans.n_cells} orphan cells.") + LOGGER.debug(f"Assigning part IDs to {orphans.n_cells} orphan cells...") grid_centers = grid.cell_centers() grid_centers = grid_centers.extract_points(grid_centers.cell_data["part-id"] != 0) @@ -229,7 +229,7 @@ def _get_cells_inside_wrapped_parts(model: _InputModel, mesh: _FluentMesh) -> pv grid2 = _assign_part_id_to_orphan_cells(grid1) if np.any(grid2.cell_data["part-id"] == 0): - LOGGER.warning("Not all cells have a part id assigned.") + LOGGER.warning("Not all cells have a part ID assigned.") return grid2 @@ -244,7 +244,7 @@ def _get_fluent_meshing_session(working_directory: str | Path) -> MeshingSession else: product_version = _fluent_version - LOGGER.info(f"Launching meshing session with {product_version}") + LOGGER.info(f"Launching meshing session with {product_version}...") if _uses_container: num_cpus = 1 @@ -314,7 +314,7 @@ def _wrap_part(session: MeshingSession, boundary_names: list, 
wrapped_part_name: def _to_fluent_convention(string_to_convert: str) -> str: - """Convert string to Fluent-supported convention.""" + """Convert string to the Fluent-supported convention.""" return string_to_convert.lower().replace(" ", "_") @@ -330,11 +330,11 @@ def _update_size_per_part( global_size : float Global size to use for parts that are not referenced. part_names : list[str] - Part names involved in the model/ - size_per_part : dict, optional - Size per part used to override global size, by default None + Part names involved in the model. + size_per_part : dict, default: None + Size per part used to override global size. """ - # convert both to Fluent naming convention. Note: so remove cases and spaces + # convert both to Fluent-naming convention. Note: remove cases and spaces part_names = [_to_fluent_convention(part) for part in part_names] if size_per_part is not None: size_per_part = {_to_fluent_convention(part): size for part, size in size_per_part.items()} @@ -357,11 +357,11 @@ def _update_input_model_with_wrapped_surfaces( Parameters ---------- model : _InputModel - Input model to be updated. + Input model to bupdate. mesh : FluentMesh Fluent mesh containing all wrapped face zones. face_zone_ids_per_part : dict - Face zone ids for each part. + Face zone IDs for each part. Returns ------- @@ -374,7 +374,7 @@ def _update_input_model_with_wrapped_surfaces( fz for fz in mesh.face_zones if fz.id in face_zone_ids_per_part[part.name] ] if len(face_zones_wrapped) == 0: - LOGGER.error(f"Did not find any wrapped face zones for {part.name}") + LOGGER.error(f"Did not find any wrapped face zones for {part.name}.") # replace with remeshed face zones, note that we may have more face zones now. remeshed_boundaries = [] @@ -484,14 +484,14 @@ def _mesh_fluid_cavities( caps : List[SurfaceMesh] List of caps that close each of the cavities. 
workdir : str - Working directory - remesh_caps : bool, optional - Flag indicating whether to remesh the caps, by default True + Working directory. + remesh_caps : bool, default: True + whether to remesh the caps. Returns ------- Path - Path to the .msh.h5 volume mesh. + Path to the ``.msh.h5`` volume mesh. """ if _uses_container: mounted_volume = pyfluent.EXAMPLES_PATH @@ -571,7 +571,7 @@ def mesh_from_manifold_input_model( mesh_size: float = 2.0, overwrite_existing_mesh: bool = True, ) -> Mesh: - """Create mesh from good-quality manifold input model. + """Create mesh from a good-quality manifold input model. Parameters ---------- @@ -581,13 +581,13 @@ def mesh_from_manifold_input_model( Working directory. path_to_output : Union[str, Path] Path to the resulting Fluent mesh file. - mesh_size : float, optional - Uniform mesh size to use for both wrapping and filling the volume, by default 2.0 + mesh_size : float, default: 2.0 + Uniform mesh size to use for both wrapping and filling the volume. Returns ------- Mesh - The VTK mesh with both cell and face zones. + VTK mesh with both cell and face zones. """ smooth_boundaries = False fix_intersections = False @@ -729,7 +729,7 @@ def mesh_from_manifold_input_model( if not surface.is_manifold: LOGGER.warning( - "Part {0} not manifold - disabled surface check.".format(input_part.name) + "Part {0} is not manifold. Disabled surface check.".format(input_part.name) ) for cz in mesh.cell_zones: # use centroid of first cell to find which input part it belongs to. @@ -769,39 +769,39 @@ def mesh_from_non_manifold_input_model( Working directory. path_to_output : Union[str, Path] Path to the resulting Fluent mesh file. 
- global_mesh_size : float, optional - Uniform mesh size to use for all volumes and surfaces, by default 2.0 - _global_wrap_size : float, optional - Global size used by the wrapper to reconstruct the geometry, by default 1.5 - overwrite_existing_mesh : bool, optional - Flag indicating whether to overwrite an existing mesh, by default True - mesh_size_per_part : dict, optional - Dictionary specifying the mesh size that should be used for each part, by default None - _wrap_size_per_part : dict, optional - Dictionary specifying the mesh size that should be used to wrap each part, by default None + global_mesh_size : float, default: 2.0 + Uniform mesh size to use for all volumes and surfaces. + _global_wrap_size : float, default: 1.5 + Global size used by the wrapper to reconstruct the geometry. + overwrite_existing_mesh : bool, default: True + FWhether to overwrite an existing mesh. + mesh_size_per_part : dict, default: None + Dictionary specifying the mesh size that should be used for each part. + _wrap_size_per_part : dict, default: None + Dictionary specifying the wrap size that should be used to wrap each part. Notes ----- - Uses Fluent wrapping technology to wrap the individual parts first to create manifold - parts. Consequently wrap the entire model and use the manifold parts to split the - wrapped model into the different cell zones. + This method Uses Fluent wrapping technology to wrap the individual parts, first + to create manifold parts. Consequently, wrap the entire model and use the manifold + parts to split the wrapped model into the different cell zones. When specifying a mesh size per part, you can do that by either specifying that for all - parts, or for specific parts. The default mesh size will be used for any part not listed - in the dictionary. This also applies to the wrapping step. The user can control the wrap size - per part, or on a global level. By default a size of 1.5 mm is used: but is not guaranteed to - give good results. 
+ parts or for specific parts. The default mesh size is used for any part not listed + in the dictionary. This also applies to the wrapping step. You can control the wrap size + per part or on a global level. By default, a size of 1.5 mm is used, but this value is not + guaranteed to give good results. Note that a post-wrap remesh is triggered if the wrap size is not equal to the target mesh size. - Remeshing may fail if the target mesh size deviates too much from the wrap size. + Remeshing might fail if the target mesh size deviates too much from the wrap size. Returns ------- Mesh - The VTK mesh with both cell and face zones. + VTK mesh with both cell and face zones. """ if not isinstance(model, _InputModel): - raise ValueError(f"Expecting input to be of type {str(_InputModel)}") + raise ValueError(f"Expecting input to be of type {str(_InputModel)}.") mesh_size_per_part = _update_size_per_part( model.part_names, global_mesh_size, mesh_size_per_part @@ -915,7 +915,7 @@ def mesh_from_non_manifold_input_model( f"(get-face-zones-of-objects '({part.name}) )" ) - # NOTE: wrap entire model in one pass so that we can create a single volume mesh. + # NOTE: wrap entire model in one pass so that a single volume mesh can be created. # Use list of all input boundaries as input. Uses external material point for meshing. # This assumes that all the individually wrapped parts form a single # connected structure. @@ -982,18 +982,18 @@ def mesh_from_non_manifold_input_model( part.name = _to_fluent_convention(part.name) LOGGER.info("Post Fluent-Meshing cleanup...") - # Update the cell zones such that for each part we have a separate cell zone. + # update cell zones such that each part has a separate cell zone mesh = _FluentMesh() mesh.load_mesh(path_to_output) mesh._fix_negative_cells() - # update the input model with the wrapped surfaces. 
+ # update input model with wrapped surfaces model = _update_input_model_with_wrapped_surfaces(model, mesh, part_face_zone_ids_post_wrap) # get cells inside each of the wrapped parts. grid = _get_cells_inside_wrapped_parts(model, mesh) - # Ensure that parts are continuous and well connected. + # ensure parts are continuous and well connected. grid = _organize_connected_regions(grid, scalar="part-id") if np.any(grid.cell_data["part-id"] == 0): @@ -1022,7 +1022,7 @@ def mesh_from_non_manifold_input_model( cell_zone.get_cells(new_mesh.cells) new_mesh.cell_zones.append(cell_zone) - # keep just the face zones of the entire wrapped model and the corresponding + # keep only the face zones of the entire wrapped model and the corresponding # interior face zone new_mesh.face_zones = [ fz @@ -1030,7 +1030,7 @@ def mesh_from_non_manifold_input_model( if "model:" in fz.name.lower() or "interior-" in fz.name.lower() ] - # rename face zones - rename to original input names. + # rename face zones to original input names for fz in new_mesh.face_zones: if "interior" in fz.name: continue @@ -1038,7 +1038,7 @@ def mesh_from_non_manifold_input_model( if ":" in fz.name: fz.name = fz.name.split(":")[0] - # Use only cell zones that are inside the parts defined in the input. + # Use only cell zones that are inside the parts defined in the input new_mesh.cell_zones = [cz for cz in new_mesh.cell_zones if cz.id in model.part_ids] vtk_mesh = _post_meshing_cleanup(new_mesh) diff --git a/src/ansys/health/heart/settings/defaults/electrophysiology.py b/src/ansys/health/heart/settings/defaults/electrophysiology.py index 21b05ba89..55a0b3ac5 100644 --- a/src/ansys/health/heart/settings/defaults/electrophysiology.py +++ b/src/ansys/health/heart/settings/defaults/electrophysiology.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-"""Module contains default values for electrophysiology simulations.""" +"""Module contains default values for EP (Electrophysiology) simulations.""" from pint import Quantity diff --git a/src/ansys/health/heart/settings/defaults/mechanics.py b/src/ansys/health/heart/settings/defaults/mechanics.py index ca4331f90..0607f969c 100644 --- a/src/ansys/health/heart/settings/defaults/mechanics.py +++ b/src/ansys/health/heart/settings/defaults/mechanics.py @@ -110,7 +110,7 @@ "k1f": Quantity(0.00049, "MPa"), "k2f": Quantity(9.01, "dimensionless"), }, - # Note: Mechanical simulation use actype=1, EP-Mechanical simulation use actype=3 + # Note: Mechanical simulation uses actype=1, EP-Mechanical simulation uses actype=3 # related parameters are hard coded # For more advanced control, use Material class "active": { diff --git a/src/ansys/health/heart/simulator.py b/src/ansys/health/heart/simulator.py index da4e560d8..7d2b37606 100644 --- a/src/ansys/health/heart/simulator.py +++ b/src/ansys/health/heart/simulator.py @@ -25,8 +25,8 @@ Options for simulation: - EP-only - with/without fbers. - with/without purkinje. + with/without fibers. + with/without Purkinje. - Electro-mechanics simplified EP (imposed activation). coupled electro-mechanics. @@ -62,7 +62,7 @@ import ansys.health.heart.writer.dynawriter as writers _KILL_ANSYSCL_PRIOR_TO_RUN = True -"""Flag indicating whether to kill all ansys license clients prior to LS-DYNA run.""" +"""Flag indicating whether to kill all Ansys license clients prior to an LS-DYNA run.""" class BaseSimulator: @@ -82,8 +82,8 @@ def __init__( Heart model to simulate. dyna_settings : DynaSettings Settings used for launching LS-DYNA. - simulation_directory : Path, optional - Directory in which to start the simulation, by default "" + simulation_directory : Path, default: "" + Directory to start the simulation in. 
""" self.model: HeartModel = model @@ -97,9 +97,9 @@ def __init__( if self.dyna_settings.platform != "wsl": if shutil.which(self.dyna_settings.lsdyna_path) is None: - LOGGER.error(f"{self.dyna_settings.lsdyna_path} does not exist") + LOGGER.error(f"{self.dyna_settings.lsdyna_path} does not exist.") raise LSDYNANotFoundError( - f"LS-DYNA executable {self.dyna_settings.lsdyna_path} not found." + f"LS-DYNA executable {self.dyna_settings.lsdyna_path} file is not found." ) if simulation_directory == "": @@ -121,14 +121,14 @@ def load_default_settings(self) -> SimulationSettings: def compute_fibers( self, method: Literal["LSDYNA", "D-RBM"] = "LSDYNA", rotation_angles: dict = None ): - """Compute the fiber-sheet directions on ventricle(s). + """Compute the fiber sheet directions on the ventricles. Parameters ---------- - method : Literal["LSDYNA", "D, optional - method, by default "LSDYNA" - rotation_angles : dict, optional - rotation angle alpha and beta, by default None + method : Literal["LSDYNA", "D-RBM"], default: "LSDYNA" + Method to compute the fiber orientation. + rotation_angles : dict, default: None + Rotation angle alpha and beta. 
""" LOGGER.info("Computing fiber orientation...") @@ -139,7 +139,7 @@ def compute_fibers( for name in ["alpha", "beta", "beta_septum"]: if name not in rotation_angles.keys(): - LOGGER.error(f"Must provide key {name} for D-RBM method") + LOGGER.error(f"Must provide key {name} for D-RBM method.") exit() self._compute_fibers_lsdyna(rotation_angles) @@ -151,12 +151,12 @@ def compute_fibers( for a, b in zip(["alpha", "beta"], ["_left", "_right", "_ot"]): if a + b not in rotation_angles.keys(): - LOGGER.error(f"Must provide key {name} for D-RBM method") + LOGGER.error(f"Must provide key {name} for D-RBM method.") exit() self._compute_fibers_drbm(rotation_angles) else: - LOGGER.error(f"Method {method} not recognized") + LOGGER.error(f"Method {method} is not recognized.") exit() return @@ -204,7 +204,7 @@ def _compute_fibers_lsdyna(self, rotation_angles: dict): return def compute_uhc(self) -> pv.UnstructuredGrid: - """Compute universal 'heart' coordinates system.""" + """Compute universal heart coordinates system.""" LOGGER.info("Computing universal ventricular coordinates...") type = "uvc" @@ -237,36 +237,36 @@ def compute_right_atrial_fiber( self, appendage: list[float], top: list[list[float]] = None ) -> pv.UnstructuredGrid: """ - Compute right atrium fiber with LDRBD method. + Compute right atrium fiber with the LDRBD method. Parameters ---------- appendage: list[float] Coordinates of appendage. - - top : list[list[float]], optional - A list of nodal coordinates to define the top path. By default, this is set to None. + top : list[list[float]], default: None + List of nodal coordinates to define the top path. The top path is a set of nodes connecting the superior (SVC) and inferior (IVC) vena cava. - Refer to `Notes` for more details. - The default method (top=None) may not work for some anatomical structures. In such cases, - you can define the start and end points by providing a list of coordinates, - e.g., [[x1, y1, z1], [x2, y2, z2]]. 
These two nodes should be located on the SVC and IVC - rings, approximately at the 12 o'clock position. + For more information, see the "Notes" section. + The default method (``top=None``) might not work for some anatomical structures. + In such cases, you can define the start and end points by providing a list of coordinates + like this: ``[[x1, y1, z1], [x2, y2, z2]]``. These two nodes should be located on the + SVC and IVC rings, approximately at the 12 o'clock position. - You can also add an intermediate point to enforce the geodesic path, - e.g., [[x1, y1, z1], [x3, y3, z3], [x2, y2, z2]]. + You can also add an intermediate point to enforce the geodesic path, like this: + ``[[x1, y1, z1], [x3, y3, z3], [x2, y2, z2]]``. Returns ------- pv.UnstructuredGrid - Left atrium with fiber coordinates system 'e_l', 'e_t' and 'e_n'. + Right atrium with fiber coordinates system in this format: ``e_l``, ``e_t`` and ``e_n``. Notes ----- - the method is described in https://doi.org/10.1016/j.cma.2020.113468 + The method is described in `Modeling cardiac muscle fibers in ventricular and atrial + electrophysiology simulations `. """ - LOGGER.info("Computing RA fiber...") + LOGGER.info("Computing right atrium fiber...") export_directory = os.path.join(self.root_directory, "ra_fiber") target = self.run_laplace_problem( @@ -277,7 +277,7 @@ def compute_right_atrial_fiber( export_directory, self.settings.atrial_fibers, endo_surface=None ) ra_pv.save(os.path.join(export_directory, "ra_fiber.vtu")) - LOGGER.info("Generating fibers done.") + LOGGER.info("Generating fibers is done.") # arrays that save ID map to full model ra_pv["cell_ids"] = target["cell_ids"] @@ -296,24 +296,25 @@ def compute_left_atrial_fiber( self, appendage: list[float] = None, ) -> pv.UnstructuredGrid: - """Compute left atrium fiber with LDRBD method. + """Compute left atrium fiber with the LDRBD method. 
Parameters ---------- - appendage : list[float], optional - Coordinates of appendage, by default None - If not defined, we use the cap named 'appendage'. + appendage : list[float], default: None + Coordinates of the appendage. If no value is specified, + the cap named ``appendage`` is used. Returns ------- pv.UnstructuredGrid - Right atrium with fiber coordinates system 'e_l', 'e_t' and 'e_n'. + Left atrium with fiber coordinates system in this format: ``e_l``, ``e_t`` and ``e_n``. Notes ----- - the method is described in https://doi.org/10.1016/j.cma.2020.113468 + The method is described in `Modeling cardiac muscle fibers in ventricular and atrial + electrophysiology simulations `. """ - LOGGER.info("Computing LA fiber...") + LOGGER.info("Computing left atrium fiber...") export_directory = os.path.join(self.root_directory, "la_fiber") target = self.run_laplace_problem(export_directory, "la_fiber", laa=appendage) @@ -322,7 +323,7 @@ def compute_left_atrial_fiber( export_directory, self.settings.atrial_fibers, endo_surface=None ) la_pv.save(os.path.join(export_directory, "la_fiber.vtu")) - LOGGER.info("Generating fibers done.") + LOGGER.info("Generating fibers is done.") # arrays that save ID map to full model la_pv["cell_ids"] = target["cell_ids"] @@ -341,7 +342,7 @@ def run_laplace_problem( self, export_directory, type: Literal["uvc", "la_fiber", "ra_fiber"], **kwargs ): """ - Run Laplace-Dirichlet (thermal) problem in LSDYNA. + Run the Laplace-Dirichlet (thermal) problem in LS-DYNA. Parameters ---------- @@ -350,11 +351,11 @@ def run_laplace_problem( type: str Simulation type. kwargs : dict - Landmarks to create nodeset, keys can be 'laa','raa','top'. + Landmarks to create the nodeset. Keys can be ``laa``, ``raa``, and ``top``'. Returns ------- - UnstructuredGrid with array to map data back to full mesh. + UnstructuredGrid with array to map data back to the full mesh. 
""" for k, v in kwargs.items(): @@ -372,19 +373,19 @@ def run_laplace_problem( input_file = os.path.join(export_directory, "main.k") self._run_dyna(path_to_input=input_file, options="case") - LOGGER.info("Solving laplace-dirichlet done.") + LOGGER.info("Solving Laplace-Dirichlet problem is done.") return dyna_writer.target def _run_dyna(self, path_to_input: pathlib, options: str = ""): - """Run LS-DYNA with path and options. + """Run LS-DYNA with the specified input file and options. Parameters ---------- path_to_input : Path Path to the LS-DYNA simulation file. - options : str, optional - Additional options to pass to command line, by default "". + options : str, default: "" + Additional options to pass to the command line. """ if options != "": @@ -402,7 +403,7 @@ def _run_dyna(self, path_to_input: pathlib, options: str = ""): class EPSimulator(BaseSimulator): - """EP Simulator.""" + """EP (electrophysiology) simulator.""" def __init__( self, @@ -410,20 +411,20 @@ def __init__( dyna_settings: DynaSettings, simulation_directory: pathlib = "", ) -> None: - """Initialize EP Simulator.""" + """Initialize the EP simulator.""" super().__init__(model, dyna_settings, simulation_directory) return def simulate(self, folder_name="main-ep", extra_k_files: list[str] = []): - """Launch the electrophysiology simulation. + """Launch the EP simulation. Parameters ---------- - folder_name : str, optional - simulation folder name, by default "main-ep" - extra_k_files : list[str], optional - user defined k files, by default [] + folder_name : str, default: ``'main-ep'`` + Simulation folder name. + extra_k_files : list[str], default: [] + User-defined k files. 
""" directory = os.path.join(self.root_directory, folder_name) self._write_main_simulation_files(folder_name, extra_k_files=extra_k_files) @@ -433,12 +434,12 @@ def simulate(self, folder_name="main-ep", extra_k_files: list[str] = []): input_file = os.path.join(directory, "main.k") self._run_dyna(input_file) - LOGGER.info("done.") + LOGGER.info("Simulation completed successfully.") return def _simulate_conduction(self, folder_name="main-ep-onlybeams"): - """Launch the main simulation.""" + """Launch the main EP simulation.""" directory = os.path.join(self.root_directory, folder_name) self._write_main_conduction_simulation_files(folder_name) @@ -447,12 +448,12 @@ def _simulate_conduction(self, folder_name="main-ep-onlybeams"): input_file = os.path.join(directory, "main.k") self._run_dyna(input_file) - LOGGER.info("done.") + LOGGER.info("Simulation completed successfully.") return def compute_purkinje(self): - """Compute the purkinje network.""" + """Compute the Purkinje network.""" directory = os.path.join(self.root_directory, "purkinjegeneration") self._write_purkinje_files(directory) @@ -461,7 +462,7 @@ def compute_purkinje(self): # self.settings.save(os.path.join(directory, "simulation_settings.yml")) - LOGGER.debug("Compute Purkinje network on 1 cpu.") + LOGGER.debug("Compute Purkinje network on one CPU.") orig_num_cpus = self.dyna_settings.num_cpus self.dyna_settings.num_cpus = 1 @@ -469,9 +470,9 @@ def compute_purkinje(self): self._run_dyna(input_file) self.dyna_settings.num_cpus = orig_num_cpus - LOGGER.debug(f"Set number of cpus back to {orig_num_cpus}.") + LOGGER.debug(f"Set number of CPUs back to {orig_num_cpus}.") - LOGGER.info("done.") + LOGGER.info("Simulation completed successfully.") LOGGER.info("Assign the Purkinje network to the model...") @@ -514,11 +515,11 @@ def compute_conduction_system(self): # ) cs._connect_to_solid(component_id=3, local_point_ids=0) else: - LOGGER.info("Not implemented for other than FourChamber models.") + 
LOGGER.info("Computation is only implemented for four-chamber heart models.") return cs def _write_main_simulation_files(self, folder_name, extra_k_files: list[str] = []): - """Write LS-DYNA files that are used to start the main simulation.""" + """Write LS-DYNA files that are used to start the main EP simulation.""" export_directory = os.path.join(self.root_directory, folder_name) model = copy.deepcopy(self.model) @@ -529,7 +530,7 @@ def _write_main_simulation_files(self, folder_name, extra_k_files: list[str] = [ return def _write_main_conduction_simulation_files(self, folder_name): - """Write LS-DYNA files that are used to start the main simulation.""" + """Write LS-DYNA files that are used to start the main EP simulation.""" export_directory = os.path.join(self.root_directory, folder_name) model = copy.deepcopy(self.model) @@ -564,9 +565,9 @@ def __init__( super().__init__(model, dyna_settings, simulation_directory) self.initial_stress = initial_stress - """If stress free computation is taken into considered.""" + """If stress-free computation is taken into consideration.""" self._dynain_name = None - """lsdyna initial state file name, from zeropressure.""" + """LS-DYNA initial state file name from zeropressure.""" return def simulate( @@ -580,20 +581,20 @@ def simulate( Parameters ---------- - folder_name : str, optional - simulation folder name, by default "main-mechanics" - zerop_folder : str | None, optional - folder contains stress free simulation. - Use "zeropressure" under root directory if None - auto_post : bool, optional - Flag indicating whether to run post-process scripts, by default True - extra_k_files : list[str], optional - user defined k files, by default [] + folder_name : str, default: ``'main-mechanics'`` + Simulation folder name. + zerop_folder : str | None, default: None + Folder containing stress-free simulation. + If ``None``, ``zeropressure`` under the root directory is used. 
+ auto_post : bool, default: True + Whether to run postprocessing scripts. + extra_k_files : list[str], default: [] + User-defined k files. """ if "apico-basal" not in self.model.mesh.point_data.keys(): LOGGER.warning( - "Array named 'apico-basal' cannot be found, will compute" - "universal coordinate system (UVC) firstly." + "Array named ``apico-basal`` cannot be found. Computing" + "universal coordinate system (UVC) first." ) self.compute_uhc() @@ -620,7 +621,7 @@ def simulate( return def _find_dynain_file(self, zerop_folder) -> str: - """Find dynain.lsda file of last iteration.""" + """Find the ``dynain.lsda`` file of the last iteration.""" if zerop_folder is None: zerop_folder = os.path.join(self.root_directory, "zeropressure") @@ -629,13 +630,13 @@ def _find_dynain_file(self, zerop_folder) -> str: dynain_files = natsort.natsorted(dynain_files) if len(dynain_files) == 0: - error_message = f"Files iter*.dynain.lsda not found in {zerop_folder}" + error_message = f"Files 'iter*.dynain.lsda` not found in {zerop_folder}." LOGGER.error(error_message) raise FileNotFoundError(error_message) elif len(dynain_files) == 1: error_message = ( - f"Only 1 iter*.dynain.lsda is found in {zerop_folder}, expect at least 2." + f"Only 1 'iter*.dynain.lsda' file is found in {zerop_folder}. Expected at least 2." ) LOGGER.error(error_message) @@ -657,12 +658,12 @@ def compute_stress_free_configuration( Parameters ---------- - folder_name : str, optional - simulation folder name, by default "zeropressure" - overwrite : bool, optional - run simulation and overwrite files, by default True - extra_k_files : list[str], optional - user defined k files, by default [] + folder_name : str, default: ``'zeropressure'`` + Simulation folder name. + overwrite : bool, default: True + Whether to run simulation and overwrite files. + extra_k_files : list[str], default: [] + User-defined k files. 
""" directory = os.path.join(self.root_directory, folder_name) @@ -674,20 +675,20 @@ def compute_stress_free_configuration( LOGGER.info("Computing stress-free configuration...") self._run_dyna(os.path.join(directory, "main.k"), options="case") - LOGGER.info("Simulation done.") + LOGGER.info("Simulation is done.") else: - LOGGER.info(f"Reusing existing results in {directory}") + LOGGER.info(f"Reusing existing results in {directory}.") report, stress_free_coord, guess_ed_coord = zerop_post(directory, self.model) # replace node coordinates by computed ED geometry - LOGGER.info("Updating nodes.") + LOGGER.info("Updating nodes...") self.model.mesh.points = guess_ed_coord #! Note that it is not always clear if the contents of the retrieved - #! surface is actually properly copied to the object of which the surface - #! is an attribute (part.surface). That is, is `=` actually working here? + #! surface is actually properly copied to the object that the surface + #! is an attribute (part.surface) of. That is, is `=` actually working here? 
for part in self.model.parts: for surface in part.surfaces: surface = self.model.mesh.get_surface(surface.id) @@ -712,7 +713,7 @@ def _write_main_simulation_files( return def _write_stress_free_configuration_files(self, folder_name, extra_k_files: list[str] = []): - """Write LS-DYNA files to compute stress-free configuration.""" + """Write LS-DYNA files to compute the stress-free configuration.""" export_directory = os.path.join(self.root_directory, folder_name) model = copy.deepcopy(self.model) @@ -728,7 +729,7 @@ def _write_stress_free_configuration_files(self, folder_name, extra_k_files: lis class EPMechanicsSimulator(EPSimulator, MechanicsSimulator): - """Coupled EP-mechanics simulator with computed Electrophysiology.""" + """Coupled EP-mechanics simulator with computed electrophysiology.""" def __init__( self, @@ -751,15 +752,15 @@ def simulate( Parameters ---------- - folder_name : str, optional - simulation folder name, by default "main-mechanics" - zerop_folder : str | None, optional - folder contains stress free simulation. - Use "zeropressure" under roo_directory if None - auto_post : bool, optional - if run post-process scripts, by default True - extra_k_files : list[str], optional - user defined k files, by default [] + folder_name : str, default: ``'main-mechanics'`` + Simulation folder name. + zerop_folder : str | None, default: None + Folder containing the stress-free simulation. + Use ``'zeropressure'`` under the root_directory if ``None`` is used. + auto_post : bool, default: True + Whether to run postprocessing scripts. + extra_k_files : list[str], default: [] + User-defined k files. 
""" # MechanicalSimulator handle dynain file from zerop MechanicsSimulator.simulate( @@ -788,7 +789,7 @@ def _write_main_simulation_files( def _kill_all_ansyscl(): - """Kill all ansys license clients.""" + """Kill all Ansys license clients.""" try: for p in psutil.process_iter(): if "ansyscl" in p.name(): @@ -808,11 +809,11 @@ def run_lsdyna( ---------- path_to_input : Path Input file for LS-DYNA. - settings : DynaSettings, optional - LS-DYNA settings, such as path to executable, executable type, - platform, by default ``None``. - simulation_directory : Path, optional - Directory where to simulate, by default ``None``. + settings : DynaSettings, default: None + LS-DYNA settings, such as path to the executable file, executable type, + and platform. + simulation_directory : Path, default: None + Directory for the simulation. """ if not settings: @@ -823,7 +824,7 @@ def run_lsdyna( os.chdir(os.path.dirname(path_to_input)) - #! Kill all ansys license clients prior to running LS-DYNA + #! Kill all Ansys license clients prior to running LS-DYNA #! this to avoid issues with orphan license clients of versions #! lower than the one needed by LS-DYNA. if _KILL_ANSYSCL_PRIOR_TO_RUN: diff --git a/src/ansys/health/heart/utils/__init__.py b/src/ansys/health/heart/utils/__init__.py index 48ab4f766..ee8c89371 100644 --- a/src/ansys/health/heart/utils/__init__.py +++ b/src/ansys/health/heart/utils/__init__.py @@ -19,4 +19,4 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-"""Subpackage containing various utils.""" +"""Subpackage containing various utilities.""" diff --git a/src/ansys/health/heart/utils/connectivity.py b/src/ansys/health/heart/utils/connectivity.py index 6f1b3366b..55665973b 100644 --- a/src/ansys/health/heart/utils/connectivity.py +++ b/src/ansys/health/heart/utils/connectivity.py @@ -70,7 +70,7 @@ def face_tetra_connectivity(tetra: np.ndarray) -> Tuple[np.ndarray, np.ndarray, num_tetra = tetra.shape[0] faces_1 = np.reshape(faces.transpose(0, 2, 1), (4 * num_tetra, 3)) - # sort faces in order to find duplicates + # sort faces to find duplicates faces_sorted = np.sort(faces_1, axis=1) np.sort(faces_sorted, axis=0) @@ -88,11 +88,11 @@ def face_tetra_connectivity(tetra: np.ndarray) -> Tuple[np.ndarray, np.ndarray, tetra_ids = np.repeat(np.arange(0, num_tetra, 1), 4) tetra_ids_flip = np.flipud(tetra_ids) - # get connected tetra id for each face (two for interior, one for boundary face) + # get connected tetra ID for each face (two for interior and one for boundary face) c0 = tetra_ids[index][inverse] c1 = np.flip(tetra_ids_flip[index_r][inverse_r]) - # removing any duplicate faces + # remove any duplicate faces mapper = np.sort(index) faces_1 = faces_1[mapper, :] c0 = c0[mapper] @@ -105,19 +105,21 @@ def face_tetra_connectivity(tetra: np.ndarray) -> Tuple[np.ndarray, np.ndarray, def get_face_type(faces: np.ndarray, face_cell_connectivity: np.ndarray) -> np.ndarray: - """Establish face type. Either boundary faces or interior faces. + """Establish the face type, which indicates whether it is a boundary or an interior face. Parameters ---------- faces : np.ndarray - Array with face definitions + Array with face definitions. face_cell_connectivity : np.ndarray - Array describing to which cells each of the faces is connected to, e.g. np.array([c0, c1]) + Array describing the cells that each of the faces is connected to. + For example, ``np.array([c0, c1])``. Returns ------- np.ndarray - Type of face. 
Either interior (face_type = 1) or boundary (face_type = 2) + Type of face, which is either interior ``(face_type = 1)`` + or boundary ``(face_type = 2)``. """ interior_face_ids = face_cell_connectivity[:, 0] != face_cell_connectivity[:, 1] boundary_face_ids = face_cell_connectivity[:, 0] == face_cell_connectivity[:, 1] @@ -125,12 +127,14 @@ def get_face_type(faces: np.ndarray, face_cell_connectivity: np.ndarray) -> np.n face_types[interior_face_ids] = 1 face_types[boundary_face_ids] = 2 num_assigned = np.sum(boundary_face_ids) + np.sum(interior_face_ids) - assert num_assigned == faces.shape[0], "Not all faces assigned as either interior or boundary" + assert num_assigned == faces.shape[0], ( + "Not all faces are assigned as either interior or boundary." + ) return face_types def get_edges_from_triangles(triangles: np.ndarray) -> np.ndarray: - """Generate an array of edges from a array of triangles.""" + """Generate an array of edges from an array of triangles.""" num_triangles = triangles.shape[0] num_edges = num_triangles * 3 edges = np.repeat(triangles, 3, axis=0) @@ -151,14 +155,14 @@ def get_free_edges( Parameters ---------- triangles : np.ndarray - Array of triangles - return_free_triangles : bool, optional - Flag indicating whether to return the free triangles, by default False + Array of triangles. + return_free_triangles : bool, default: False + Whether to return the free triangles. Returns ------- free_edges : np.ndarray - Numpy array with the free edges + Numpy array with the free edges. 
free_triangles: np.ndarray, optional Numpy array with the triangles that use these free edges """ @@ -190,26 +194,34 @@ def edge_connectivity( Parameters ---------- edges : np.array - NumEdges x 2 Numpy array with edge definitions - return_type : bool, optional - Flag indicating whether to return the type of the edge group, by default False: - "open": edge group is open-ended - "closed": edge group forms closed edge loop - sort_closed : bool, optional - Flag indicating whether to sort any closed edge loops, by default False + NumEdges x 2 NumPy arrays with edge definitions. + return_type : bool, default: False + Whether to return the type of the edge group. If ``True``, the function + returns a list of strings with these types: + - "open": Edge group is open-ended. + - "closed": Edge group forms a closed edge loop. + sort_closed : bool, default: False + Whether to sort closed edge loops. Returns ------- edge_groups : np.ndarray Grouped edges by connectivity. group_types : list[str], optional - Type of edge group. 'open' ended or 'closed'. + Type of the edge group. Options are ``open`` or ``closed``. Notes ----- - Uses an implementation of a Depth-first search: https://en.wikipedia.org/wiki/Depth-first_search - https://www.educative.io/answers/how-to-implement-depth-first-search-in-python - Performance is not tested so may not be suitable for large arrays of edges. + This method uses an implementation of a depth-first search. For more information, + see: + + - `Depth-first search `_ + on the Wikipedia site. + - `How to implement depth-first search in Python + `_ + on the HowDev Answers site. + + Performance of this method is not tested. It might not be suitable for large arrays of edges. """ def _dfs(visited, graph, node): @@ -229,15 +241,15 @@ def _dfs(visited, graph, node): connected_nodes = edges.flatten()[mask.flatten()] graph[node] = connected_nodes - # check connectivity of each node using DFS. 
- # Group connected edges + # check connectivity of each node using DFS + # group connected edges node_ids_visited = np.zeros(node_ids.shape[0], dtype=bool) edge_groups = [] while not np.all(node_ids_visited): # keep track of visited nodes for this group of edges visited = set() - # node id to start from (finds first un-visited node) + # node ID to start from (finds first un-visited node) start_node_id = node_ids[np.where(np.invert(node_ids_visited))[0][0]] # call dept first algorithm to find connectivity @@ -301,8 +313,8 @@ def remove_triangle_layers_from_trimesh(triangles: np.ndarray, iters: int = 1) - ---------- triangles : np.ndarray Array of triangles. - iters : int, optional - Number of iterations, by default 1. + iters : int, default: 1 + Number of iterations. Returns ------- @@ -321,7 +333,7 @@ def remove_triangle_layers_from_trimesh(triangles: np.ndarray, iters: int = 1) - idx_triangles_boundary = np.any(np.isin(reduced_triangles, free_nodes), axis=1) - LOGGER.debug("Removing {0} connected triangles".format(np.sum(idx_triangles_boundary))) + LOGGER.debug("Removing {0} connected triangles...".format(np.sum(idx_triangles_boundary))) # remove boundary triangles reduced_triangles = reduced_triangles[~idx_triangles_boundary, :] diff --git a/src/ansys/health/heart/utils/download.py b/src/ansys/health/heart/utils/download.py index cd8187f74..13c280a29 100644 --- a/src/ansys/health/heart/utils/download.py +++ b/src/ansys/health/heart/utils/download.py @@ -124,26 +124,26 @@ def download_case_from_zenodo( Parameters ---------- database : str - name of the database. Either Strocchi2020 or Rodero2021. + name of the database. Options are ``'Strocchi2020'`` or ``'Rodero2021'``. case_number : int - case number to download. + Case number to download. download_folder : Path - path to the folder in which to download the case. + Path to the folder to download the case to. Returns ------- Path - Path to the tar ball that contains the vtk/case files. 
+ Path to the tarball that contains the VTK/CASE files. Examples -------- - Download case 1 from the public repository (Strocchi2020) of pathological hearts. + Download case 1 from the public repository (``'Strocchi2020'``) of pathological hearts. >>> path_to_tar_file = download_case_from_zenodo( database="Strocchi2020", case_number=1, download_folder="my/download/folder" ) - Download case 1 from the public repository (Rodero2021) of 'healthy' hearts. + Download case 1 from the public repository (``'Rodero2021'``) of healthy hearts. >>> path_to_tar_file = download_case_from_zenodo( database="Rodero2021", case_number=1, download_folder="my/download/folder" @@ -169,7 +169,7 @@ def download_case_from_zenodo( try: download_url = _ALL_DOWNLOAD_URLS[database][case_number] except KeyError as e: - LOGGER.error(f"Case {case_number} not found in database {database}. {e}") + LOGGER.error(f"Case {case_number} is not found in database {database}. {e}") return None # validate URL @@ -209,7 +209,7 @@ def download_case_from_zenodo( LOGGER.warning("Not validating hash. 
Proceed at own risk") is_valid_file = True if not is_valid_file: - LOGGER.error("File data integrity can not be validated.") + LOGGER.error("File data integrity cannot be validated.") os.remove(save_path) return save_path @@ -232,7 +232,7 @@ def _validate_hash_sha256(file_path: Path, database: str, casenumber: int) -> bo def _infer_extraction_path_from_tar(tar_path: str | Path) -> str: - """Infer the path to the relevant .case or .vtk file from the tar_path.""" + """Infer the path to the relevant CASE or VTK file from the tarball path.""" tar_path = Path(tar_path) tarball = tarfile.open(tar_path) names = tarball.getnames() @@ -241,16 +241,20 @@ def _infer_extraction_path_from_tar(tar_path: str | Path) -> str: if not sub_path: sub_path = next((name for name in names if name.endswith(".vtk")), None) + if sub_path is None: + LOGGER.error(f"No relevant files are found in {tar_path}.") + return str(tar_path) + path = (tar_path.parent / sub_path).resolve() return str(path) def _get_members_to_unpack(tar_ball: tarfile.TarFile) -> list: - """Get the members to unpack from the tar ball. + """Get the members to unpack from the tarball. Notes ----- - This ignores the large .vtk for the Strocchi2020 archives. + This ignores the large VTK files for the Strocchi 2020 archives. """ if len(tar_ball.getnames()) > 1: members_to_unpack = [ @@ -262,14 +266,14 @@ def _get_members_to_unpack(tar_ball: tarfile.TarFile) -> list: def unpack_case(tar_path: Path, reduce_size: bool = True) -> str | bool: - r"""Unpack the downloaded tar file. + r"""Unpack the downloaded tarball file. Parameters ---------- tar_path : Path - Path to tar.gz file. + Path to TAR.GZ file. reduce_size : bool, default: True - If True, reduce the size of the unpacked files by removing the .vtk file for the + Whether to reduce the size of the unpacked files by removing the VTK file for the Strocchi database. 
Examples @@ -280,7 +284,7 @@ def unpack_case(tar_path: Path, reduce_size: bool = True) -> str | bool: Returns ------- str - Path to the .case or .vtk file + Path to the CASE or VTK file. """ try: tar_ball = tarfile.open(tar_path) @@ -293,7 +297,7 @@ def unpack_case(tar_path: Path, reduce_size: bool = True) -> str | bool: return path except Exception as exception: - LOGGER.error(f"Unpacking failed... {exception}") + LOGGER.error(f"Unpacking failed. {exception}") return False @@ -303,12 +307,12 @@ def download_all_cases(download_dir: str = None) -> list[str]: Parameters ---------- download_dir : str - Base directory where to download the cases to. + Base directory to download cases to. Examples -------- >>> from ansys.health.heart.utils.download import download_all_cases - >>> tar_files = download_call_cases("my-downloads") + >>> tar_files = download_all_cases("my-downloads") To unpack all cases you can use the unpack_cases method: >>> from ansys.health.heart.utils.download import unpack_cases @@ -316,7 +320,7 @@ def download_all_cases(download_dir: str = None) -> list[str]: Notes ----- - Note that downloading all cases may - depending on bandwidth - take substantial + Note that depending on bandwidth, downloading all cases might take a lot of time. """ @@ -338,12 +342,12 @@ def download_all_cases(download_dir: str = None) -> list[str]: def unpack_cases(list_of_tar_files: typing.List) -> None: - """Unpack a list of tar files. + """Unpack a list of TAR files. Parameters ---------- list_of_tar_files : typing.List - List of tar files to unpack. + List of TAR files to unpack. Examples -------- diff --git a/src/ansys/health/heart/utils/fluent_reader.py b/src/ansys/health/heart/utils/fluent_reader.py index 77ddc2a83..59d49e268 100644 --- a/src/ansys/health/heart/utils/fluent_reader.py +++ b/src/ansys/health/heart/utils/fluent_reader.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-"""Module containing functions to read/write fluent meshes in HDF5 format.""" +"""Module containing functions to read and write Fluent meshes in HDF5 format.""" import h5py import numpy as np @@ -30,7 +30,7 @@ try: import pyvista as pv except ImportError: - print("Failed to import pyvista. Try installing pyvista with `pip install pyvista`.") + print("Failed to import PyVista. Try installing with 'pip install pyvista'.") class _FluentCellZone: @@ -40,24 +40,24 @@ def __init__( self, min_id: int = None, max_id: int = None, name: str = None, cid: int = None ) -> None: self.min_id: int = min_id - """Min cell id of the cell zone: indexing starts at 0.""" + """Minimum cell ID of the cell zone. Indexing starts at 0.""" self.max_id: int = max_id - """Max cell id of the cell zone: indexing starts at 0.""" + """Maximum cell ID of the cell zone. Indexing starts at 0.""" self.name: str = name """Name of the cell zone.""" self.id: int = cid - """Id of the cell zone.""" + """ID of the cell zone.""" self.cells: np.ndarray = None """Array of cells for this cell zone.""" return def get_cells(self, all_cells: np.ndarray) -> None: - """Select the cells between min and max id. + """Select the cells between the minimum and maximum ID. Notes ----- - Requires list of all cells. + A list of all cells is required. """ self.cells = all_cells[self.min_id : self.max_id + 1, :] @@ -79,21 +79,21 @@ def __init__( c0c1: np.ndarray = None, ) -> None: self.min_id: int = min_id - """Min face id of the face zone: indexing starts at 0.""" + """Minimum face ID of the face zone. Indexing starts at 0.""" self.max_id: int = max_id - """Max face id of the face zone: indexing starts at 0.""" + """Maximum face ID of the face zone. 
Indexing starts at 0.""" self.name: str = name """Name of the face zone.""" self.id: int = zone_id - """Id of the face zone.""" + """ID of the face zone.""" self.zone_type: str = zone_type - """Type of face zone.""" + """Type of the face zone.""" self.faces: np.ndarray = faces - """Array of faces for this face zone.""" + """Array of faces for the face zone.""" self.c0c1: np.ndarray = c0c1 - """Array that stores connected cell-ids.""" + """Array that stores connected cell IDs.""" self.hdf5_id = hdf5_id - """Id of face zone in hdf5 file.""" + """ID of the face zone in the HDF5 file.""" return @@ -113,19 +113,19 @@ def face_zone_names(self): @property def cell_zone_id_to_name(self): - """Cell-zone id to name mapping.""" + """Cell zone ID to name mapping.""" return {cz.id: cz.name for cz in self.cell_zones if cz is not None} @property def face_zone_id_to_name(self): - """Face-zone id to name mapping.""" + """Face zone ID to name mapping.""" return {fz.id: fz.name for fz in self.face_zones if fz is not None} def __init__(self, filename: str = None) -> None: self.filename: str = filename """Path to file.""" self.fid: h5py.File = None - """File id to h5py file.""" + """File iID to H5PY file.""" self.nodes: np.ndarray = None """All nodes of the mesh.""" self.faces: np.ndarray = None @@ -133,20 +133,20 @@ def __init__(self, filename: str = None) -> None: self.cells: np.ndarray = None """All cells.""" self.cell_ids: np.ndarray = None - """Array of cell ids use to define the cell zones.""" + """Array of cell IDs used to define the cell zones.""" self.cell_zones: list[_FluentCellZone] = [] """List of cell zones.""" self.face_zones: list[_FluentFaceZone] = [] """List of face zones.""" self._unique_map: np.ndarray = None - """Map to go from full node list to node-list without duplicates.""" + """Map to go from the full node list to the node list without duplicates.""" pass def load_mesh(self, filename: str = None, reconstruct_tetrahedrons: bool = True) -> None: - """Load the 
mesh from the hdf5 file.""" + """Load the mesh from the HDF5 file.""" if not filename and not self.filename: - raise FileNotFoundError("Please specify a file to read") + raise FileNotFoundError("Specify a file to read.") if self.filename: filename = self.filename @@ -193,7 +193,7 @@ def clean(self) -> None: return def _remove_duplicate_nodes(self) -> None: - """Remove duplicate nodes and remaps the face zone definitions.""" + """Remove duplicate nodes and remap the face zone definitions.""" self._unique_nodes, _, self._unique_map = np.unique( self.nodes, axis=0, @@ -217,10 +217,10 @@ def _set_cells_in_cell_zones(self) -> list[_FluentCellZone]: def _open_file(self, filename: str = None) -> h5py.File: """Open the file for reading.""" if not filename: - raise ValueError("Please specify input file") + raise ValueError("Specify the input file.") if filename[-7:] != ".msh.h5": - raise FileNotFoundError("File does not have extension '.msh.h5'") + raise FileNotFoundError("File does not have extension '.msh.h5'.") self.fid = h5py.File(filename, "r") return self.fid @@ -231,7 +231,7 @@ def _close_file(self) -> None: return def _read_nodes(self) -> None: - """Read the node field(s).""" + """Read the node fields.""" self.nodes = np.zeros((0, 3), dtype=float) for ii in np.array(self.fid["meshes/1/nodes/coords"]): self.nodes = np.vstack([self.nodes, np.array(self.fid["meshes/1/nodes/coords/" + ii])]) @@ -290,7 +290,7 @@ def _read_all_faces_of_face_zones(self) -> list[_FluentFaceZone]: subdir2 = "meshes/1/faces/nodes/" + str(face_zone.hdf5_id) + "/nnodes" nnodes = np.array(self.fid[subdir2], dtype=int) if not np.all(nnodes == 3): - raise ValueError("Only triangular meshes supported") + raise ValueError("Only triangular meshes are supported.") node_ids = np.array(self.fid[subdir], dtype=int) num_triangles = int(len(node_ids) / 3) @@ -299,7 +299,7 @@ def _read_all_faces_of_face_zones(self) -> list[_FluentFaceZone]: return self.face_zones def _read_c0c1_of_face_zones(self) -> 
list[_FluentFaceZone]: - """Read the cell connectivity of the face zone. Only do for interior cells.""" + """Read the cell connectivity of the face zone. Only do this for interior cells.""" for face_zone in self.face_zones: subdir0 = "meshes/1/faces/c0/" + str(face_zone.hdf5_id) subdir1 = "meshes/1/faces/c1/" + str(face_zone.hdf5_id) @@ -309,14 +309,14 @@ def _read_c0c1_of_face_zones(self) -> list[_FluentFaceZone]: return self.face_zones def _convert_interior_faces_to_tetrahedrons(self) -> tuple[np.ndarray, np.ndarray]: - """Use c0c1 matrix to get tetrahedrons. + """Use the c0c1 matrix to get tetrahedrons. Notes ----- f1: n1 n2 n3 c0 c1 f2: n3 n1 n4 c0 c1 - If f1 and f2 are connected to same face - extract node not occurring in + If f1 and f2 are connected to the same face, extract the node not occurring in f1. The resulting four nodes will make up the tetrahedron Do this for all faces. @@ -353,7 +353,7 @@ def _convert_interior_faces_to_tetrahedrons(self) -> tuple[np.ndarray, np.ndarra mask = np.invert(mask) if not np.all(np.sum(mask, axis=1) == 1): - raise ValueError("The two faces do not seem to be connected with two nodes") + raise ValueError("The two faces do not seem to be connected with the two nodes.") tetrahedrons = np.hstack([f1, f2[mask][:, None]]) @@ -364,21 +364,21 @@ def _convert_interior_faces_to_tetrahedrons(self) -> tuple[np.ndarray, np.ndarra return tetrahedrons, cell_ids - # NOTE: no typehint due to lazy import of pyvista + # NOTE: no typehint due to lazy import of PpyVista def _to_vtk(self, add_cells: bool = True, add_faces: bool = False) -> pv.UnstructuredGrid: - """Convert mesh to vtk unstructured grid or polydata. + """Convert the mesh to VTK unstructured grid or polydata. Parameters ---------- - add_cells : bool, optional - Whether to add cells to the vtk object, by default True - add_faces : bool, optional - Whether to add faces to the vtk object, by default False + add_cells : bool, default: True + Whether to add cells to the VTK object. 
+ add_faces : bool, default: False + Whether to add faces to the VTK object. Returns ------- pv.UnstructuredGrid - Unstructured grid representation of the fluent mesh. + Unstructured grid representation of the Fluent mesh. """ if add_cells and add_faces: add_both = True @@ -431,12 +431,12 @@ def _to_vtk(self, add_cells: bool = True, add_faces: bool = False) -> pv.Unstruc return grid def _fix_negative_cells(self) -> None: - """Rorder base face in cells that have a negative cell volume. + """Rorder the base face in cells that have a negative cell volume. Notes ----- - For a positive volume the base face (n1, n2, n3) needs to point in direction - of n4. Hence, swapping the order to (n3, n2, n1) fixes negative cell volumes. + For a positive volume, the base face ``(n1, n2, n3)`` must point in the direction + of ``n4``. Hence, swapping the order to ``(n3, n2, n1)`` fixes negative cell volumes. """ grid = self._to_vtk(add_cells=True, add_faces=False) grid = grid.compute_cell_sizes(length=False, area=False, volume=True) @@ -453,7 +453,7 @@ def _fix_negative_cells(self) -> None: return def _remove_empty_cell_zones(self) -> None: - """Remove empty cell zones from cell zone list.""" + """Remove empty cell zones from the cell zone list.""" self.cell_zones = [cz for cz in self.cell_zones if cz.cells.shape[0] > 0] return @@ -462,8 +462,8 @@ def _merge_face_zones_based_on_connectivity(self, face_zone_separator: str = ":" Notes ----- - This method is useful when mesh is split into multiple unconnected face zones with - the same name. Fluent uses a colon as an identifier when separating these face-zones. + This method is useful when the mesh is split into multiple unconnected face zones with + the same name. Fluent uses a colon as an identifier when separating these face zones. 
""" idx_to_remove = [] for ii, fz in enumerate(self.face_zones): diff --git a/src/ansys/health/heart/utils/landmark_utils.py b/src/ansys/health/heart/utils/landmark_utils.py index f2c286a0d..5ccd3ab4d 100644 --- a/src/ansys/health/heart/utils/landmark_utils.py +++ b/src/ansys/health/heart/utils/landmark_utils.py @@ -43,19 +43,19 @@ def compute_anatomy_axis( Parameters ---------- mv_center : np.ndarray - Mitral valve center + Mitral valve center. av_center : np.ndarray - Aortic valve center + Aortic valve center. apex : np.ndarray - left ventricle epicardium apex point - first_cut_short_axis : float, optional - relative distance between mv_center and apex, by default 0.2 - Used for defining the center of the short axis + Left ventricle epicardium apex point. + first_cut_short_axis : float, default: 0.2 + Relative distance between the mitral valve center and apex, + which is used for defining the center of the short axis. Returns ------- tuple[dict, dict, dict] - 4CV,2CV and short-axis, each dict contains `center` and `normal` + 4CV, 2CV, and short-axis. Each dictionary contains ``center`` and ``normal``. """ # long 4CAV axis: cross apex, mitral and aortic valve centers center = np.mean(np.array([av_center, mv_center, apex]), axis=0) @@ -86,26 +86,26 @@ def compute_aha17( seg: Literal[16, 17] = 17, p_junction: np.ndarray = None, ) -> np.ndarray: - """Compute AHA17 label for left ventricle elements. + """Compute the AHA17 label for left ventricle elements. Parameters ---------- model : HeartModel - heart model + Heart model. short_axis : dict - short axis + Short axis. l4cv_axis : dict - long 4cv axis - seg : Literal[16, 17], optional - compute 16 or 17 segments, by default 17 - p_junction : np.ndarray, optional - LV and RV junction points, if given, it defines the start of segment 1, by default None - If not given, start point is defined by rotating 60 degrees from 4CV axis + Long 4CV axis. + seg : Literal[16, 17], default: 17 + Compute 16 or 17 segments. 
+ p_junction : np.ndarray, default: None + LV and RV junction points. If these points are given, they defines the start of segment 1. + If they are not given, the start point is defined by rotating 60 degrees from the 4CV axis. Returns ------- np.ndarray - AHA17 ids, no concerned elements will be assigned with np.nan + AHA17 IDs. No concerned elements are assigned with ``np.nan``. """ aha_ids = np.full(len(model.mesh.tetrahedrons), np.nan) @@ -115,7 +115,7 @@ def compute_aha17( except AttributeError: ele_ids = np.hstack(model.left_ventricle.element_ids) - # elements center + # element's center elem_center = np.mean(model.mesh.points[model.mesh.tetrahedrons[ele_ids]], axis=1) # anatomical points @@ -152,7 +152,7 @@ def compute_aha17( p1_3 = 1 / 3 * (apex_ep - p_highest) + p_highest p2_3 = 2 / 3 * (apex_ep - p_highest) + p_highest - # In order to have a flat segment 17, project endocardical apex point on short axis + # to have a flat segment 17, project endocardical apex point on short axis x = apex_ed - apex_ep y = p_highest - apex_ep apex_ed = y * np.dot(x, y) / np.dot(y, y) + apex_ep @@ -232,7 +232,7 @@ def compute_aha17( return aha_ids -@deprecated(reason="Will use gradient from UVC to get better results") +@deprecated(reason="Using gradient from UVC to get better results.") def compute_element_cs( model: HeartModel, short_axis: dict, aha_element: np.ndarray ) -> tuple[np.ndarray, np.ndarray, np.ndarray]: @@ -241,16 +241,16 @@ def compute_element_cs( Parameters ---------- model : HeartModel - heart model + Heart model. short_axis : dict - short axis + Short axis. aha_element : np.ndarray - elements with AHA labels, compute only on these elements + Elements with AHA labels. Compute only on these elements. Returns ------- tuple[np.ndarray, np.ndarray, np.ndarray] - longitudinal, radial, circufenrential vecotors of each AHA element + Longitudinal, radial, and circumferential vectors of each AHA element. 
""" elems = model.mesh.tetrahedrons[aha_element] elem_center = np.mean(model.mesh.points[elems], axis=1) diff --git a/src/ansys/health/heart/utils/misc.py b/src/ansys/health/heart/utils/misc.py index 7080a0e49..a666fdfaf 100644 --- a/src/ansys/health/heart/utils/misc.py +++ b/src/ansys/health/heart/utils/misc.py @@ -36,15 +36,14 @@ def clean_directory( extensions_to_remove: list[str] = [".stl", ".vtk", ".msh.h5"], remove_all: bool = False, ) -> None: - """Remove files with extension present in the working directory. + """Remove files from the working directory with given extensions. Parameters ---------- - extensions_to_remove : List[str], optional - List of extensions to remove, by default [".stl", ".vtk", ".msh.h5"] - remove_all: bool, optional - Flag indicating whether to remove files with any extension. - Keeps files/folder without extension + extensions_to_remove : List[str], default: ``['.stl', '.vtk', '.msh.h5']`` + List of extensions to remove. + remove_all: bool, default: False + Whether to remove files with any extension. Files without extensions are kept. """ import glob as glob @@ -69,9 +68,9 @@ def model_summary(model: HeartModel, attributes: list = None) -> dict: Parameters ---------- model : HeartModel - HeartModel for which to generate the summary dictionary + Heart model to generate the summary dictionary for. attributes : list - List of attributes to try to add to the dict. + List of attributes to add to the dictionary. Returns ------- @@ -85,7 +84,7 @@ def model_summary(model: HeartModel, attributes: list = None) -> dict: sum_dict["GENERAL"]["total_num_tets"] = model.mesh.tetrahedrons.shape[0] sum_dict["GENERAL"]["total_num_nodes"] = model.mesh.points.shape[0] except TypeError as error: - LOGGER.error(f"Failed to format General model information. {error}") + LOGGER.error(f"Failed to format general model information. 
{error}") sum_dict["PARTS"] = {} sum_dict["CAVITIES"] = {} @@ -141,7 +140,7 @@ def model_summary(model: HeartModel, attributes: list = None) -> dict: def rodrigues_rot(points: np.ndarray, n0: np.ndarray, n1: np.ndarray) -> np.ndarray: - """Perform rodrigues rotation. + """Perform Rodrigues rotation. Parameters ---------- @@ -155,7 +154,7 @@ def rodrigues_rot(points: np.ndarray, n0: np.ndarray, n1: np.ndarray) -> np.ndar Notes ----- Rotate given points based on a starting and ending vector. - Axis k and angle of rotation theta given by vectors n0,n1. + Axis k and angle of rotation theta is given by vectors ``n0,n1``. P_rot = P*cos(theta) + (k x P)*sin(theta) + k**(1-cos(theta)) Returns @@ -163,7 +162,7 @@ def rodrigues_rot(points: np.ndarray, n0: np.ndarray, n1: np.ndarray) -> np.ndar np.ndarray Rotated points. """ - # If P is only 1d array (coords of single point), fix it to be matrix + # If P is only a 1D array (coordinates of a single point), fix it to be a matrix if points.ndim == 1: points = points[np.newaxis, :] @@ -192,12 +191,14 @@ def project_3d_points(p_set: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.nda Parameters ---------- p_set : np.ndarray - Point set, Nx3 + Point set, Nx3. Notes ----- - Uses SVD to find representative plane: - https://meshlogic.github.io/posts/jupyter/curve-fitting/fitting-a-circle-to-cluster-of-3d-points/ + This method uses SVD to find representative plane. For more information, + see the `Fitting a circle to cluster of 3D points + `_ + blog on the MESHLOGIC site. 
Returns ------- @@ -216,12 +217,12 @@ def project_3d_points(p_set: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.nda normal = vector[2, :] # ------------------------------------------------------------------------------- - # (2) Project points to coords X-Y in 2D plane + # (2) Project points to coordinates X-Y in the 2D plane # ------------------------------------------------------------------------------- points_xy = rodrigues_rot(point_centered, normal, [0, 0, 1]) # ------------------------------------------------------------------------------- - # (2) Project points back to the original CS + # (2) Project points back to the original coordinate system # ------------------------------------------------------------------------------- point_projected = np.zeros(p_set.shape) for i in range(len(points_xy)): @@ -289,7 +290,7 @@ def generate_specific_rows(file, row_indices): def _slerp(v0: np.ndarray, v1: np.ndarray, t: float) -> np.ndarray: - """Spherical Linear Interpolation between two unit vectors v0 and v1.""" + """Spherical Linear Interpolation between two unit vectors, v0 and v1.""" # Compute dot product and clamp to handle numerical issues dot = np.dot(v0, v1) dot = np.clip(dot, -1.0, 1.0) @@ -315,11 +316,11 @@ def interpolate_slerp( Parameters ---------- source_pos : np.ndarray - N x 3 array of source points coordinates + N x 3 array of source points coordinates. source_vec : np.ndarray - N x 3 array of source vectors + N x 3 array of source vectors. target_pos : np.ndarray - M x 3 array of target points coordinates + M x 3 array of target points coordinates. 
Returns ------- @@ -329,7 +330,7 @@ def interpolate_slerp( # legal test norm = np.linalg.norm(source_vec, axis=1) if not np.allclose(norm, 1.0): - raise TypeError("slerp interpolation must be used for unit vectors.") + raise TypeError("Slerp interpolation must be used for unit vectors.") # Build a KD-tree once tree = cKDTree(source_pos) @@ -340,14 +341,14 @@ def interpolate_with_k_nearest(query_point: np.ndarray, k: int = 4) -> np.ndarra Parameters ---------- query_point : np.ndarray - query point coordinate - k : int, optional - no. of nearest points to be used, by default 4 + Query point coordinate. + k : int, default: 4 + Number of nearest points to use. Returns ------- np.ndarray - vector on query point + Vector on query point. """ # Find the k-nearest neighbors distances, indices = tree.query(query_point, k=k) diff --git a/src/ansys/health/heart/utils/vtk_utils.py b/src/ansys/health/heart/utils/vtk_utils.py index f65e23a12..309c21373 100644 --- a/src/ansys/health/heart/utils/vtk_utils.py +++ b/src/ansys/health/heart/utils/vtk_utils.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""Module contains methods for mesh operations related to the vtk library.""" +"""Module contains methods for mesh operations related to the VTK library.""" import copy @@ -37,16 +37,16 @@ def compute_surface_nodal_area_pyvista(surface: pv.PolyData) -> np.ndarray: Parameters ---------- vtk_surface : vtk.vtkPolyData - Vtk object describing the object + VTK object describing the object. Returns ------- np.array - Numpy array with nodal areas of length number of points + NumPy array with nodal areas of length number of points. Notes ----- - Adds the partial areas of connected elements/cells to each node. + This method adds the partial areas of connected elements/cells to each node. 
""" num_points = surface.n_points @@ -79,16 +79,16 @@ def extrude_polydata( ---------- surface : pv.PolyData Surface to extrude - extrude_by : float, optional - Extrude by this much, by default 1 - extrude_direction : np.array, optional - Direction of extrusion, should have three components if not specified - extrudes in normal direction + extrude_by : float, default: 1 + Amount to extrude. + extrude_direction : np.array, default: np.empty(0) + Direction of extrusion, which should have three components. If no + components are specified, it extrudes in the normal direction. Returns ------- pv.PolyData - Extruded PolyData object + Extruded PolyData object. """ extrude_normal = False if len(extrude_direction) == 0: @@ -117,27 +117,27 @@ def extrude_polydata( def cell_ids_inside_enclosed_surface( source: pv.UnstructuredGrid | pv.PolyData, surface: pv.PolyData, tolerance: float = 1e-9 ) -> np.ndarray: - """Get cell ids of cells of which the centroids are inside a given surface. + """Get IDs of cells with centroids that are inside a given surface. Parameters ---------- source : pv.UnstructuredGrid - Source object of which to check which cells are inside/outside - the specified surface + Source object to check which cells are inside/outside + the specified surface. surface : pv.PolyData - Surface used to check whether cells are inside/outside. - tolerance : float, optional - Tolerance for the select_enclosed_points filter, by default 1e-9 + Surface to check whether cells are inside/outside. + tolerance : float, default: 1e-9 + Tolerance for the ``select_enclosed_points`` filter. Notes ----- - This also accepts a source that represent the cell centroids. In this case we can skip computing - the cell_centers. + This method also accepts a source that represents the cell centroids. + In this case, computing the cell centers is skipped. Returns ------- np.ndarray - Array with cell ids that are inside the enclosed surface. 
+ Array with cell IDs that are inside the enclosed surface. """ surface = surface.compute_normals() if isinstance(source, pv.PolyData) and source.n_verts == source.n_points: @@ -157,16 +157,16 @@ def find_cells_close_to_nodes( Parameters ---------- mesh : pv.UnstructuredGrid - target mesh + Target mesh. node_ids : list[int] - node IDs - radius : float, optional - influence radius, by default 2 + Node IDs. + radius : float, default: 2 + Influence radius. Returns ------- np.ndarray - cell IDs + Cell IDs. """ # Get coordinates of the given node IDs points = mesh.points[node_ids] @@ -179,7 +179,7 @@ def find_cells_close_to_nodes( # Create a sphere at the given point sphere = pv.Sphere(radius=radius, center=point) - # Use boolean intersection to find cells that intersect with the sphere + # Use Boolean intersection to find cells that intersect with the sphere selection = mesh.select_enclosed_points(sphere, tolerance=0.0) # Get the indices of the cells @@ -227,18 +227,18 @@ def get_boundary_edge_loops( Parameters ---------- surface : pv.PolyData - Surface mesh to check for boundary edges - remove_open_edge_loops : bool, optional - Removes open edge loops from the return dictionary, by default True + Surface mesh to check for boundary edges. + remove_open_edge_loops : bool, default: True + Whether to remove open edge loops from the returned dictionary. Returns ------- dict - dictionary with the edges that make up the open/closed loop + Dictionary with the edges that make up the open/closed loop. """ # NOTE: Perhaps more consistent to return a pyvista polydata. - # add cell and point ids to keep track of ids. + # add cell and point IDs to keep track of IDs. 
surface1 = copy.deepcopy(surface) surface1.cell_data["original-cell-ids"] = np.arange(0, surface1.n_cells) surface1.point_data["original-point-ids"] = np.arange(0, surface1.n_points) @@ -246,13 +246,13 @@ def get_boundary_edge_loops( # get boundary edges separated by connectivity edges_block = get_boundary_edges(surface1) - # lines formed with original point ids + # lines formed with original point IDs edge_groups = { k: edges.point_data["original-point-ids"][edges.cells_dict[3]] for k, edges in enumerate(edges_block) } - # check if it is a closed or open edge-loop, remove open ones. + # check if it is a closed or open edge loop. Remove open ones. group_types = {} closed_edge_groups = {} for k, edge_group in edge_groups.items(): @@ -276,15 +276,14 @@ def get_boundary_edge_loops( def get_patches_delaunay(surface: pv.PolyData, closed_only: bool = True) -> list[pv.PolyData]: - """Patch boundary edges with a delaunay algorithm. + """Patch boundary edges with a Delaunay algorithm. Parameters ---------- surface : pv.PolyData - Surface with boundary edges for which to find patches. - closed_only : bool - Flag indicating whether to return patches for closed loops of boundary edges, - by default True + Surface with boundary edges to find patches for. + closed_only : bool, default: True + Whether to return patches for closed loops of boundary edges. Returns ------- @@ -326,14 +325,13 @@ def get_patches_with_centroid( Parameters ---------- surface : pv.PolyData - Surface with boundary edges for which to find patches. - closed_only : bool - Flag indicating whether to return patches for closed loops of boundary edges, - by default True + Surface with boundary edges to find patches for. + closed_only : bool, default: True + Whether to return patches for closed loops of boundary edges. Notes ----- - Edges need to be sorted properly for this method to return sensible patches. + Edges must be sorted properly for this method to return sensible patches. 
Returns ------- @@ -378,7 +376,7 @@ def get_patches_with_centroid( def are_connected( mesh1: pv.PolyData | pv.UnstructuredGrid, mesh2: pv.PolyData | pv.UnstructuredGrid ) -> bool: - """Check whether two PolyData or UnstructuredGrids are connected. + """Check whether two ``PolyData`` or ``UnstructuredGrids`` objects are connected. Parameters ---------- @@ -415,11 +413,11 @@ def are_connected( def add_solid_name_to_stl(filename, solid_name, file_type: str = "ascii") -> None: - """Add name of solid to stl file. + """Add name of the solid to STL file. Notes ----- - Supports only single block. + This method supports only a single block. """ if file_type == "ascii": @@ -445,54 +443,54 @@ def add_solid_name_to_stl(filename, solid_name, file_type: str = "ascii") -> Non def find_corresponding_points( - master_surface: pv.PolyData, slave_surface: pv.PolyData, distance: float = 20 + first_surface: pv.PolyData, second_surface: pv.PolyData, distance: float = 20 ) -> np.ndarray: """Find corresponding points between two surfaces. Parameters ---------- - master_surface : pv.PolyData - first surface - slave_surface : pv.PolyData - second surface + first_surface : pv.PolyData + First surface. + second_surface : pv.PolyData + Second surface. distance : float - approximate largest distance between two surfaces + Approximate largest distance between two surfaces. Returns ------- np.ndarray 2*N array - first row is node IDs of master surface, - second row is corresponding node IDs on the slave surface - None if no corresponding node is found + The first row contains node IDs of the first surface. + The second row contains corresponding node IDs on the second surface. + If no corresponding node is found, ``None`` is returned. Notes ----- - Uses ray tracing. + This method uses ray tracing. The two surfaces are assumed to be close and nearly parallel. 
- As a result, the correspondence is not one-to-one—some points may - have no corresponding match, while others may share the same + As a result, the correspondence is not one to one. Some points might + have no corresponding match, while others might share the same corresponding point. """ - # NOTE: using UVC coordinates leads to a shift in + # NOTE: Using UVCs lead to a shift in the # longitudinal direction from epicardium to endocardium and is thus not an option. - # Compute normal of master surface - master_surface.compute_normals(inplace=True) + # Compute normal of the first surface + first_surface.compute_normals(inplace=True) - points_m = master_surface.points - normals_m = master_surface.point_data["Normals"] + points_m = first_surface.points + normals_m = first_surface.point_data["Normals"] # corresponding points corresp_points = [] - tree_s = slave_surface.find_closest_point + tree_s = second_surface.find_closest_point # Find intersections for i in range(len(points_m)): start_point = points_m[i] direction = normals_m[i] # Cast a ray along the normal direction - intersection, _ = slave_surface.ray_trace( + intersection, _ = second_surface.ray_trace( start_point - direction * distance, start_point + direction * distance ) @@ -505,7 +503,7 @@ def find_corresponding_points( else: corresp_points.append(None) # fill None for no corresponding point - return np.vstack((range(0, master_surface.n_points), corresp_points)) + return np.vstack((range(0, first_surface.n_points), corresp_points)) def generate_thickness_lines( @@ -517,16 +515,16 @@ def generate_thickness_lines( Parameters ---------- surface1 : pv.PolyData - master surface + First surface. surface2 : pv.PolyData - slave surface - res : np.ndarray, optional - corresponding points array, default None + Second surface. + corresponding_points : np.ndarray, default: None + Corresponding points array. Returns ------- pv.PolyData - it contains cell data named 'thickenss'. 
+ Object contains cell data named ``thickness``. """ if corresponding_points is None: corresponding_points = find_corresponding_points(surface1, surface2) diff --git a/src/ansys/health/heart/writer/custom_keywords/em_kws.json b/src/ansys/health/heart/writer/custom_keywords/em_kws.json index 604e76a1a..21f70a5cb 100644 --- a/src/ansys/health/heart/writer/custom_keywords/em_kws.json +++ b/src/ansys/health/heart/writer/custom_keywords/em_kws.json @@ -8,7 +8,7 @@ "default": null, "position": 0, "width": 10, - "help": "Part set on which the system is solved" + "help": "Part set that the system is solved on." }, { "name": "numSplitI", @@ -16,7 +16,7 @@ "default": null, "position": 10, "width": 10, - "help": "ID of the Laplace system that is solved in the transmural direction" + "help": "ID of the Laplace system that is solved in the transmural direction." }, { "name": "actuSigma", @@ -38,7 +38,7 @@ "default": null, "position": 0, "width": 10, - "help": "Part set on which the system is solved" + "help": "Part set that the system is solved on." }, { "name": "solvid1", @@ -46,7 +46,7 @@ "default": null, "position": 10, "width": 10, - "help": "ID of the Laplace system that is solved in the transmural direction" + "help": "ID of the Laplace system that is solved in the transmural direction." }, { "name": "solvid2", @@ -62,7 +62,7 @@ "default": null, "position": 30, "width": 10, - "help": "helical angle with respect to the counterclockwise circumferential direction in the heart when looking from the base towards the apex. If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 1- for available parameters" + "help": "Helical angle with respect to the counterclockwise circumferential direction in the heart when looking from the base towards the apex. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 1- for available parameters." 
}, { "name": "beta", @@ -70,7 +70,7 @@ "default": null, "position": 40, "width": 10, - "help": "angle with respect to the outward transmural axis of the heart. If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 1- for available parameters" + "help": "Angle with respect to the outward transmural axis of the heart. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 1- for available parameters." }, { "name": "wfile", @@ -78,7 +78,7 @@ "default": null, "position": 50, "width": 10, - "help": "Selects whether result files (ELEMENT_SOLID_ORTHO.k and vtk files) are exported. Eq 0: not exported. Eq 1: exported" + "help": "Selects whether result files (ELEMENT_SOLID_ORTHO.k and VTK files) are exported. Eq 0: not exported. Eq 1: exported" }, { "name": "prerun", @@ -86,7 +86,7 @@ "default": null, "position": 60, "width": 10, - "help": "Selects whether the run is stopped after creating fibers: Eq. 0: do not stop after fiber creation. Eq 1: stop after fiber creation" + "help": "Selects whether the run is stopped after creating fibers: Eq. 0: do not stop after fiber creation. Eq 1: stop after fiber creation." } ] } @@ -101,7 +101,7 @@ "position": 0, "width": 10, "link": 14, - "help": "Material ID: refers to MID in the *PART card." + "help": "Material ID:, which refers to MID in the *PART card." }, { "name": "MTYPE", @@ -123,7 +123,7 @@ "default": null, "position": 20, "width": 10, - "help": "The 1,1 term in the 3 x 3 electromagnetic conductivity tensor matrix. Note that 1 corresponds to the a material direction.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 1,1 term in the 3 x 3 electromagnetic conductivity tensor matrix. Note that 1 corresponds to the a material direction. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." 
}, { "name": "SIGMA22", @@ -131,7 +131,7 @@ "default": null, "position": 30, "width": 10, - "help": "The 2,2 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 2,2 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA33", @@ -139,7 +139,7 @@ "default": null, "position": 40, "width": 10, - "help": "The 2,2 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters. " + "help": "The 2,2 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters. " }, { "name": "BETA", @@ -155,7 +155,7 @@ "default": 1.0, "position": 60, "width": 10, - "help": "Membrane capacitance" + "help": "Membrane capacitance." } ] }, @@ -167,7 +167,7 @@ "default": null, "position": 0, "width": 10, - "help": "The 1,2 term in the 3 x 3 electromagnetic conductivity tensor matrix.Note that 2 corresponds to the b material direction.. If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 1,2 term in the 3 x 3 electromagnetic conductivity tensor matrix. Note that 2 corresponds to the b material direction. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA13", @@ -175,7 +175,7 @@ "default": null, "position": 10, "width": 10, - "help": "The 1,3 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." 
+ "help": "The 1,3 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA21", @@ -183,7 +183,7 @@ "default": null, "position": 20, "width": 10, - "help": "The 2,1 term in the 3 x 3 electromagnetic conductivity tensor matrix. Note that 1 corresponds to the a material direction.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 2,1 term in the 3 x 3 electromagnetic conductivity tensor matrix. Note that 1 corresponds to the a material direction. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA23", @@ -191,7 +191,7 @@ "default": null, "position": 30, "width": 10, - "help": "The 2,3 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 2,3 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA31", @@ -199,7 +199,7 @@ "default": null, "position": 40, "width": 10, - "help": "The 3,1 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." + "help": "The 3,1 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "SIGMA32", @@ -207,7 +207,7 @@ "default": null, "position": 50, "width": 10, - "help": "The 3,2 term in the 3 x 3 electromagnetic conductivity tensor matrix.If a negative value is entered, a *DEFINE_FUNCTION will be expected. See remark 3- for available parameters." 
+ "help": "The 3,2 term in the 3 x 3 electromagnetic conductivity tensor matrix. If a negative value is entered, a *DEFINE_FUNCTION is expected. See remark 3- for available parameters." }, { "name": "AOPT", @@ -223,7 +223,7 @@ "default": "0", "position": 60, "width": 10, - "help": "Material axes option:\n EQ.0.0: locally orthotropic with material axes determined by element nodes\n EQ.1.0:locally orthotropic with material axes determined by a point in space and the global location of the element center this is the a-direction.\n EQ.2.0:globally orthotropic with material axes determined by vectors defined below, as with *DEFINE_COORDINATE_VECTOR. \n EQ.3.0:locally orthotropic material axes determined by rotating the material axes about the element normal by an angle,BETA, from a line in the plane of the element defined by the cross product of the vector v with the element normal. The plane of a solid element is the midsurface between the inner surface and outer surface defined by the first four nodes and the last four nodes of the connectivity of the element, respectively.\n EQ.4.0:locally orthotropic in cylindrical coordinate system with the material axes determined by a vector v, and an originating point, P, which define the centerline axis. This option is for solid elements only. \n EQ.5.0:globally defined reference frame with (a,b,c)=(X0,Y0,Z0)." + "help": "Material axes option:\n EQ.0.0: locally orthotropic with material axes determined by element nodes\n EQ.1.0:locally orthotropic with material axes determined by a point in space and the global location of the element center. This is the a-direction.\n EQ.2.0:globally orthotropic with material axes determined by vectors defined below, as with *DEFINE_COORDINATE_VECTOR. \n EQ.3.0:locally orthotropic material axes determined by rotating the material axes about the element normal by an angle,BETA, from a line in the plane of the element defined by the cross product of the vector v with the element normal. 
The plane of a solid element is the midsurface between the inner surface and outer surface defined by the first four nodes and the last four nodes of the connectivity of the element, respectively.\n EQ.4.0:locally orthotropic in cylindrical coordinate system with the material axes determined by a vector v, and an originating point, P, which define the centerline axis. This option is for solid elements only. \n EQ.5.0:globally defined reference frame with (a,b,c)=(X0,Y0,Z0)." }, { "name": "LAMBDA", @@ -231,7 +231,7 @@ "default": null, "position": 70, "width": 10, - "help": "Intra- to extracellular conductivity ratio. When non-empty, the elliptic equation is solved to compute extracellular potentials" + "help": "Intra- to extracellular conductivity ratio. When non-empty, the elliptic equation is solved to compute extracellular potentials." } ] }, @@ -357,7 +357,7 @@ "default": 1, "position": 0, "width": 10, - "help": "Material ID" + "help": "Material ID." }, { "name": "PHIENDMID", @@ -365,7 +365,7 @@ "default": 0.17, "position": 10, "width": 10, - "help": "Phi endocardium > mid" + "help": "Phi endocardium > mid." }, { "name": "PHIMIDEPI", @@ -373,7 +373,7 @@ "default": 0.58, "position": 20, "width": 10, - "help": "Phi mid > epicardium" + "help": "Phi mid > epicardium." } ] } @@ -387,7 +387,7 @@ "default": 1, "position": 0, "width": 10, - "help": "Material ID. A unique number must be specified (see *PART)." + "help": "Material ID. A unique number must be specified. (See *PART." } ] } @@ -401,7 +401,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID of the ECG computation" + "help": "ID of the ECG computation." 
}, { "name": "PSID", @@ -409,7 +409,7 @@ "default": null, "position": 10, "width": 10, - "help": "Point set ID containing the list of virtual points on which the pseudo-ECGs are computed" + "help": "Point set ID containing the list of virtual points that the pseudo-ECGs are computed on" } ] } @@ -423,7 +423,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID of the Laplace system to solve (define new id with each new line)" + "help": "ID of the Laplace system to solve. (Define a new ID with each new line.)" }, { "name": "Part ID", @@ -431,7 +431,7 @@ "default": null, "position": 10, "width": 10, - "help": "Part id on which the system is solved" + "help": "Part ID that the system is solved on" }, { "name": "sType", @@ -439,7 +439,7 @@ "default": null, "position": 20, "width": 10, - "help": "Segment type: EQ.1: node set, EQ.2: segment set" + "help": "Segment type: EQ.1: nodeset, EQ.2: segment set." }, { "name": "Ssid1", @@ -447,7 +447,7 @@ "default": null, "position": 30, "width": 10, - "help": "Set on which a potential of value 1 is prescribed" + "help": "Set a potential of value 1 is prescribed on." }, { "name": "Ssid0", @@ -455,7 +455,7 @@ "default": null, "position": 40, "width": 10, - "help": "Set on which a potential of value 0 is prescribed" + "help": "Set a potential of value 0 is prescribed on" } ] } @@ -469,7 +469,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID for the Purkinje network" + "help": "ID for the Purkinje network." }, { "name": "BUILDNET", @@ -485,7 +485,7 @@ "default": null, "position": 20, "width": 10, - "help": "Segment set on which the Purkinje network is lying" + "help": "Segment set that the Purkinje network is lying on" }, { "name": "MID", @@ -493,7 +493,7 @@ "default": null, "position": 30, "width": 10, - "help": "Material ID defined in the *MAT section" + "help": "Material ID defined in the *MAT section." 
}, { "name": "POINTSTX", @@ -501,7 +501,7 @@ "default": null, "position": 40, "width": 10, - "help": "X coordinate of the tree origin" + "help": "X coordinate of the tree origin." }, { "name": "POINTSTY", @@ -509,7 +509,7 @@ "default": null, "position": 50, "width": 10, - "help": "Y coordinate of the tree origin" + "help": "Y coordinate of the tree origin." }, { "name": "POINTSTZ", @@ -517,7 +517,7 @@ "default": null, "position": 60, "width": 10, - "help": "Z coordinate of the tree origin" + "help": "Z coordinate of the tree origin." }, { "name": "EDGELEN", @@ -525,7 +525,7 @@ "default": "3", "position": 70, "width": 10, - "help": "Edge length" + "help": "Edge length." } ] }, @@ -537,7 +537,7 @@ "default": "30", "position": 0, "width": 10, - "help": "Number of generations of branches" + "help": "Number of generations of branches." }, { "name": "NBRINIT", @@ -545,7 +545,7 @@ "default": "8", "position": 10, "width": 10, - "help": "Number of branches attached to the tree origin" + "help": "Number of branches attached to the tree origin." }, { "name": "NSPLIT", @@ -553,7 +553,7 @@ "default": "2", "position": 20, "width": 10, - "help": "Number of child branches at each node of the tree" + "help": "Number of child branches at each node of the tree." }, { "name": "INODEID", @@ -561,7 +561,7 @@ "default": null, "position": 30, "width": 10, - "help": "Initial node ID" + "help": "Initial node ID." }, { "name": "IEDGEID", @@ -569,7 +569,7 @@ "default": null, "position": 40, "width": 10, - "help": "Initial edge ID" + "help": "Initial edge ID." } ] } @@ -583,7 +583,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID of the stimulation" + "help": "ID of the stimulation." }, { "name": "SETTYPE", @@ -591,7 +591,7 @@ "default": null, "position": 10, "width": 10, - "help": "Set type: EQ.1: Segment set, EQ.2: Node set" + "help": "Set type: EQ.1: Segment set, EQ.2: nodeset." 
}, { "name": "SETID", @@ -599,7 +599,7 @@ "default": null, "position": 20, "width": 10, - "help": "Node set or segment set ID to be stimulated" + "help": "nodeset or segment set ID to stimulate." } ] }, @@ -611,7 +611,7 @@ "default": null, "position": 0, "width": 10, - "help": "Starting time of the stimulation" + "help": "Starting time of the stimulation." }, { "name": "STIMT", @@ -619,7 +619,7 @@ "default": "1000", "position": 10, "width": 10, - "help": "Stimulation period" + "help": "Stimulation period." }, { "name": "STIMDUR", @@ -627,7 +627,7 @@ "default": "2", "position": 20, "width": 10, - "help": "Stimulation duration" + "help": "Stimulation duration." }, { "name": "STIMAMP", @@ -635,7 +635,7 @@ "default": "50", "position": 30, "width": 10, - "help": "Stimulation amplitude" + "help": "Stimulation amplitude." } ] } @@ -649,7 +649,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID of the stimulation" + "help": "ID of the stimulation." }, { "name": "SETTYPE", @@ -657,7 +657,7 @@ "default": null, "position": 10, "width": 10, - "help": "Set type: EQ.1: Segment set, EQ.2: Node set" + "help": "Set type: EQ.1: Segment set, EQ.2: nodeset." }, { "name": "SETID", @@ -665,7 +665,7 @@ "default": null, "position": 20, "width": 10, - "help": "Node set or segment set ID to be stimulated" + "help": "nodeset or segment set ID to stimulate" }, { "name": "LCID", @@ -673,7 +673,7 @@ "default": null, "position": 30, "width": 10, - "help": "load curve to use for stimulation, where the first coordinate represents time and the second represents the stim. amplitude" + "help": "Load curve to use for stimulation, where the first coordinate represents time and the second represents the stimulation amplitude." 
} ] } diff --git a/src/ansys/health/heart/writer/custom_keywords/keywords/em_boundary_prescribed.py b/src/ansys/health/heart/writer/custom_keywords/keywords/em_boundary_prescribed.py index 3a97ef7d5..41de26254 100644 --- a/src/ansys/health/heart/writer/custom_keywords/keywords/em_boundary_prescribed.py +++ b/src/ansys/health/heart/writer/custom_keywords/keywords/em_boundary_prescribed.py @@ -85,7 +85,7 @@ def bptype(self, value: int) -> None: def settype(self) -> int: """Get or set the Set type: EQ.1:Segment Set. - EQ.2: Node Set. + EQ.2: nodeset. EQ.3: Fluid part. See *ICFD_PART. """ # nopep8 return self._cards[0].get_value("settype") diff --git a/src/ansys/health/heart/writer/custom_keywords/keywords/em_ep_tentusscher_stimulus.py b/src/ansys/health/heart/writer/custom_keywords/keywords/em_ep_tentusscher_stimulus.py index bfbb7289d..2d513e249 100644 --- a/src/ansys/health/heart/writer/custom_keywords/keywords/em_ep_tentusscher_stimulus.py +++ b/src/ansys/health/heart/writer/custom_keywords/keywords/em_ep_tentusscher_stimulus.py @@ -55,7 +55,7 @@ def __init__(self, **kwargs): @property def stimid(self) -> typing.Optional[int]: - """Get or set the ID of the stimulation""" # nopep8 + """Get or set the ID of the stimulation.""" # nopep8 return self._cards[0].get_value("stimid") @stimid.setter @@ -64,7 +64,7 @@ def stimid(self, value: int) -> None: @property def settype(self) -> typing.Optional[int]: - """Get or set the Set type: EQ.1: Segment set, EQ.2: Node set""" # nopep8 + """Get or set the Set type: EQ.1: Segment set, EQ.2: Node Set""" # nopep8 return self._cards[0].get_value("settype") @settype.setter @@ -73,7 +73,7 @@ def settype(self, value: int) -> None: @property def setid(self) -> typing.Optional[int]: - """Get or set the Node set or segment set ID to be stimulated""" # nopep8 + """Get or set the nodeset or segment set ID to stimulate.""" # nopep8 return self._cards[0].get_value("setid") @setid.setter @@ -82,7 +82,7 @@ def setid(self, value: int) -> None: 
@property def stimstrt(self) -> typing.Optional[float]: - """Get or set the Starting time of the stimulation""" # nopep8 + """Get or set the starting time of the stimulation.""" # nopep8 return self._cards[1].get_value("stimstrt") @stimstrt.setter @@ -91,7 +91,7 @@ def stimstrt(self, value: float) -> None: @property def stimt(self) -> float: - """Get or set the Stimulation period""" # nopep8 + """Get or set the Stimulation period.""" # nopep8 return self._cards[1].get_value("stimt") @stimt.setter @@ -100,7 +100,7 @@ def stimt(self, value: float) -> None: @property def stimdur(self) -> float: - """Get or set the Stimulation duration""" # nopep8 + """Get or set the stimulation duration.""" # nopep8 return self._cards[1].get_value("stimdur") @stimdur.setter @@ -109,7 +109,7 @@ def stimdur(self, value: float) -> None: @property def stimamp(self) -> float: - """Get or set the Stimulation amplitude""" # nopep8 + """Get or set the stimulation amplitude.""" # nopep8 return self._cards[1].get_value("stimamp") @stimamp.setter diff --git a/src/ansys/health/heart/writer/custom_keywords/user_kws.json b/src/ansys/health/heart/writer/custom_keywords/user_kws.json index 27f7e24a2..acf8226c6 100644 --- a/src/ansys/health/heart/writer/custom_keywords/user_kws.json +++ b/src/ansys/health/heart/writer/custom_keywords/user_kws.json @@ -8,7 +8,7 @@ "default": null, "position": 0, "width": 10, - "help": "ID of the set to which user-defined loading will be applied. Set type depends on the type of loading, see LTYPE." + "help": "ID of the set to apply user-defined loading to. Set type depends on the type of loading. See LTYPE." }, { "name": "LTYPE", @@ -24,7 +24,7 @@ "default": null, "position": 20, "width": 10, - "help": "Load curve, a function of time. Its current value, crv, is passed to user subroutine LOADSETUD." + "help": "Load curve, which is a function of time. Its current value, crv, is passed to the user subroutine LOADSETUD." 
}, { "name": "CID", @@ -32,7 +32,7 @@ "default": null, "position": 30, "width": 10, - "help": "Optional coordinate system along which scale factors SFi is defined. Global system is the default system." + "help": "Optional coordinate system that scale factors SFi is defined on. Global system is the default system." }, { "name": "SF1", @@ -40,7 +40,7 @@ "default": null, "position": 40, "width": 10, - "help": "Scale factor of loading magnitude, when LTYPE" + "help": "Scale factor of loading magnitude, when LTYPE." }, { "name": "SF2", @@ -48,7 +48,7 @@ "default": null, "position": 50, "width": 10, - "help": "Scale factor of loading magnitude, when LTYPE" + "help": "Scale factor of loading magnitude, when LTYPE." }, { "name": "SF3", @@ -56,7 +56,7 @@ "default": null, "position": 60, "width": 10, - "help": "Scale factor of loading magnitude, when LTYPE" + "help": "Scale factor of loading magnitude, when LTYPE." }, { "name": "IDULS", @@ -64,7 +64,7 @@ "default": null, "position": 70, "width": 10, - "help": "Each USER_LOADING_SET can be assigned a unique ID, which is passed to user subroutine LOADSETUD and allows multiple loading definitions by using a single user subroutine, LOADSETUD. If no value is input, LS-DYNA will assign a sequence number to each USER_LOADING_SET based on its definition sequence." + "help": "Each USER_LOADING_SET can be assigned a unique ID, which is passed to the user subroutine LOADSETUD and allows multiple loading definitions by using a single-user subroutine, LOADSETUD. If no value is specified, LS-DYNA assigns a sequence number to each USER_LOADING_SET based on its definition sequence." } ] } @@ -78,7 +78,7 @@ "default": null, "position": 0, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM2", @@ -86,7 +86,7 @@ "default": null, "position": 10, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." 
}, { "name": "PARM3", @@ -94,7 +94,7 @@ "default": null, "position": 20, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM4", @@ -102,7 +102,7 @@ "default": null, "position": 30, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM5", @@ -110,7 +110,7 @@ "default": null, "position": 40, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM6", @@ -118,7 +118,7 @@ "default": null, "position": 50, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM7", @@ -126,7 +126,7 @@ "default": null, "position": 60, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." }, { "name": "PARM8", @@ -134,7 +134,7 @@ "default": null, "position": 70, "width": 10, - "help": "This is the Nth user input parameter" + "help": "Nth user input parameter." 
} ] } diff --git a/src/ansys/health/heart/writer/define_function_templates.py b/src/ansys/health/heart/writer/define_function_templates.py index 6ab15f92c..2109b4687 100644 --- a/src/ansys/health/heart/writer/define_function_templates.py +++ b/src/ansys/health/heart/writer/define_function_templates.py @@ -26,7 +26,7 @@ def _function_alpha(alpha_endo: float = -60, alpha_epi: float = 60): - """Define the alpha angle for fiber definition.""" + """Define the alpha angle for the fiber definition.""" return "\n".join( [ "float alpha(", @@ -48,7 +48,7 @@ def _function_alpha(alpha_endo: float = -60, alpha_epi: float = 60): def _function_beta(beta_endo: float = 25, beta_epi: float = -65): - """Define the beta angle for fiber definition in ventricles.""" + """Define the beta angle for the fiber definition in ventricles.""" return "\n".join( [ " float beta(", @@ -70,7 +70,7 @@ def _function_beta(beta_endo: float = 25, beta_epi: float = -65): def _function_beta_septum(beta_endo: float = -65, beta_epi: float = 25): - """Define the beta angle for fiber definition in the septum.""" + """Define the beta angle for the fiber definition in the septum.""" return "\n".join( [ " float betaW(", @@ -488,11 +488,11 @@ def _define_function_0d_system( Parameters ---------- function_id : int - Function ID that defines the interaction between control volumes + Function ID that defines the interaction between control volumes. function_name : str - Name of the function + Function name. parameters : dict - Parameters of the system model + Parameters of the system model. Returns ------- diff --git a/src/ansys/health/heart/writer/dynawriter.py b/src/ansys/health/heart/writer/dynawriter.py index 9ed68a57b..43f34e3bf 100644 --- a/src/ansys/health/heart/writer/dynawriter.py +++ b/src/ansys/health/heart/writer/dynawriter.py @@ -20,11 +20,11 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -"""Module contain. classes for writing LS-DYNA keywords based. 
+"""Module containing classes for writing LS-DYNA keywords. Notes ----- -Uses a HeartModel (from ansys.health.heart.models). +This module uses a heart model from the ``ansys.health.heart.models`` module. """ @@ -85,8 +85,8 @@ create_define_sd_orientation_kw, create_discrete_elements_kw, create_element_shell_keyword, + create_element_solid_keyword, create_element_solid_ortho_keyword, - create_elemetn_solid_keyword, create_node_keyword, create_node_set_keyword, create_segment_set_keyword, @@ -107,27 +107,27 @@ class BaseDynaWriter: """Base class that contains essential features for all LS-DYNA heart models.""" def __init__(self, model: HeartModel, settings: SimulationSettings = None) -> None: - """Initialize writer by loading a HearModel and the desired settings. + """Initialize writer by loading a heart model and the desired settings. Parameters ---------- model : HeartModel - HeartModel object which contains the necessary information for the writer, - such as nodes, elements, and parts - settings : SimulationSettings, optional - Simulation settings used to create the LS-DYNA model. - Loads defaults if None, by default None + Object that contains the necessary information for the writer, + such as nodes, elements, and parts. + settings : SimulationSettings, default: None + Simulation settings for creating the LS-DYNA model. + The dfeault settings are loaded in ``None``is used. 
Example ------- TODO: add example """ self.model = model - """Model information necessary for creating the LS-DYNA .k files.""" + """Model information necessary for creating the LS-DYNA K files.""" self.kw_database = BaseDecks() - # These are general attributes useful for keeping track of ids: + # These are general attributes useful for keeping track of IDs: self.max_node_id: int = 0 """Max node id.""" self._used_part_ids: List[int] = [] @@ -153,7 +153,7 @@ def __init__(self, model: HeartModel, settings: SimulationSettings = None) -> No "vector": 0, "element": {"solid": 0, "discrete": 0, "shell": 0}, } - """Id offset for several relevant keywords.""" + """ID offset for several relevant keywords.""" #! Do we really need the below? for part in self.model.parts: @@ -162,7 +162,7 @@ def __init__(self, model: HeartModel, settings: SimulationSettings = None) -> No self.id_offset["part"] = np.max(self.model.part_ids) - # ! Removed the below since the part ids in self.model.parts are already defined. + # ! Removed the below since the part IDs in self.model.parts are already defined. # for part in self.model.parts: # id += 1 # # cannot use get_unique_part_id() because it checks in Deck() @@ -224,8 +224,8 @@ def _update_node_db(self, ids: np.ndarray = None): Parameters ---------- - ids : np.ndarray, optional - 0-based ids of nodes to write, by default None + ids : np.ndarray, default: None + 0-based IDs of the nodes to write. 
""" LOGGER.debug("Updating node keywords...") node_kw = keywords.Node() @@ -240,7 +240,7 @@ def _update_node_db(self, ids: np.ndarray = None): return def _update_parts_db(self): - """Loop over parts defined in the model and creates keywords.""" + """Loop over parts defined in the model and create keywords.""" LOGGER.debug("Updating part keywords...") # add parts with a dataframe @@ -303,7 +303,7 @@ def _update_segmentsets_db(self, add_caps: bool = False, add_cavities: bool = Tr for surface in part.surfaces: surface_global = self.model.mesh.get_surface(surface.id) if not surface_global: - LOGGER.debug(f"Failed to create segment set for {surface.name}") + LOGGER.debug(f"Failed to create segment set for {surface.name}.") continue if surface_global.n_cells == 0: LOGGER.debug(f"Failed to create segment set for {surface.name}. Empty mesh.") @@ -341,12 +341,12 @@ def _filter_bc_nodes(self, surface: SurfaceMesh): Notes ----- - The removed node must be connected with at least 1 node outside the boundary, see #656. + The removed node must be connected with at least one node outside the boundary. See #656. Parameters ---------- surface : SurfaceMesh - Boundary surface to be analysed. + Boundary surface to analyze. 
Returns ------- @@ -377,17 +377,17 @@ def _filter_bc_nodes(self, surface: SurfaceMesh): ] ) - # getting tets with 4 nodes in boundary + # get tets with 4 nodes in boundary issue_tets = np.where(np.sum(tet_mask, axis=0) == 4)[0] - # getting corresponding nodes + # get corresponding nodes issue_nodes = active_tets[issue_tets, :] - # counting node appearances + # count node appearances u_active_tets, tet_count_active = np.unique(active_tets, return_counts=True) u_issue_nodes, tet_count_issue = np.unique(issue_nodes, return_counts=True) - # finding issue nodes that belong to at least one non-issue tet + # find issue nodes that belong to at least one non-issue tet removable_mask = np.array( [ tet_count_active[np.where(u_active_tets == ii)[0][0]] @@ -396,15 +396,15 @@ def _filter_bc_nodes(self, surface: SurfaceMesh): ] ).reshape(-1, 4) - # removing the first issue node belonging to at least one non-issue tet (for each tet) + # remove the first issue node belonging to at least one non-issue tet (for each tet) column_idxs = np.argmax(removable_mask, axis=1) nodes_toremove = np.unique( [issue_nodes[ii, column_idxs[ii]] for ii in range(len(issue_tets))] ) - # checking that there are no nodes that only belong to non-issue tets + # check that there are no nodes that only belong to non-issue tets if not np.all(np.any(removable_mask, axis=1)): - # removing all such nodes and all their neighbors + # remove all such nodes and all their neighbors unsolvable_nodes = np.unique(issue_nodes[np.where(~np.any(removable_mask, axis=1))[0]]) #! NOTE: surface.point_neighbors uses local indexing, so should get local index #! from global indices. @@ -436,18 +436,19 @@ def _filter_bc_nodes(self, surface: SurfaceMesh): def _update_nodesets_db( self, remove_duplicates: bool = True, remove_one_node_from_cell: bool = False ): - """Update the node set database. + """Update the nodeset database. 
Parameters ---------- - remove_duplicates : bool, optional - Remove nodes if they are used in other nodeset, by default True - remove_one_node_from_cell : bool, optional - Remove a node if a cell has all nodes in nodeset, by default False + remove_duplicates : bool, default: True + Whether to remove nodes if they are used in other nodesets. + remove_one_node_from_cell : bool, default: False + Whether to remove a node if a cell has all nodes in a nodeset. Notes ----- - In FiberGenerationWriter, we do not allow all nodes of same element in one nodeset. + The ``FiberGenerationWriter`` module does not allow all nodes of the same + element in one nodeset. """ # formats endo, epi- and septum nodeset keywords, do for all surfaces # for each surface in each part add the respective node-set @@ -467,7 +468,7 @@ def _update_nodesets_db( if len(node_ids) == 0: LOGGER.debug( - "Nodes already used. Skipping node set for {0}".format( + "Nodes already used. Skipping nodeset for {0}".format( part.name + " " + cap.name ) ) @@ -490,7 +491,7 @@ def _update_nodesets_db( #! get up-to-date version of the surface. surface1 = self.model.mesh.get_surface(surface.id) if surface1.n_cells == 0: - LOGGER.debug(f"Failed to create node set for {surface.name}. Empty mesh.") + LOGGER.debug(f"Failed to create nodeset for {surface.name}. Empty mesh.") continue if remove_one_node_from_cell: @@ -510,18 +511,20 @@ def _update_nodesets_db( self.kw_database.node_sets.append(kw) def _get_unique_id(self, keyword: str, return_used_ids: bool = False) -> int: - """Get unique id of given keyword. + """Get unique ID of a given keyword. Parameters ---------- keyword : str Keyword string: valid inputs include: ["SECTION", "PART", "MAT", "SET_SEGMENT", "SET_NODE", "CURVE", ...] + return_used_ids : bool, default: False + Whether to return used IDs along with the next unique ID. Returns ------- int - Next unique id + Next unique ID. 
""" used_ids = [0] for key in self.kw_database.__dict__.keys(): @@ -530,7 +533,7 @@ def _get_unique_id(self, keyword: str, return_used_ids: bool = False) -> int: used_ids = np.array(used_ids, dtype=int) _, counts = np.unique(used_ids, return_counts=True) if np.any(counts > 1): - raise ValueError("{0} Duplicate ids found for: {1}".format(counts, keyword)) + raise ValueError("{0} Duplicate IDs found for: {1}".format(counts, keyword)) if return_used_ids: return np.max(used_ids) + 1, used_ids @@ -538,38 +541,38 @@ def _get_unique_id(self, keyword: str, return_used_ids: bool = False) -> int: return np.max(used_ids) + 1 def get_unique_part_id(self) -> int: - """Suggest a unique non-used part id.""" + """Suggest a unique non-used part ID.""" return self._get_unique_id("PART") def get_unique_mat_id(self) -> int: - """Suggest a unique non-used material id.""" + """Suggest a unique non-used material ID.""" return self._get_unique_id("MAT") def get_unique_section_id(self) -> int: - """Suggest a unique non-used section id.""" + """Suggest a unique non-used section ID.""" return self._get_unique_id("SECTION") def get_unique_segmentset_id(self) -> int: - """Suggest a unique non-used segment set id.""" + """Suggest a unique non-used segment set ID.""" return self._get_unique_id("SET_SEGMENT") def get_unique_nodeset_id(self) -> int: - """Suggest a unique non-used node set id.""" + """Suggest a unique non-used nodeset ID.""" return self._get_unique_id("SET_NODE") def get_unique_partset_id(self) -> int: - """Suggest a unique non-used node set id.""" + """Suggest a unique non-used part ID.""" return self._get_unique_id("SET_PART") def get_unique_curve_id(self) -> int: - """Suggest a unique curve-id.""" + """Suggest a unique curve ID.""" return self._get_unique_id("DEFINE_CURVE") def _get_decknames_of_include(self) -> list[str]: """ - Get a list of deck file name in keyword database. + Get a list of deck file names in the keyword database. - Except main and omit any empty decks. 
+ Do not get those in the main deck and omit any empty decks. """ include_files = [] for deckname, deck in vars(self.kw_database).items(): @@ -584,12 +587,12 @@ return include_files def include_to_main(self, file_list: list[str] | str = []): - """Add *INCLUDE keywords into main. + """Add *INCLUDE keywords into the main deck. Parameters ---------- - file_list : list[str] | str, optional - file(s) to be included, by default [] + file_list : list[str] | str, default: [] + Files to include. """ if isinstance(file_list, str): file_list = [file_list] @@ -605,19 +608,19 @@ def export(self, export_directory: str, user_k: list[str] = []): Parameters ---------- export_directory : str - export directory - user_k : list[str], optional - user provided k files, by default [] + Export directory. + user_k : list[str], default: [] + User-provided K files. """ tstart = time.time() - LOGGER.info("Writing all LS-DYNA .k files...") + LOGGER.info("Writing all LS-DYNA K files...") if not os.path.isdir(export_directory): os.makedirs(export_directory) for k_file in user_k: if not os.path.isfile(k_file): - error_msg = f"File {k_file} not found." + error_msg = f"File {k_file} is not found."
LOGGER.error(error_msg) raise FileNotFoundError(error_msg) else: @@ -637,7 +640,7 @@ return def export_databases(self, export_directory: str): - """Export each of non-empty databases to a specified directory.""" + """Export each non-empty database to a specified directory.""" if not export_directory: export_directory = self.model.info.working_directory @@ -664,7 +667,7 @@ def _keep_ventricles(self): """Remove any non-ventricular parts.""" - LOGGER.debug("Just keeping ventricular-parts for fiber/purkinje generation") + LOGGER.debug("Only keeping ventricular-parts for fiber/Purkinje generation.") parts_to_keep = [ p.name for p in self.model.parts if p.part_type in [PartType.VENTRICLE, PartType.SEPTUM] ] @@ -681,19 +684,19 @@ def _keep_parts(self, parts_to_keep: List[str]): def _update_solid_elements_db(self, add_fibers: bool = True): """ - Create Solid (ortho) elements for all parts. + Create solid (ortho) elements for all parts. Parameters ---------- - add_fibers: bool, True - if add fiber in general. + add_fibers: bool, default: True + Whether to add fibers in general.
""" LOGGER.debug("Updating solid element keywords...") if add_fibers: cell_data_fields = self.model.mesh.cell_data.keys() if "fiber" not in cell_data_fields or "sheet" not in cell_data_fields: - raise KeyError("Mechanics writer requires fiber and sheet fields") + raise KeyError("Mechanics writer requires fiber and sheet fields.") # create elements for each part for part in self.model.parts: @@ -772,7 +775,7 @@ def __init__( """Collection of keyword decks relevant for mechanics.""" self.set_flow_area: bool = True - """If flow area is set for control volume.""" + """Flag indicating if the flow area is set for control volume.""" return def update(self, dynain_name: str = None, robin_bcs: list[Callable] = None): @@ -780,14 +783,14 @@ def update(self, dynain_name: str = None, robin_bcs: list[Callable] = None): Parameters ---------- - dynain_name : str, optional - dynain file from stress free configuration computation, by default None - robin_bcs : list[Callable], optional - A list of lambda functions to apply Robin-type BCs, by default None + dynain_name : str, default: None + Dynain file from stress-free configuration computation. + robin_bcs : list[Callable], default: None + List of lambda functions to apply Robin-type coundary conditions. Notes ----- - Do not need to write mesh files if dynain file is given. + You do not need to write mesh files if a Dynain file is given. 
""" self._update_main_db() @@ -843,7 +846,7 @@ def update(self, dynain_name: str = None, robin_bcs: list[Callable] = None): return def _update_main_db(self): - """Update the main .k file.""" + """Update the main K file.""" LOGGER.debug("Updating main keywords...") self.kw_database.main.append("$$- Unit system: g-mm-ms-N-MPa-mJ -$$") @@ -875,7 +878,7 @@ def _add_solution_controls( dtmax: float = 10.0, simulation_type: str = "quasi-static", ): - """Add solution controls, output controls and solver settings.""" + """Add solution controls, output controls, and solver settings.""" # add termination keywords self.kw_database.main.append(keywords.ControlTermination(endtim=end_time)) @@ -890,7 +893,7 @@ def _add_solution_controls( beta = 0.25 else: raise ValueError( - "Simulation type not recognized: Please choose either quasi-static or static" + "Simulation type is not recognized: Choose either 'quasi-static' or 'static'." ) # prefill_time = self.parameters["Material"]["Myocardium"]["Active"]["Prefill"] @@ -944,10 +947,10 @@ def _add_export_controls(self, dt_output_d3plot: float = 0.05, dt_output_icvout: Parameters ---------- - dt_output_d3plot : float, optional - Writes full D3PLOT results at this time-step spacing, by default 0.05 - dt_output_icvout : float, optional - Writes control volume results at this time-step spacing, by default 0.001 + dt_output_d3plot : float, default: 0.5 + Time-step spacing to write full D3PLOT results at. + dt_output_icvout : float, default: 0.001 + Time-step spacing to write control volume results at. 
""" # add output control self.kw_database.main.append(keywords.ControlOutput(npopt=1, neecho=1, ikedit=0, iflush=0)) @@ -1021,7 +1024,7 @@ def _update_material_db(self, add_active: bool = True, em_couple: bool = False): for part in self.model.parts: if isinstance(part.meca_material, MechanicalMaterialModel.DummyMaterial): # assign material for part if it's empty - LOGGER.info(f"Material of {part.name} will be assigned automatically.") + LOGGER.info(f"Material of {part.name} is assigned automatically.") if part.fiber: part.meca_material = self.settings.get_mechanical_material( required_type="anisotropic", ep_coupled=em_couple @@ -1138,7 +1141,7 @@ def _add_cap_bc(self, bc_type: _BoundaryConditionType): return def _get_contraint_caps(self): - """Get list of constraint caps depending on models.""" + """Get a list of constraint caps, depending on models.""" constraint_caps = [] if isinstance(self.model, LeftVentricle): @@ -1176,9 +1179,9 @@ def _add_springs_cap_edge( Notes ----- - Appends these to the boundary condition database. + This method appends these springs to the boundary condition database. """ - LOGGER.debug(f"Adding spring b.c. for cap: {cap.name} of type {cap.type}") + LOGGER.debug(f"Adding spring boundary condition for cap: {cap.name} of type {cap.type}") attached_nodes = cap.global_node_ids_edge @@ -1347,13 +1350,13 @@ def _sigmoid(z): uvc_l = self.model.mesh.point_data["apico-basal"] except KeyError: LOGGER.warning( - "No apico-basal is found in point data, pericardium spring won't be created." + "No apico-basal is found in point data. Pericardium spring won't be created." ) uvc_l = np.ones(self.model.mesh.GetNumberOfPoints()) if np.any(uvc_l < 0): LOGGER.warning( - "Negative normalized longitudinal coordinate detected." - "Changing {0} negative uvc_l values to 1".format(np.sum((uvc_l < 0))), + "Negative normalized longitudinal coordinate is detected." 
+ "Changing {0} negative uvc_l values to 1.".format(np.sum((uvc_l < 0))), ) uvc_l[uvc_l < 0] = 1 @@ -1367,31 +1370,32 @@ def write_robin_bc( surface: pv.PolyData, normal: np.ndarray = None, ) -> list: - """Create Robin BC on given surface. + """Create Robin boundary condition on a given surface. Parameters ---------- - robin_type : Literal["spring", "damper"] - Create spring or damper + robin_type : Literal["spring", "damper"] + Create spring or damper. constant : float - stiffness (MPa/mm) or viscosity (MPa/mm*ms) + Stiffness (MPa/mm) or viscosity (MPa/mm*ms). surface : pv.PolyData - Surface to apply BC, must contain point data '_global-point-ids'. - Will be scaled by nodal area and point data 'scale factor' if exists - normal : np.ndarray, optional - If no normal given, use nodal normals, by default None + Surface to apply boundary condition to. It must contain point data + ``_global-point-ids``. It is scaled by the nodal area and point data + scale factor if it exists. + normal : np.ndarray, default: None + Normal values. If no normal values are given, nodal normals are used. Returns ------- list - list of dyna input deck + List of the DYNA input deck. """ if surface.n_points == 0: - LOGGER.error("Surface is empty, no Robin BC is added.") + LOGGER.error("Surface is empty. No Robin boundary condition is added.") return [] if "_global-point-ids" not in surface.point_data: - raise ValueError("surface must contain pointdata '_global-point-ids'.") + raise ValueError("Surface must contain point data '_global-point-ids'.") # global node ids where to apply the BC # NOTE: if we pass in a SurfaceMesh object we could use the @@ -1399,7 +1403,7 @@ def write_robin_bc( nodes = surface["_global-point-ids"] # scale factor is nodal area - # Add area flag in case pyvista defaults change. + # Add area flag in case PyVista defaults change. 
surf2 = surface.compute_cell_sizes(length=False, volume=False, area=True) scale_factor = np.array( surf2.cell_data_to_point_data().point_data["Area"].copy(), dtype=np.float32 @@ -1423,7 +1427,7 @@ def write_robin_bc( # update offset self.id_offset["vector"] = sd_orientation_kw.vectors["vid"].to_numpy()[-1] - # create unique ids for keywords + # create unique IDs for keywords part_id = self.get_unique_part_id() section_id = self.get_unique_section_id() mat_id = self.get_unique_mat_id() @@ -1476,7 +1480,7 @@ def _update_cap_elements_db(self, add_mesh=True): Notes ----- - Loops over all the defined caps/valves. + This method loops over all the defined caps and valves. """ # material mat_null_id = self.get_unique_mat_id() @@ -1504,7 +1508,7 @@ def _update_cap_elements_db(self, add_mesh=True): for cap in caps: if cap.name in cap_names_used: # avoid to write mitral valve and triscupid valve twice - LOGGER.debug("Already created material for {}: skipping".format(cap.name)) + LOGGER.debug("Already created material for {}. Skipping.".format(cap.name)) continue cap.pid = self.get_unique_part_id() @@ -1523,7 +1527,7 @@ def _update_cap_elements_db(self, add_mesh=True): if cap.centroid is not None: if cap._node_set_id is None: - LOGGER.error("cap node set ID is not yet assigned") + LOGGER.error("Cap nodeset ID is not yet assigned.") exit() constraint = keywords.ConstrainedInterpolation( @@ -1568,7 +1572,7 @@ def _update_controlvolume_db(self, system_map: list[ControlVolume]): Parameters ---------- system_map : list[ControlVolume] - list of control volume + List of control volumes. """ def _create_null_part(): @@ -1658,10 +1662,10 @@ class ZeroPressureMechanicsDynaWriter(MechanicsDynaWriter): Notes ----- - Derived from MechanicsDynaWriter and consequently derives all keywords relevant - for simulations involving mechanics. 
This class does not write the - control volume keywords but adds the keyword for computing the stress - free configuration based on left/right cavity pressures instead. + This class is derived from the ``MechanicsDynaWriter`` class and consequently + derives all keywords relevant for simulations involving mechanics. This class + does not write the control volume keywords but rather adds the keyword for computing + the stress-free configuration based on left/right cavity pressures instead. """ @@ -1682,8 +1686,8 @@ Parameters ---------- - robin_bcs : list[Callable], optional - A list of lambda functions to apply Robin-type BCs, by default None + robin_bcs : list[Callable], default: None + List of lambda functions to apply Robin-type boundary conditions. """ # bc_settings = self.settings.mechanics.boundary_conditions @@ -1760,12 +1764,12 @@ return def _add_export_controls(self, dt_output_d3plot: float = 0.5): - """Rewrite method for zerop export. + """Rewrite the method for zerop export. Parameters ---------- - dt_output_d3plot : float, optional - Writes full D3PLOT results at this time-step spacing, by default 0.5 + dt_output_d3plot : float, default: 0.5 + Time-step spacing to write full D3PLOT results at. """ # add output control self.kw_database.main.append(keywords.ControlOutput(npopt=1, neecho=1, ikedit=0, iflush=0)) @@ -1799,7 +1803,7 @@ return def _add_solution_controls(self): - """Rewrite method for the zerop simulation.""" + """Rewrite the method for the zerop simulation.""" settings = copy.deepcopy(self.settings.stress_free) settings._remove_units() @@ -1862,8 +1866,8 @@ def _add_control_reference_configuration(self): # Notes # ----- - # LSDYNA stress reference configuration lead to a bug with this load, - # it seems due to define function, need to be investigated.
+ # LS-DYNA stress reference configuration leads to a bug with this load. + # It seems due to define function and must be investigated. # """ # cavities = [part.cavity for part in self.model.parts if part.cavity] # for cavity in cavities: @@ -1963,13 +1967,16 @@ def __init__(self, model: HeartModel, settings: SimulationSettings = None) -> No """Collection of keywords relevant for fiber generation.""" def update(self, rotation_angles=None): - """Update keyword database for Fiber generation: overwrites the inherited function.""" + """Update keyword database for fiber generation. + + This method overwrites the inherited function. + """ ## self._update_main_db() # needs updating if isinstance(self.model, (FourChamber, FullHeart)): LOGGER.warning( - "Atrium present in the model, these will be removed for ventricle fiber generation." + "Atrium are present in the model. These are removed for ventricle fiber generation." ) parts = [ @@ -2109,8 +2116,8 @@ def _update_ep_settings(self): def _update_create_fibers(self, rotation_angles): """Update the keywords for fiber generation.""" # collect relevant node and segment sets. - # node set: apex, base - # node set: endocardium, epicardium + # nodeset: apex, base + # nodeset: endocardium, epicardium # NOTE: could be better if basal nodes are extracted in the preprocessor # since that would allow you to robustly extract these nodessets using the # input data @@ -2127,7 +2134,7 @@ def _update_create_fibers(self, rotation_angles): None, ) - # collect node set ids (already generated previously) + # collect nodeset IDs (generated previously) node_sets_ids_epi = [ventricle.epicardium._node_set_id for ventricle in ventricles] node_sets_ids_endo = [] for ventricle in ventricles: @@ -2136,7 +2143,7 @@ def _update_create_fibers(self, rotation_angles): surf = self.model.mesh.get_surface(surface.id) if surf.n_cells == 0: LOGGER.debug( - f"Failed to collect node-set id for {surface.name}. Empty mesh." 
+ f"Failed to collect nodeset ID for {surface.name}. Empty mesh." ) continue node_sets_ids_endo.append(surface._node_set_id) @@ -2154,13 +2161,13 @@ def _update_create_fibers(self, rotation_angles): for cap in part.caps: nodes_base = np.append(nodes_base, cap.global_node_ids_edge) - # apex id [0] endocardium, [1] epicardum + # apex ID [0] endocardium, [1] epicardium apex_point = self.model.get_part("Left ventricle").apex_points[1] if "epicardium" not in apex_point.name: - raise ValueError("Expecting a point on the epicardium") - node_apex = apex_point.node_id # is this a global node id? + raise ValueError("Expecting a point on the epicardium.") + node_apex = apex_point.node_id # is this a global node ID? - # validate node set by removing nodes not part of the model without ventricles + # validate nodeset by removing nodes not part of the model without ventricles tet_ids_ventricles = np.empty((0), dtype=int) if septum: parts = ventricles + [septum] @@ -2174,7 +2181,7 @@ def _update_create_fibers(self, rotation_angles): # remove nodes that occur just in atrial part mask = np.isin(nodes_base, tetra_ventricles, invert=True) - LOGGER.debug("Removing {0} nodes from base nodes".format(np.sum(mask))) + LOGGER.debug("Removing {0} nodes from base nodes...".format(np.sum(mask))) nodes_base = nodes_base[np.invert(mask)] # create set parts for lv and rv myocardium @@ -2182,7 +2189,7 @@ def _update_create_fibers(self, rotation_angles): # switch between the various models to generate valid input decks if isinstance(self.model, LeftVentricle): - LOGGER.warning("Model type %s in development " % self.model.__class__.__name__) + LOGGER.warning("Model type %s is in development. 
" % self.model.__class__.__name__) # Define part set for myocardium part_list1_kw = keywords.SetPartList( @@ -2194,7 +2201,7 @@ def _update_create_fibers(self, rotation_angles): self.kw_database.create_fiber.extend([part_list1_kw]) - # combine node sets endocardium uing *SET_NODE_ADD: + # combine nodesets endocardium uing *SET_NODE_ADD: node_set_id_all_endocardium = self.get_unique_nodeset_id() set_add_kw = keywords.SetNodeAdd(sid=node_set_id_all_endocardium) @@ -2204,7 +2211,7 @@ def _update_create_fibers(self, rotation_angles): self.kw_database.create_fiber.append(set_add_kw) - # combine node sets epicardium: + # combine nodesets epicardium: node_set_id_all_epicardium = self.get_unique_nodeset_id() set_add_kw = keywords.SetNodeAdd(sid=node_set_id_all_epicardium) set_add_kw.options["TITLE"].active = True @@ -2311,7 +2318,7 @@ def _update_create_fibers(self, rotation_angles): self.kw_database.create_fiber.extend([part_list1_kw, part_list2_kw]) - # combine node sets endocardium uing *SET_SEGMENT_ADD: + # combine nodesets endocardium uing *SET_SEGMENT_ADD: node_set_id_all_endocardium = self.get_unique_nodeset_id() set_add_kw = keywords.SetNodeAdd(sid=node_set_id_all_endocardium) @@ -2321,7 +2328,7 @@ def _update_create_fibers(self, rotation_angles): self.kw_database.create_fiber.append(set_add_kw) - # combine node sets epicardium: + # combine nodesets epicardium: node_set_id_all_epicardium = self.get_unique_nodeset_id() set_add_kw = keywords.SetNodeAdd(sid=node_set_id_all_epicardium) @@ -2331,7 +2338,7 @@ def _update_create_fibers(self, rotation_angles): self.kw_database.create_fiber.append(set_add_kw) - # combine node sets epicardium and septum: + # combine nodesets epicardium and septum: node_set_all_but_left_endocardium = self.get_unique_nodeset_id() set_add_kw = keywords.SetNodeAdd(sid=node_set_all_but_left_endocardium) @@ -2474,15 +2481,18 @@ def __init__( """Collection of keywords relevant for Purkinje generation.""" def update(self): - """Update keyword 
database - overwrites the inherited function.""" + """Update keyword database. + + This method overwrites the inherited function. + """ ## self._update_main_db() # needs updating self._update_node_db() # can stay the same (could move to base class) if isinstance(self.model, (FourChamber, FullHeart)): LOGGER.warning( - "Atrium present in the model, " - "these will be removed for ventricle Purkinje generation." + "Atrium are present in the model. " + "These are removed for ventricle Purkinje generation." ) self._keep_ventricles() @@ -2544,12 +2554,12 @@ def _update_ep_settings(self): def _update_create_Purkinje(self): # noqa N802 """Update the keywords for Purkinje generation.""" # collect relevant node and segment sets. - # node set: apex, base - # node set: endocardium, epicardium + # nodeset: apex, base + # nodeset: endocardium, epicardium # NOTE: could be better if basal nodes are extracted in the preprocessor # since that would allow you to robustly extract these nodessets using the # input data - # The below is relevant for all models. + # What follows is relevant for all models. 
node_origin_left = np.empty(0, dtype=int) node_origin_right = np.empty(0, dtype=int) @@ -2722,7 +2732,7 @@ def _update_main_db(self): class ElectrophysiologyDynaWriter(BaseDynaWriter): - """Class for preparing the input for an Electrophysiology LS-DYNA simulation.""" + """Class for preparing the input for an electrophysiology LS-DYNA simulation.""" def __init__( self, @@ -2736,10 +2746,10 @@ def __init__( super().__init__(model=model, settings=settings) self.kw_database = ElectrophysiologyDecks() - """Collection of keywords relevant for Electrophysiology.""" + """Collection of keywords relevant for electrophysiology.""" def update(self): - """Update keyword database for Electrophysiology.""" + """Update keyword database for electrophysiology.""" # self._isolate_atria_and_ventricles() ## @@ -2756,8 +2766,8 @@ def update(self): self._update_segmentsets_db(add_cavities=True) - # TODO: check if no existing node set ids conflict with surface ids - # For now, new node sets should be created after calling + # TODO: check if no existing nodeset ids conflict with surface ids + # For now, new nodesets should be created after calling # self._update_nodesets_db() self._update_nodesets_db() self._update_parts_cellmodels() @@ -2792,7 +2802,7 @@ def _update_dummy_material_db(self): ) def _update_ep_material_db(self): - """Add EP material for each defined part.""" + """Add electrophysiology material for each defined part.""" material_settings = self.settings.electrophysiology.material solvertype = self.settings.electrophysiology.analysis.solvertype if solvertype == "Monodomain": @@ -2806,7 +2816,7 @@ def _update_ep_material_db(self): for part in self.model.parts: if isinstance(part.ep_material, EPMaterial.DummyMaterial): - LOGGER.info(f"Material of {part.name} will be assigned automatically.") + LOGGER.info(f"Material of {part.name} is assigned automatically.") if part.active: part.ep_material = EPMaterial.Active(sigma_fiber=sig1) else: @@ -2850,7 +2860,7 @@ def 
_create_myocardial_nodeset_layers(self): percent_endo = self.settings.electrophysiology.material.myocardium["percent_endo"] percent_mid = self.settings.electrophysiology.material.myocardium["percent_mid"] values = self.model.mesh.point_data["transmural"] - # Values from experimental data, see: + # Values from experimental data. See: # https://www.frontiersin.org/articles/10.3389/fphys.2019.00580/full th_endo = percent_endo th_mid = percent_endo + percent_mid @@ -2881,7 +2891,7 @@ def _create_myocardial_nodeset_layers(self): return endo_nodeset_id, mid_nodeset_id, epi_nodeset_id def _add_cell_model_keyword(self, matid: int, cellmodel: CellModel): - """Add cell model keyword to database.""" + """Add cell model keyword to the database.""" if isinstance(cellmodel, CellModel.Tentusscher): self._add_Tentusscher_keyword(matid=matid, params=cellmodel.to_dictionary()) else: @@ -2891,7 +2901,7 @@ def _add_Tentusscher_keyword(self, matid: int, params: dict): # noqa N802 cell_kw = keywords.EmEpCellmodelTentusscher(**{**params}) cell_kw.mid = matid # Note: bug in EmEpCellmodelTentusscher - # the following 2 parameters can not be assigned by above method + # the following 2 parameters cannot be assigned by above method cell_kw.gas_constant = 8314.472 cell_kw.faraday_constant = 96485.3415 @@ -2941,7 +2951,7 @@ def _update_ep_settings(self): macrodt = self.settings.electrophysiology.analysis.dtmax.m if macrodt > self.settings.mechanics.analysis.dtmax.m: LOGGER.info( - "EP Timestep > Mechanics Timestep. Setting EP Timestep to Mechanics Timestep" + "EP timestep > Mechanics timestep. Setting EP timestep to Mechanics timestep." 
) macrodt = self.settings.mechanics.analysis.dtmax.m @@ -3018,9 +3028,9 @@ def _add_stimulation_keyword(self, stim: Stimulation): eikonal_stim_content = "*EM_EP_EIKONAL\n" eikonal_stim_content += "$ eikId eikPaSet eikStimNS eikStimDF\n" - # TODO: get the right part set id + # TODO: get the right part set ID # setpart_kwds = self.kw_database.ep_settings.get_kwds_by_type() - # id of the eikonal solver (different eikonal solves + # ID of the eikonal solver (different eikonal solves # can be performed in different parts of the model) eikonal_id = 1 psid = 1 @@ -3043,14 +3053,14 @@ def _add_stimulation_keyword(self, stim: Stimulation): def get_default_stimulus_nodes(self) -> list[int]: """Get default stiumulus nodes. - 1/2 apex point(s) for Left/Bi-ventricle model. + 1/2 apex points for the left/bi-ventricle model. - Sinoatrial node for Fourchamber/Full heart model + Sinoatrial node for four-chamber or full-heart model. Returns ------- list[int] - 0-based node IDs to sitmulate + List of 0-based node IDs to stimulate. 
""" if isinstance(self.model, LeftVentricle): stim_nodes = [self.model.left_ventricle.apex_points[0].node_id] @@ -3187,9 +3197,9 @@ def _update_main_db(self): pass def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 - """Update keywords for Purkinje usage.""" + """Update keywords for Purkinje use.""" if not isinstance(self.model, (FullHeart, FourChamber, BiVentricle, LeftVentricle)): - LOGGER.error("Model type not recognized.") + LOGGER.error("Model type is not recognized.") return sid = self.get_unique_section_id() @@ -3315,7 +3325,7 @@ def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 self._add_cell_model_keyword(matid=pid, cellmodel=epmat.cell_model) # Build connectivity - # get edges in a 2 column format + # get edges in a two-column format edges = self.model.conduction_system.get_lines(netid).lines.reshape( (int(len(self.model.conduction_system.get_lines(netid).lines) / 3), 3) )[:, 1:] @@ -3331,11 +3341,11 @@ def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 connected_point_ids ] - # got ids in solid mesh of connected points + # got IDs in solid mesh of connected points kdtree = spatial.cKDTree(self.model.mesh.points) _, solid_connected_point_ids = kdtree.query(connected_points) - # compute writer point ids depending on previously written and connections to solid + # compute writer point IDs depending on previously written and connections to solid point_ids_to_write = np.zeros( self.model.conduction_system.get_lines(netid).number_of_points, dtype=int ) @@ -3358,7 +3368,7 @@ def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 + beam_point_offset_id ) - # replace point id values in edges + # replace point ID values in edges edges = np.vectorize(lambda idvalue: point_ids_to_write[idvalue])(edges) # write mesh @@ -3369,7 +3379,7 @@ def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 pid=pid, offset=beam_elem_id_offset, ) - # 
offset beam element id + # offset beam element ID beam_elem_id_offset += len(edges) # offset beam point id beam_point_offset_id += ( @@ -3377,7 +3387,7 @@ def _update_use_Purkinje(self, associate_to_segment: bool = True): # noqa N802 - len(connected_point_ids) - np.sum(mask_already_written) ) - # populate the already written ids variable for other beam networks + # populate the already written IDs variable for other beam networks point_ids_in_conductionsystem = self.model.conduction_system.get_lines(netid)[ "_global-point-ids" ] @@ -3494,15 +3504,15 @@ def _get_ep_material_kw(self, ep_mid: int, ep_material: EPMaterial): class ElectrophysiologyBeamsDynaWriter(ElectrophysiologyDynaWriter): - """Class for preparing the input for an Electrophysiology LS-DYNA simulation with beams only.""" + """Class for preparing the input for an electrophysiology LS-DYNA simulation with beams only.""" def __init__(self, model: HeartModel, settings: SimulationSettings = None) -> None: super().__init__(model=model, settings=settings) self.kw_database = ElectrophysiologyDecks() - """Collection of keywords relevant for Electrophysiology.""" + """Collection of keywords relevant for electrophysiology.""" def update(self): - """Update keyword database for Electrophysiology.""" + """Update keyword database for electrophysiology.""" # self._isolate_atria_and_ventricles() ## @@ -3551,14 +3561,14 @@ def update(self, dynain_name: str = None, robin_bcs=None): Parameters ---------- - dynain_name : str, optional - dynain file from stress free configuration computation, by default None - robin_bcs : list[Callable], optional - A list of lambda functions to apply Robin-type BCs, by default None + dynain_name : str, default: None + Dynain file from stress-free configuration computation. + robin_bcs : list[Callable], default: None + List of lambda functions to apply Robin-type boundary conditions. Notes ----- - Do not need to write mesh files if dynain file is given. 
+ You do not need to write mesh files if a Dynain file is given. """ if isinstance(self.model, FourChamber): self.model.left_atrium.fiber = True @@ -3598,9 +3608,9 @@ def _update_material_db(self, add_active: bool = True): class LaplaceWriter(BaseDynaWriter): - """Writer to set Laplace dirichlet problem.""" + """Writer to set Laplace Dirichlet problem.""" - # constant node set ID for atrial valves/caps + # constant nodeset ID for atrial valves/caps _CAP_NODESET_MAP = { CapType.RIGHT_INFERIOR_PULMONARY_VEIN: 1, CapType.LEFT_ATRIUM_APPENDAGE: 2, @@ -3618,22 +3628,22 @@ class LaplaceWriter(BaseDynaWriter): def __init__( self, model: HeartModel, type: Literal["uvc", "la_fiber", "ra_fiber", "D-RBM"], **kwargs ): - """Write thermal input to set up a Laplace dirichlet problem. + """Write thermal input to set up a Laplace Dirichlet problem. Parameters ---------- model : HeartModel - Heart model - type : Literal["uvc", "la_fiber", "ra_fiber", "D - simulation type + Heart model. + type : Literal["uvc", "la_fiber", "ra_fiber", "D-RBM"] + Simulation type. """ super().__init__(model=model) self.type = type - """problem type.""" + """Problem type.""" self.landmarks = kwargs - """landmarks can be `laa`, `raa`, `top`.""" + """Landmarks are ``laa``, ``raa``, and ``top``.""" self.target: pv.UnstructuredGrid = None - """target mesh related to the problem.""" + """Target mesh related to the problem.""" # remove unnecessary parts if self.type == "uvc" or self.type == "D-RBM": @@ -3670,12 +3680,12 @@ def __init__( def _update_ra_top_nodeset(self, atrium: pv.UnstructuredGrid): """ - Define right atrium top nodeset with node set id 10. + Define right atrium top nodeset with nodeset ID 10. Parameters ---------- atrium : pv.UnstructuredGrid - right atrium pyvista object + Right atrium PyVista object. 
""" if "top" in self.landmarks.keys(): top_ids = self._find_top_nodeset_by_geodesic(atrium) @@ -3690,10 +3700,10 @@ def _find_top_nodeset_by_cut(self, atrium: pv.UnstructuredGrid): """ Define right atrium top nodeset. - Cut through the center of TV, IVC and SVC, expecting to result in - 3 unconnected regions and the farthest is top. - This method may fail with varying geometries, then the user - needs to define the top landmarks. + Cut through the center of TV, IVC, and SVC, expecting to result in + three unconnected regions and the farthest is top. + This method might fail with varying geometries. If so, the user + must define the top landmarks. """ cut_center, cut_normal = self._define_ra_cut() @@ -3707,12 +3717,12 @@ def _find_top_nodeset_by_cut(self, atrium: pv.UnstructuredGrid): if np.max(x.point_data["RegionId"]) != 2: # Should only have 3 parts LOGGER.error("Cannot find top nodeset...") - raise ValueError("Please define top start/end points and re-run.") + raise ValueError("Define top start and end points and then re-run.") # get tricuspid-valve name tv_name = CapType.TRICUSPID_VALVE_ATRIUM.value - # compare closest point with TV nodes, top region should be far with TV node set + # compare closest point with TV nodes, top region should be far with TV nodeset tv_tree = spatial.cKDTree(atrium.points[atrium.point_data[tv_name] == 1]) min_dst = -1.0 for i in range(3): @@ -3745,7 +3755,7 @@ def _find_top_nodeset_by_geodesic(self, atrium: pv.UnstructuredGrid): return np.unique(np.array(top_ids)) def _define_ra_cut(self): - """Define a cut-plane using the three caps of right atrium.""" + """Define a cutplane using the three caps of the right atrium.""" for cap in self.model.parts[0].caps: if cap.type == CapType.TRICUSPID_VALVE_ATRIUM: tv_center = cap.centroid @@ -3759,18 +3769,18 @@ def _define_ra_cut(self): return cut_center, cut_normal def _update_ra_tricuspid_nodeset(self, atrium): - """Define nodeset for tricuspid_wall and tricuspid_septum.""" + """Define the 
nodeset for the tricuspid wall and septum.""" # get tricuspid-valve name tv_name = CapType.TRICUSPID_VALVE_ATRIUM.value - # The cut_normal is determined so 1st part will be septum and 2nd will be free + # cut_normal is determined so that the first part is the septum and the second is free cut_center, cut_normal = self._define_ra_cut() - # need a copied object to do clip, atrium will be corrupted otherwise + # need a copied object to do clip, atrium is corrupted otherwise septum, free_wall = copy.deepcopy(atrium).clip( origin=cut_center, normal=cut_normal, crinkle=True, return_clipped=True ) - # ids in full mesh + # IDs in full mesh tv_s_ids = septum["point_ids"][np.where(septum[tv_name] == 1)] tv_s_ids_sub = np.where(np.isin(atrium["point_ids"], tv_s_ids))[0] @@ -3792,19 +3802,19 @@ def _update_ra_tricuspid_nodeset(self, atrium): self.kw_database.node_sets.append(kw) def _update_atrial_caps_nodeset(self, atrium: pv.UnstructuredGrid): - """Define node sets for the caps.""" + """Define nodesets for the caps.""" for cap in self.model.parts[0].caps: # get node IDs for atrium mesh cap._mesh = self.model.mesh.get_surface(cap._mesh.id) ids_sub = np.where(np.isin(atrium["point_ids"], cap.global_node_ids_edge))[0] - # create node set + # create nodeset set_id = self._CAP_NODESET_MAP[cap.type] if set_id: # Can be None for LEFT_ATRIUM_APPENDAGE kw = create_node_set_keyword(ids_sub + 1, node_set_id=set_id, title=cap.name) self.kw_database.node_sets.append(kw) - # Add info to pyvista object, necessary for right atrial fibers. + # Add info to PyVista object, which is necessary for right atrial fibers. 
atrium[cap.type.value] = np.zeros(atrium.n_points, dtype=int) atrium[cap.type.value][ids_sub] = 1 @@ -3891,16 +3901,16 @@ def _update_ra_bc(self): self.add_case(case_id, job_name, set_ids, bc_values) def update(self): - """Update keyword database.""" + """Update the keyword database.""" # nodes node_kw = create_node_keyword(self.target.points) self.kw_database.nodes.append(node_kw) - # part and mat + # part and materials self._update_parts_materials_db() # elems - kw_elements = create_elemetn_solid_keyword( + kw_elements = create_element_solid_keyword( self.target.cells.reshape(-1, 5)[:, 1:] + 1, np.arange(1, self.target.n_cells + 1, dtype=int), self.model.parts[0].pid, @@ -3978,7 +3988,7 @@ def _update_uvc_bc(self): self.add_case(case_id, job_name, set_ids, bc_values) def _get_uvc_rotation_bc(self): - """Select node set on long axis plane.""" + """Select the nodeset on the long axis plane.""" mesh = copy.deepcopy(self.target) mesh["cell_ids"] = np.arange(0, mesh.n_cells, dtype=int) mesh["point_ids"] = np.arange(0, mesh.n_points, dtype=int) @@ -4009,7 +4019,7 @@ def _get_uvc_rotation_bc(self): return set1, set2, set3 def _update_parts_materials_db(self): - """Loop over parts defined in the model and creates keywords.""" + """Loop over parts defined in the model and create keywords.""" LOGGER.debug("Updating part keywords...") # add parts with a dataframe @@ -4056,27 +4066,27 @@ def _update_main_db(self): self.kw_database.main.append(keywords.ControlTermination(endtim=1, dtmin=1.0)) def _add_nodeset(self, nodes: np.ndarray, title: str, nodeset_id: int = None) -> int: - """Convert to local node ID and add to nodeset. + """Convert to local node ID and add to the nodeset. Parameters ---------- nodes : np.ndarray - Nodes global ids + Nodes global IDsx title : str - nodeset title - nodeset_id : int, optional - attribute a nodeset ID if not given, by default None + Nodeset title. + nodeset_id : int, default: None + Attribute a nodeset ID if one is not given. 
Returns ------- int - nodeset id + Nodeset ID. """ - # get node IDs of sub mesh + # get node IDs of submesh nodes = np.where(np.isin(self.target["point_ids"], nodes))[0] if nodeset_id is None: nodeset_id = self.get_unique_nodeset_id() - # lsdyna ID start with 1 + # LS-DYNA ID starts with 1 kw = create_node_set_keyword(nodes + 1, node_set_id=nodeset_id, title=title) self.kw_database.node_sets.append(kw) return nodeset_id @@ -4085,7 +4095,7 @@ def _update_drbm_bc(self): """Update D-RBM boundary conditions.""" def clean_node_set(nodes: np.ndarray, exclude_nodes: np.ndarray = None): - """Make sure there are no duplicate or excluded nodes, avoid thermal BC error.""" + """Ensure no duplicate or excluded nodes to avoid a thermal boundary condition error.""" nodes = np.unique(nodes) if exclude_nodes is not None: nodes = np.setdiff1d(nodes, exclude_nodes) @@ -4185,7 +4195,7 @@ def _get_rv_septum_endo_surface(self): if "endocardium" in surface.name and "septum" in surface.name: return surface - raise ValueError("Septum endocardium surface not found in right ventricle.") + raise ValueError("Septum endocardium surface is not found in right ventricle.") def _update_ventricular_caps_nodes(self): combined_av_mv = False # combined mitral and aortic valve @@ -4210,18 +4220,18 @@ def _update_ventricular_caps_nodes(self): return (pv_nodes, tv_nodes, av_nodes, mv_nodes), combined_av_mv def add_case(self, case_id: int, case_name: str, set_ids: list[int], bc_values: list[float]): - """Add case to keyword database. + """Add a case to the keyword database. Parameters ---------- case_id : int - case id + Case ID. case_name : str - case name, will be d3plot file name + Case name, which is the d3plot filename. set_ids : list[int] - node set id for boundary condition + List of nodeset IDs for boundary conditions. bc_values : list[float] - boundary condition values + List of boundary condition values. 
""" # declare case self.kw_database.main.append(keywords.Case(caseid=case_id, jobid=case_name, scid1=case_id)) diff --git a/src/ansys/health/heart/writer/heart_decks.py b/src/ansys/health/heart/writer/heart_decks.py index a717d9e5f..cd749c5ae 100644 --- a/src/ansys/health/heart/writer/heart_decks.py +++ b/src/ansys/health/heart/writer/heart_decks.py @@ -30,7 +30,7 @@ class BaseDecks: Notes ----- - Used to distinguish between each of the decks. + This class used to distinguish between each of the decks. This base class defines some commonly used decks. """ @@ -80,7 +80,7 @@ def __init__(self) -> None: class ElectrophysiologyDecks(BaseDecks): - """Useful decks for Electrophysiology simulations.""" + """Useful decks for electrophysiology simulations.""" def __init__(self) -> None: super().__init__() diff --git a/src/ansys/health/heart/writer/keyword_utils.py b/src/ansys/health/heart/writer/keyword_utils.py index 28783bacd..f03ec2e7e 100644 --- a/src/ansys/health/heart/writer/keyword_utils.py +++ b/src/ansys/health/heart/writer/keyword_utils.py @@ -32,17 +32,17 @@ def create_node_keyword(nodes: np.array, offset: int = 0) -> keywords.Node: - """Create node keyword from numpy array of nodes. + """Create node keyword from a NumPy array of nodes. Parameters ---------- nodes : np.array - Numpy array containing the node coordinates + NumPy array containing the node coordinates. Returns ------- keywords.Node - Formatted node keyword + Formatted node keyword. """ # create array with node ids nids = np.arange(0, nodes.shape[0], 1) + 1 @@ -60,20 +60,20 @@ def add_nodes_to_kw(nodes: np.array, node_kw: keywords.Node, offset: int = 0) -> Notes ----- - If nodes are already defined, this adds both the nodes in the previous - keyword and the specified array of nodes. Automatically computes - the index offset in case node_kw.nodes is not empty. + If nodes are already defined, this method adds both the nodes in the previous + keyword and the specified array of nodes. 
It automatically computes + the index offset in case ``node_kw.nodes`` is not empty. Parameters ---------- nodes : np.array - Numpy array of node coordinates to add - If (n,3), node ID will be continuous by offset. - If (n,4), first column will be node ID. + NumPy array of node coordinates to add. + If (n,3), the node ID is continuous by offset. + If (n,4), the first column is the node ID. node_kw : keywords.Node - Node keyword + Node keyword. offset : int - Node id offset + Node ID offset. """ if nodes.shape[1] == 4: df = pd.DataFrame(data=nodes, columns=node_kw.nodes.columns[0:4]) @@ -83,7 +83,7 @@ def add_nodes_to_kw(nodes: np.array, node_kw: keywords.Node, offset: int = 0) -> last_nid = node_kw.nodes.iloc[-1, 0] offset = last_nid - # create array with node ids + # create array with node IDs nids = np.arange(0, nodes.shape[0], 1) + offset + 1 # create dataframe @@ -105,18 +105,18 @@ def add_beams_to_kw( Notes ----- - If beams are already defined, this adds both the beams in the previous - keyword and the specified array of beams. Automatically computes - the index offset in case beam_kw.elements is not empty. + If beams are already defined, this method adds both the beams in the previous + keyword and the specified array of beams. It automatically computes + the index offset in case ``beam_kw.elements`` is not empty. Parameters ---------- beams : np.array - Numpy array of beam coordinates to add + NumPy array of beam coordinates to add. beam_kw : keywords.beam - beam keyword + Beam keyword. offset : int - beam id offset + Beam ID offset. """ # get beam id of last beam: if not beam_kw.elements.empty and offset == 0: @@ -150,20 +150,20 @@ def create_segment_set_keyword( Parameters ---------- segments : np.array - Array of node-indices that make up the segment. If three columns are provided + Array of node indices that make up the segment. 
If three columns are provided, it is assumed that the segments are triangular - segid : int, optional - Segment set ID, by default 1 - title : str, optional - Title of the segment set + segid : int, default: 1 + Segment set ID. + title : str, default: "" + Title of the segment set. Returns ------- keywords.SetSegment - Formatted segment set keyword + Formatted segment set keyword. """ if segments.shape[1] < 3 or segments.shape[1] > 4: - raise ValueError("expecting segments to have 3 or 4 columns") + raise ValueError("Expecting segments to have 3 or 4 columns.") if segments.shape[1] == 3: # segtype = "triangle" @@ -186,24 +186,24 @@ def create_segment_set_keyword( def create_node_set_keyword( node_ids: np.ndarray, node_set_id: int = 1, title: str = "nodeset-title" ) -> keywords.SetNodeList: - """Create node set. + """Create a nodeset. Parameters ---------- node_ids : np.array - List of node ids to include in the node set - node_set_id : int, optional - Id of the node set, by default 1 - title : str, optional - Title of the node set, by default 'nodeset-title' + List of node IDs to include in the nodeset. + node_set_id : int, default: 1 + ID of the nodeset. + title : str, default: ``'nodeset-title'`` + Title of the nodeset Returns ------- keywords.SetNodeList - Formatted node set + Formatted nodeset. """ if not isinstance(node_ids, (np.ndarray, int, np.int32, np.int64, list)): - raise TypeError("Expecting node ids to be array of ints, list of ints or single int") + raise TypeError("Expecting node IDs to be array or list of integers or a single integer.") if isinstance(node_ids, (int, np.int32, np.int64)): node_ids = [node_ids] @@ -220,11 +220,12 @@ def create_node_set_keyword( def create_element_shell_keyword( shells: np.array, part_id: int = 1, id_offset: int = 0 ) -> keywords.ElementShell: - """Create element shell keyword. + """Create an element shell keyword. Notes ----- - From a numpy array of elements. Each row corresponds to an element. 
+ This method creates an element shell keyword from a NumPy array of elements. + Each row corresponds to an element. """ num_shells = shells.shape[0] @@ -238,7 +239,7 @@ def create_element_shell_keyword( # element_type = "quad" columns = kw.elements.columns[0:6] else: - raise ValueError("Unknown type. Check size of shell array") + raise ValueError("Type is unknown. Check size of shell array.") # create element id array element_ids = np.arange(0, num_shells, 1) + 1 + id_offset @@ -250,7 +251,7 @@ def create_element_shell_keyword( return kw -def create_elemetn_solid_keyword( +def create_element_solid_keyword( elements: np.array, e_id: np.array, part_id: np.array, @@ -261,18 +262,18 @@ def create_elemetn_solid_keyword( Parameters ---------- elements : np.array - Numpy array of ints with element definition + NumPy array of integers with element definition. part_id : np.array - Part ids of each element + Part IDs of each element. e_id : np.array - Element ID - element_type : str, optional - Type of element to write, by default "tetra" + Element ID. + element_type : str, default: ``'tetra'`` + Type of element to write Returns ------- keywords.ElementSolid - Formatted *ELEMENT_SOLID keyword + Formatted *ELEMENT_SOLID keyword. """ kw = keywords.ElementSolid() df = pd.DataFrame(columns=kw.elements) @@ -308,22 +309,22 @@ def create_element_solid_ortho_keyword( Parameters ---------- elements : np.array - Numpy array of ints with element definition + NumPy array of integers with element definition a_vec : np.array - Vector specifying the A direction + Vector specifying the A direction. d_vec : np.array - Vector specifying the D direction + Vector specifying the D direction. part_id : np.array - Part ids of each element + Part IDs of each element. e_id : np.array - Element ID - element_type : str, optional - Type of element to write, by default "tetra" + Element ID. + element_type : str, default: ``'tetra'`` + Type of element to write. 
Returns ------- keywords.ElementSolidOrtho - Formatted *ELEMENT_SOLID_ORTHO keyword + Formatted *ELEMENT_SOLID_ORTHO keyword. """ kw = keywords.ElementSolidOrtho() @@ -385,11 +386,11 @@ def create_define_sd_orientation_kw( Parameters ---------- vectors : np.array - Array of shape Nx3 with the defined vector - vector_id_offset : int, optional - Offset for the vector id, by default 0 - iop : int, optional - Option, by default 0 + Array of shape Nx3 with the defined vector. + vector_id_offset : int, default: 0 + Offset for the vector ID. + iop : int, default: 0 + Option. """ kw = keywords.DefineSdOrientation() if len(vectors.shape) == 2: @@ -416,23 +417,24 @@ def create_discrete_elements_kw( scale_factor: Union[np.array, float], element_id_offset: int = 0, ) -> keywords.ElementDiscrete: - """Create discrete elements based on the input arguments. + """Create discrete elements based on input arguments. Parameters ---------- nodes : np.array - Nx2 Array with node ids used for the discrete element + Nx2 array with node IDs used for the discrete element. part_id : int - Part id of the discrete elements given + Part ID of the discrete elements given. vector_ids : Union[np.array, int] - Orientation ids (vector ids) along which the spring acts. - Can be either an array of length N, or a scalar integer + Orientation IDs (vector IDs) that the spring acts on. + You can provide either an array of length N or a scalar integer. scale_factor : Union[np.array, float] - Scale factor on forces, either an array of length N or scalar value - element_id_offset : int, optional - Offset value for the element ids, by default 0 - init_offset : float, optional - Initial offset: initial displacement or rotation at t=0, by default 0.0 + Scale factor on forces. You can provide either an array of length N + or a scalar value. + element_id_offset : int, default: 0 + Offset value for the element IDs. 
+ init_offset : float, default: 0.0 + Initial offset, which is the initial displacement or rotation at t=0. """ num_elements = nodes.shape[0] @@ -462,23 +464,23 @@ def create_discrete_elements_kw( def get_list_of_used_ids(keyword_db: Deck, keyword_str: str) -> np.ndarray: - """Get array of used ids in the database. + """Get array of used IDs in the database. Notes ----- - E.g. for *SECTION, *PART and *MAT ids + For example, for *SECTION, you would get *PART and *MAT IDs. Parameters ---------- database : Deck - Database of keywords + Database of keywords. keyword : str - Keyword which to find + Keyword to find. Returns ------- np.ndarray - Array of ids (ints) which are already used + Array of IDs (integers) that are already used """ ids = np.empty(0, dtype=int) @@ -513,13 +515,13 @@ def get_list_of_used_ids(keyword_db: Deck, keyword_str: str) -> np.ndarray: if "SEGMENT" in kw.subkeyword: ids = np.append(ids, kw.sid) - # special treatment for node sets + # special treatment for nodesets if keyword_str == valid_kws[4]: for kw in keyword_db.get_kwds_by_type("SET"): if "NODE" in kw.subkeyword: ids = np.append(ids, kw.sid) - # special treatment for node sets + # special treatment for nodesets if keyword_str == valid_kws[6]: for kw in keyword_db.get_kwds_by_type("SET"): if "PART" in kw.subkeyword: @@ -542,7 +544,7 @@ def fast_element_writer( Notes ----- - Use this as an alternative to the dynalib writer + Use this method as an alternative to the dynalib writer. """ # TODO: generalize this writer diff --git a/src/ansys/health/heart/writer/material_keywords.py b/src/ansys/health/heart/writer/material_keywords.py index a370cd2e0..43e56faa1 100644 --- a/src/ansys/health/heart/writer/material_keywords.py +++ b/src/ansys/health/heart/writer/material_keywords.py @@ -25,10 +25,7 @@ Notes ----- -E.g.: -Mat295 -Mat077 -MatNull +Examples of material cards include Mat295, Mat077, MatNull. 
""" @@ -53,7 +50,7 @@ class MaterialCap(keywords.MatNull): Parameters ---------- keywords : keywords.MatNull - Inherits from Null type material + Inherits from the Null type material. """ def __init__(self, mid: int = 1): @@ -65,8 +62,16 @@ class MaterialNeoHook(custom_keywords.Mat077H): Parameters ---------- - Mat077H : Parent class - Parent class from which this material is derived + mid : int + Material ID. + rho : float + Density of the material. + c10 : float + First coefficient of the material. + nu : float + Poisson's ratio. + kappa : float + Bulk modulus. """ def __init__( @@ -83,7 +88,7 @@ def __init__( class MaterialHGOMyocardium(keywords.Mat295): - """HGO Material model - derived from Mat295.""" + """HGO material model, which is derived from Mat295.""" def __init__(self, id: int, mat: Mat295, ignore_active: bool = False): """Init a keyword of *mat295. @@ -91,11 +96,11 @@ def __init__(self, id: int, mat: Mat295, ignore_active: bool = False): Parameters ---------- id : int - material ID - mat : MAT295 - material data - ignore_active : bool, optional - IF igonre active module (e.g. for stress-free), by default False + Material ID. + mat : Mat295 + Material data. + ignore_active : bool, default: False + Whether to ignore the active module. For example, for stress-free. """ # 1st line super().__init__(mid=id) @@ -164,7 +169,7 @@ def active_curve( Parameters ---------- curve_name : str - Type of curve to compute + Type of curve to compute. """ # time array # T = np.arange( 0, endtime, timestep )