diff --git a/.ci/generate_operators_doc.py b/.ci/generate_operators_doc.py
deleted file mode 100644
index 7ad34ec33ca..00000000000
--- a/.ci/generate_operators_doc.py
+++ /dev/null
@@ -1,234 +0,0 @@
-import argparse
-from pathlib import Path
-
-from jinja2 import Template
-
-from ansys.dpf import core as dpf
-from ansys.dpf.core.changelog import Changelog
-from ansys.dpf.core.core import load_library
-from ansys.dpf.core.dpf_operator import available_operator_names
-
-
-def initialize_server(ansys_path=None, include_composites=False, include_sound=False):
-    server = dpf.start_local_server(ansys_path=ansys_path)
-    print(server.plugins)
-    print(f"Ansys Path: {server.ansys_path}")
-    print(f"Server Info: {server.info}")
-    print(f"Server Context: {server.context}")
-    print(f"Server Config: {server.config}")
-    print(f"Server version: {dpf.global_server().version}")
-    if include_composites:
-        print("Loading Composites Plugin")
-        load_library(
-            filename=Path(server.ansys_path)
-            / "dpf"
-            / "plugins"
-            / "dpf_composites"
-            / "composite_operators.dll",
-            name="composites",
-        )
-    if include_sound:
-        print("Loading Acoustics Plugin")
-        load_library(
-            filename=Path(server.ansys_path) / "Acoustics" / "SAS" / "ads" / "dpf_sound.dll",
-            name="sound",
-        )
-    print(f"Loaded plugins: {list(server.plugins.keys())}")
-    return server
-
-
-def fetch_doc_info(server, operator_name):
-    spec = dpf.Operator.operator_specification(op_name=operator_name, server=server)
-    input_info = []
-    output_info = []
-    configurations_info = []
-    for input_pin in spec.inputs:
-        input = spec.inputs[input_pin]
-        input_info.append(
-            {
-                "pin_number": input_pin,
-                "name": input.name,
-                "types": [str(t) for t in input._type_names],
-                "document": input.document,
-                "optional": input.optional,
-            }
-        )
-    for output_pin in spec.outputs:
-        output = spec.outputs[output_pin]
-        output_info.append(
-            {
-                "pin_number": output_pin,
-                "name": output.name,
-                "types": [str(t) for t in output._type_names],
-                "document": output.document,
-                "optional": output.optional,
-            }
-        )
-    for configuration_key in spec.config_specification:
-        configuration = spec.config_specification[configuration_key]
-        configurations_info.append(
-            {
-                "name": configuration.name,
-                "types": [str(t) for t in configuration.type_names],
-                "document": configuration.document,
-                "default_value": configuration.default_value_str,
-            }
-        )
-    properties = spec.properties
-    plugin = properties.pop("plugin", "N/A")
-
-    category = properties.pop("category", None)
-
-    scripting_name = properties.pop("scripting_name", None)
-    if category and scripting_name:
-        full_name = category + "." + scripting_name
-    else:
-        full_name = None
-
-    user_name = properties.pop("user_name", operator_name)
-
-    # Retrieve version and changelog using the Changelog class
-    if hasattr(spec, "changelog") and isinstance(spec.changelog, dpf.GenericDataContainer):
-        changelog_gdc = spec.changelog
-        changelog = Changelog(gdc=changelog_gdc, server=server)
-        last_version = changelog.last_version
-        changelog_entries = [
-            f"Version {str(version)}: {changelog[version]}" for version in changelog.versions
-        ]
-    else:
-        last_version = "0.0.0"
-        changelog_entries = [f"Version {last_version}: Initial release."]
-
-    op_friendly_name = user_name
-    if category:
-        op_friendly_name = category + ":" + op_friendly_name
-
-    license = properties.pop("license", "None")
-
-    exposure = properties.pop("exposure", "private")
-    scripting_info = {
-        "category": category,
-        "plugin": plugin,
-        "scripting_name": scripting_name,
-        "full_name": full_name,
-        "internal_name": operator_name,
-        "license": license,
-        "version": str(last_version),  # Include last version in scripting_info
-        "changelog": changelog_entries,  # Include all changelog entries
-    }
-
-    return {
-        "operator_name": op_friendly_name,
-        "operator_description": spec.description,
-        "inputs": input_info,
-        "outputs": output_info,
-        "configurations": configurations_info,
-        "scripting_info": scripting_info,
-        "exposure": exposure,
-    }
-
-
-def get_plugin_operators(server, plugin_name):
-    operators = available_operator_names(server)
-    plugin_operators = []
-    for operator_name in operators:
-        spec = dpf.Operator.operator_specification(op_name=operator_name, server=server)
-        if "plugin" in spec.properties and spec.properties["plugin"] == plugin_name:
-            plugin_operators.append(operator_name)
-    return plugin_operators
-
-
-def generate_operator_doc(server, operator_name, include_private):
-    operator_info = fetch_doc_info(server, operator_name)
-    scripting_name = operator_info["scripting_info"]["scripting_name"]
-    category = operator_info["scripting_info"]["category"]
-    if scripting_name:
-        file_name = scripting_name
-    else:
-        file_name = operator_name
-    if "::" in file_name:
-        file_name = file_name.replace("::", "_")
-    if not include_private and operator_info["exposure"] == "private":
-        return
-    script_path = Path(__file__)
-    root_dir = script_path.parent.parent
-    template_dir = Path(root_dir) / "doc" / "source" / "operators_doc" / "operator-specifications"
-    category_dir = Path(template_dir) / category
-    if category is not None:
-        category_dir.mkdir(parents=True, exist_ok=True)  # Ensure all parent directories are created
-        file_dir = category_dir
-    else:
-        file_dir = template_dir
-    with Path.open(Path(template_dir) / "operator_doc_template.md", "r") as file:
-        template = Template(file.read())
-
-    output = template.render(operator_info)
-    with Path.open(Path(file_dir) / f"{file_name}.md", "w") as file:
-        file.write(output)
-
-
-def generate_toc_tree(docs_path):
-    # Target the operator-specifications folder for iteration
-    # operator_specs_path = docs_path / "operator-specifications"
-    data = []
-    for folder in docs_path.iterdir():
-        if folder.is_dir():  # Ensure 'folder' is a directory
-            category = folder.name
-            operators = []  # Reset operators for each category
-            for file in folder.iterdir():
-                if (
-                    file.is_file() and file.suffix == ".md"
-                ):  # Ensure 'file' is a file with .md extension
-                    file_name = file.name
-                    file_path = f"{category}/{file_name}"
-                    operator_name = file_name.replace("_", " ").replace(".md", "")
-                    operators.append({"operator_name": operator_name, "file_path": file_path})
-            data.append({"category": category, "operators": operators})
-
-    # Render the Jinja2 template
-    template_path = docs_path / "toc_template.j2"
-    with Path.open(template_path, "r") as template_file:
-        template = Template(template_file.read())
-    output = template.render(data=data)  # Pass 'data' as a named argument
-
-    # Write the rendered output to toc.yml at the operators_doc level
-    # toc_path = docs_path / "toc.yml"
-    with Path.open(docs_path / "toc.yml", "w") as file:
-        file.write(output)
-
-
-def main():
-    parser = argparse.ArgumentParser(description="Fetch available operators")
-    parser.add_argument("--plugin", help="Filter operators by plugin")
-    parser.add_argument(
-        "--ansys_path", default=None, help="Path to Ansys DPF Server installation directory"
-    )
-    parser.add_argument("--include_private", action="store_true", help="Include private operators")
-    parser.add_argument(
-        "--include_composites", action="store_true", help="Include composites operators"
-    )
-    parser.add_argument("--include_sound", action="store_true", help="Include sound operators")
-    args = parser.parse_args()
-    desired_plugin = args.plugin
-
-    server = initialize_server(args.ansys_path, args.include_composites, args.include_sound)
-    if desired_plugin is None:
-        operators = available_operator_names(server)
-    else:
-        operators = get_plugin_operators(server, desired_plugin)
-    for operator_name in operators:
-        generate_operator_doc(server, operator_name, args.include_private)
-
-    docs_path = (
-        Path(__file__).parent.parent
-        / "doc"
-        / "source"
-        / "operators_doc"
-        / "operator-specifications"
-    )
-    print(docs_path)
-    generate_toc_tree(docs_path)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/.github/workflows/scripts/separate_long_core_tests.ps1 b/.github/workflows/scripts/separate_long_core_tests.ps1
index d97dcd4199b..46ae1108179 100644
--- a/.github/workflows/scripts/separate_long_core_tests.ps1
+++ b/.github/workflows/scripts/separate_long_core_tests.ps1
@@ -7,6 +7,7 @@ New-Item -Path ".\" -Name "test_remote_workflow" -ItemType "directory"
 New-Item -Path ".\" -Name "test_remote_operator" -ItemType "directory"
 New-Item -Path ".\" -Name "test_service" -ItemType "directory"
 New-Item -Path ".\" -Name "test_custom_type_field" -ItemType "directory"
+New-Item -Path ".\" -Name "test_documentation" -ItemType "directory"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_launcher\"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_server\"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_local_server\"
@@ -16,6 +17,7 @@ Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_workflow\"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_remote_operator\"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_service\"
 Copy-Item -Path "tests\conftest.py" -Destination ".\test_custom_type_field\"
+Copy-Item -Path "tests\conftest.py" -Destination ".\test_documentation\"
 Copy-Item -Path "tests\test_launcher.py" -Destination ".\test_launcher\"
 Copy-Item -Path "tests\test_server.py" -Destination ".\test_server\"
 Copy-Item -Path "tests\test_local_server.py" -Destination ".\test_local_server\"
@@ -25,6 +27,7 @@ Copy-Item -Path "tests\test_remote_workflow.py" -Destination ".\test_remote_work
 Copy-Item -Path "tests\test_remote_operator.py" -Destination ".\test_remote_operator\"
 Copy-Item -Path "tests\test_service.py" -Destination ".\test_service\"
 Copy-Item -Path "tests\test_custom_type_field.py" -Destination ".\test_custom_type_field\"
+Copy-Item -Path "tests\test_documentation.py" -Destination ".\test_documentation\"
 Remove-Item -Path "tests\test_server.py"
 Remove-Item -Path "tests\test_launcher.py"
 Remove-Item -Path "tests\test_local_server.py"
@@ -33,4 +36,5 @@ Remove-Item -Path "tests\test_workflow.py"
 Remove-Item -Path "tests\test_remote_workflow.py"
 Remove-Item -Path "tests\test_remote_operator.py"
 Remove-Item -Path "tests\test_service.py"
-Remove-Item -Path "tests\test_custom_type_field.py"
\ No newline at end of file
+Remove-Item -Path "tests\test_custom_type_field.py"
+Remove-Item -Path "tests\test_documentation.py"
\ No newline at end of file
diff --git a/.github/workflows/test_docker.yml b/.github/workflows/test_docker.yml
index 425d2dfaf74..4099956cfc8 100644
--- a/.github/workflows/test_docker.yml
+++ b/.github/workflows/test_docker.yml
@@ -205,6 +205,15 @@ jobs:
           command: |
             pytest $DEBUG $COVERAGE $RERUNS --junitxml=../tests/junit/test-results12.xml tests/operators/.
 
+      - name: "Test Documentation"
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 4
+          max_attempts: 2
+          shell: bash
+          command: |
+            pytest $DEBUG $COVERAGE $RERUNS --junitxml=../tests/junit/test-results13.xml test_documentation/.
+
 #      - name: "Test API Entry"
 #        shell: bash
 #        working-directory: tests
diff --git a/pyproject.toml b/pyproject.toml
index 7ba8653cb7f..a33cf9b1e2e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -60,12 +60,20 @@ plotting = [
     "imageio-ffmpeg",
 ]
 
+operator_doc = [
+    # Operator documentation generation
+    "jinja2",
+]
+
 [project.urls]
 Homepage = "https://dpf.docs.pyansys.com/"
 Documentation = "https://dpf.docs.pyansys.com/"
 Source = "https://github.com/ansys/pydpf-core"
 Tracker = "https://github.com/ansys/pydpf-core/issues"
 
+[project.scripts]
+dpf_generate_operators_doc = "ansys.dpf.core.documentation.generate_operators_doc:run_with_args"
+
 [tool.ruff]
 line-length = 100
 
@@ -115,4 +123,5 @@ where = ["src"]
 
 [tool.setuptools.package-data]
 "ansys.dpf.gatebin" = ["*.so", "*.dll"]
+"ansys.dpf.core.documentation" = ["toc_template.j2", "operator_doc_template.md"]
diff --git a/requirements/requirements_test.txt b/requirements/requirements_test.txt
index a8193452a45..cdf07ca69b4 100644
--- a/requirements/requirements_test.txt
+++ b/requirements/requirements_test.txt
@@ -3,6 +3,7 @@ coverage==7.10.6
 graphviz==0.21
 imageio==2.37.0
 imageio-ffmpeg==0.6.0
+jinja2==3.1.6
 pypandoc_binary==1.15
 pytest==8.4.2
 pytest-cov==6.2.1
diff --git a/src/ansys/dpf/core/__init__.py b/src/ansys/dpf/core/__init__.py
index b33f850a3a2..08a256525f6 100644
--- a/src/ansys/dpf/core/__init__.py
+++ b/src/ansys/dpf/core/__init__.py
@@ -99,6 +99,7 @@
     AvailableServerContexts,
     LicenseContextManager
 )
+from ansys.dpf.core.server_types import AnyServerType  # noqa: F401
 from ansys.dpf.core.unit_system import UnitSystem, unit_systems
 from ansys.dpf.core.incremental import IncrementalHelper, split_workflow_in_chunks
 from ansys.dpf.core.any import Any
diff --git a/src/ansys/dpf/core/documentation/__init__.py b/src/ansys/dpf/core/documentation/__init__.py
new file mode 100644
index 00000000000..6aa139810bb
--- /dev/null
+++ b/src/ansys/dpf/core/documentation/__init__.py
@@ -0,0 +1,22 @@
+# Copyright (C) 2020 - 2025 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+"""Documentation generation tools."""
diff --git a/src/ansys/dpf/core/documentation/generate_operators_doc.py b/src/ansys/dpf/core/documentation/generate_operators_doc.py
new file mode 100644
index 00000000000..f35d3f28345
--- /dev/null
+++ b/src/ansys/dpf/core/documentation/generate_operators_doc.py
@@ -0,0 +1,402 @@
+# Copyright (C) 2020 - 2025 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+"""Generation of Markdown documentation source files for operators of a given DPF installation."""
+
+from __future__ import annotations
+
+import argparse
+from os import PathLike
+from pathlib import Path
+
+from ansys.dpf import core as dpf
+from ansys.dpf.core.changelog import Changelog
+from ansys.dpf.core.core import load_library
+from ansys.dpf.core.dpf_operator import available_operator_names
+
+
+class Jinja2ImportError(ModuleNotFoundError):  # pragma: nocover
+    """Error raised when Jinja2 could not be imported during operator documentation generation."""
+
+    def __init__(
+        self,
+        msg="To generate Markdown documentation of operators, please install jinja2 with:\n"
+        "pip install jinja2",
+    ):
+        ModuleNotFoundError.__init__(self, msg)
+
+
+try:
+    import jinja2
+except ModuleNotFoundError:  # pragma: nocover
+    raise Jinja2ImportError
+
+
+def initialize_server(
+    ansys_path: str | PathLike = None,
+    include_composites: bool = False,
+    include_sound: bool = False,
+    verbose: bool = False,
+) -> dpf.AnyServerType:
+    """Initialize a DPF server for a given installation folder by loading required plugins.
+
+    Parameters
+    ----------
+    ansys_path:
+        Path to the DPF installation to use to start a server.
+    include_composites:
+        Whether to generate documentation for operators of the Composites plugin.
+    include_sound:
+        Whether to generate documentation for operators of the Sound DPF plugin.
+    verbose:
+        Whether to print progress information.
+
+    Returns
+    -------
+    server:
+        A running DPF server to generate operator documentation for.
+
+    """
+    server = dpf.start_local_server(ansys_path=ansys_path)
+    if verbose:  # pragma: nocover
+        print(f"Ansys Path: {server.ansys_path}")
+        print(f"Server Info: {server.info}")
+        print(f"Server Context: {server.context}")
+        print(f"Server Config: {server.config}")
+        print(f"Server version: {dpf.global_server().version}")
+    if include_composites:  # pragma: nocover
+        if verbose:
+            print("Loading Composites Plugin")
+        if server.os == "nt":
+            binary_name = "composite_operators.dll"
+        else:
+            binary_name = "libcomposite_operators.so"
+        load_library(
+            filename=Path(server.ansys_path) / "dpf" / "plugins" / "dpf_composites" / binary_name,
+            name="composites",
+        )
+    if include_sound and server.os == "nt":  # pragma: nocover
+        if verbose:
+            print("Loading Acoustics Plugin")
+        load_library(
+            filename=Path(server.ansys_path) / "Acoustics" / "SAS" / "ads" / "dpf_sound.dll",
+            name="sound",
+        )
+    if verbose:  # pragma: nocover
+        print(f"Loaded plugins: {list(server.plugins.keys())}")
+    return server
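> **Editor's note.** For context while reading the rest of the module, a minimal sketch of using `initialize_server`, assuming a local DPF installation is discoverable (no explicit install path):

```python
from ansys.dpf.core.documentation.generate_operators_doc import initialize_server

# ansys_path=None lets DPF discover the default local installation;
# pass an explicit installation path otherwise.
server = initialize_server(ansys_path=None, include_composites=False, verbose=True)
print(list(server.plugins.keys()))  # names of the plugins loaded on the server
```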
+
+
+def fetch_doc_info(server: dpf.AnyServerType, operator_name: str) -> dict:
+    """Fetch information about the specifications of a given operator.
+
+    Parameters
+    ----------
+    server:
+        A DPF server to query the specifications of the operator.
+    operator_name:
+        The name of the operator of interest.
+
+    Returns
+    -------
+    doc_info:
+        Information about the operator structured for use with the documentation template.
+
+    """
+    spec = dpf.Operator.operator_specification(op_name=operator_name, server=server)
+    input_info = []
+    output_info = []
+    configurations_info = []
+    for input_pin in spec.inputs:
+        input_pin_info = spec.inputs[input_pin]
+        input_info.append(
+            {
+                "pin_number": input_pin,
+                "name": input_pin_info.name,
+                "types": [str(t) for t in input_pin_info._type_names],
+                "document": input_pin_info.document,
+                "optional": input_pin_info.optional,
+            }
+        )
+    for output_pin in spec.outputs:
+        output = spec.outputs[output_pin]
+        output_info.append(
+            {
+                "pin_number": output_pin,
+                "name": output.name,
+                "types": [str(t) for t in output._type_names],
+                "document": output.document,
+                "optional": output.optional,
+            }
+        )
+    for configuration_key in spec.config_specification:
+        configuration = spec.config_specification[configuration_key]
+        configurations_info.append(
+            {
+                "name": configuration.name,
+                "types": [str(t) for t in configuration.type_names],
+                "document": configuration.document,
+                "default_value": configuration.default_value_str,
+            }
+        )
+    properties = spec.properties
+    plugin = properties.pop("plugin", "N/A")
+
+    category = properties.pop("category", None)
+
+    scripting_name = properties.pop("scripting_name", None)
+    if category and scripting_name:
+        full_name = category + "." + scripting_name
+    else:
+        full_name = None
+
+    user_name = properties.pop("user_name", operator_name)
+
+    # Retrieve version and changelog using the Changelog class
+    if hasattr(spec, "changelog") and isinstance(spec.changelog, dpf.GenericDataContainer):
+        changelog_gdc = spec.changelog
+        changelog = Changelog(gdc=changelog_gdc, server=server)
+        last_version = changelog.last_version
+        changelog_entries = [
+            f"Version {str(version)}: {changelog[version]}" for version in changelog.versions
+        ]
+    else:
+        last_version = "0.0.0"
+        changelog_entries = [f"Version {last_version}: Initial release."]
+
+    op_friendly_name = user_name
+    if category:
+        op_friendly_name = category + ":" + op_friendly_name
+
+    license_type = properties.pop("license", "None")
+
+    exposure = properties.pop("exposure", "private")
+    scripting_info = {
+        "category": category,
+        "plugin": plugin,
+        "scripting_name": scripting_name,
+        "full_name": full_name,
+        "internal_name": operator_name,
+        "license": license_type,
+        "version": str(last_version),  # Include last version in scripting_info
+        "changelog": changelog_entries,  # Include all changelog entries
+    }
+
+    return {
+        "operator_name": op_friendly_name,
+        "operator_description": spec.description,
+        "inputs": input_info,
+        "outputs": output_info,
+        "configurations": configurations_info,
+        "scripting_info": scripting_info,
+        "exposure": exposure,
+    }
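> **Editor's note.** The dictionary returned by `fetch_doc_info` mirrors the fields consumed by `operator_doc_template.md`. A sketch of inspecting it, assuming `server` comes from `initialize_server()` above and using the `forward` utility operator that the tests further down rely on:

```python
info = fetch_doc_info(server, "forward")
print(info["operator_name"])              # friendly name, e.g. "utility:forward"
print(info["scripting_info"]["version"])  # last changelog version as a string
for pin in info["inputs"]:
    # pin numbers, names, accepted types, and whether the pin is optional
    print(pin["pin_number"], pin["name"], pin["types"], pin["optional"])
```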
+
+
+def get_plugin_operators(server: dpf.AnyServerType, plugin_name: str) -> list[str]:
+    """Get the list of operators for a given plugin.
+
+    Parameters
+    ----------
+    server:
+        DPF server to query for the list of operators.
+    plugin_name:
+        Name of the plugin of interest.
+
+    Returns
+    -------
+    operator_list:
+        List of names of operators available on the server for the given plugin.
+
+    """
+    operators = available_operator_names(server)
+    plugin_operators = []
+    for operator_name in operators:
+        spec = dpf.Operator.operator_specification(op_name=operator_name, server=server)
+        if "plugin" in spec.properties and spec.properties["plugin"] == plugin_name:
+            plugin_operators.append(operator_name)
+    return plugin_operators
+
+
+def generate_operator_doc(
+    server: dpf.AnyServerType, operator_name: str, include_private: bool, output_path: Path
+):
+    """Write the Markdown documentation page for a given operator on a given DPF server.
+
+    Parameters
+    ----------
+    server:
+        DPF server of interest.
+    operator_name:
+        Name of the operator of interest.
+    include_private:
+        Whether to generate the documentation if the operator is private.
+    output_path:
+        Path to write the operator documentation at.
+
+    """
+    operator_info = fetch_doc_info(server, operator_name)
+    scripting_name = operator_info["scripting_info"]["scripting_name"]
+    category = operator_info["scripting_info"]["category"]
+    if scripting_name:
+        file_name = scripting_name
+    else:
+        file_name = operator_name
+    if "::" in file_name:
+        file_name = file_name.replace("::", "_")
+    if not include_private and operator_info["exposure"] == "private":
+        return
+    template_path = Path(__file__).parent / "operator_doc_template.md"
+    spec_folder = output_path / "operator-specifications"
+    spec_folder.mkdir(parents=True, exist_ok=True)
+    if category is not None:
+        # Only build the category path when a category exists, to avoid joining Path with None
+        category_dir = spec_folder / category
+        category_dir.mkdir(parents=True, exist_ok=True)  # Ensure all parent directories are created
+        file_dir = category_dir
+    else:
+        file_dir = spec_folder
+    with Path.open(template_path, "r") as file:
+        template = jinja2.Template(file.read())
+
+    output = template.render(operator_info)
+    with Path.open(Path(file_dir) / f"{file_name}.md", "w") as file:
+        file.write(output)
+
+
+def generate_toc_tree(docs_path: Path):
+    """Write the global toc.yml file for the DPF documentation based on the operators found.
+
+    Parameters
+    ----------
+    docs_path:
+        Path to the root of the DPF documentation sources.
+
+    """
+    data = []
+    specs_path = docs_path / "operator-specifications"
+    for folder in specs_path.iterdir():
+        if folder.is_dir():  # Ensure 'folder' is a directory
+            category = folder.name
+            operators = []  # Reset operators for each category
+            for file in folder.iterdir():
+                if (
+                    file.is_file() and file.suffix == ".md"
+                ):  # Ensure 'file' is a file with .md extension
+                    file_name = file.name
+                    file_path = f"{category}/{file_name}"
+                    operator_name = file_name.replace("_", " ").replace(".md", "")
+                    operators.append({"operator_name": operator_name, "file_path": file_path})
+            data.append({"category": category, "operators": operators})
+
+    # Render the Jinja2 template
+    template_path = Path(__file__).parent / "toc_template.j2"
+    with Path.open(template_path, "r") as template_file:
+        template = jinja2.Template(template_file.read())
+    output = template.render(data=data)  # Pass 'data' as a named argument
+
+    # Write the rendered output to toc.yml at the operators_doc level
+    with Path.open(docs_path / "toc.yml", "w") as file:
+        file.write(output)
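> **Editor's note.** `generate_operator_doc` and `generate_toc_tree` compose as follows; a minimal sketch rendering a single operator page into a hypothetical output folder, then building the matching `toc.yml` (again assuming `server` from `initialize_server()`):

```python
from pathlib import Path

output = Path("operators_doc")  # hypothetical output root; created by the call below
generate_operator_doc(server, "forward", include_private=False, output_path=output)
generate_toc_tree(output)  # scans output/"operator-specifications", writes output/"toc.yml"
```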
+
+
+def generate_operators_doc(
+    ansys_path: Path,
+    output_path: Path,
+    include_composites: bool = False,
+    include_sound: bool = False,
+    include_private: bool = False,
+    desired_plugin: str = None,
+    verbose: bool = True,
+):
+    """Generate the Markdown source files for the DPF operator documentation.
+
+    This function generates a Markdown file for each operator found in a given DPF installation,
+    categorized in folders per operator category, as well as a `toc.yml` file.
+    These are used to generate the DPF HTML documentation website as seen on the Developer Portal.
+
+    Parameters
+    ----------
+    ansys_path:
+        Path to an Ansys/DPF installation.
+    output_path:
+        Path to write the output files at.
+    include_composites:
+        Whether to include operators of the Composites plugin.
+    include_sound:
+        Whether to include operators of the Sound plugin.
+    include_private:
+        Whether to include private operators.
+    desired_plugin:
+        Restrict documentation generation to the operators of this specific plugin.
+    verbose:
+        Whether to print progress information.
+
+    """
+    server = initialize_server(ansys_path, include_composites, include_sound, verbose)
+    if desired_plugin is None:
+        operators = available_operator_names(server)
+    else:
+        operators = get_plugin_operators(server, desired_plugin)
+    for operator_name in operators:
+        generate_operator_doc(server, operator_name, include_private, output_path)
+    generate_toc_tree(output_path)
+
+
+def run_with_args():  # pragma: nocover
+    """Run generate_operators_doc from the command line with argument parsing."""
+    parser = argparse.ArgumentParser(
+        description="Generate the operator documentation sources for operators of a given DPF installation."
+    )
+    parser.add_argument(
+        "--ansys_path", default=None, help="Path to Ansys DPF Server installation directory"
+    )
+    parser.add_argument(
+        "--output_path", default=None, help="Path to output directory", required=True
+    )
+    parser.add_argument("--include_private", action="store_true", help="Include private operators")
+    parser.add_argument(
+        "--include_composites", action="store_true", help="Include Composites operators"
+    )
+    parser.add_argument(
+        "--include_sound", action="store_true", help="Include Sound operators (Windows only)"
+    )
+    parser.add_argument("--plugin", help="Restrict to the given plugin.")
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="store_true",
+        default=True,
+        help="Print script progress information.",
+    )
+    args = parser.parse_args()
+
+    generate_operators_doc(
+        ansys_path=args.ansys_path,
+        output_path=Path(args.output_path),  # argparse returns a str; the helpers expect a Path
+        include_composites=args.include_composites,
+        include_sound=args.include_sound,
+        include_private=args.include_private,
+        desired_plugin=args.plugin,
+        verbose=args.verbose,  # forward the parsed flag so it is not silently ignored
+    )
+
+
+if __name__ == "__main__":  # pragma: nocover
+    run_with_args()
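> **Editor's note.** Programmatically, the whole pipeline is one call. This mirrors the usage in `tests/test_documentation.py` below; the output location is hypothetical:

```python
from pathlib import Path

from ansys.dpf.core.documentation.generate_operators_doc import generate_operators_doc

generate_operators_doc(
    ansys_path=None,  # let DPF locate the default installation
    output_path=Path("operators_doc"),
    include_private=False,
    verbose=False,
)
# Produces operators_doc/toc.yml plus one Markdown page per operator under
# operators_doc/operator-specifications/<category>/.
```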
diff --git a/doc/source/operators_doc/operator-specifications/operator_doc_template.md b/src/ansys/dpf/core/documentation/operator_doc_template.md
similarity index 100%
rename from doc/source/operators_doc/operator-specifications/operator_doc_template.md
rename to src/ansys/dpf/core/documentation/operator_doc_template.md
diff --git a/doc/source/operators_doc/operator-specifications/toc_template.j2 b/src/ansys/dpf/core/documentation/toc_template.j2
similarity index 100%
rename from doc/source/operators_doc/operator-specifications/toc_template.j2
rename to src/ansys/dpf/core/documentation/toc_template.j2
diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py
index 68a1d38528d..ad4d86013b6 100644
--- a/src/ansys/dpf/core/server.py
+++ b/src/ansys/dpf/core/server.py
@@ -48,7 +48,7 @@
     ServerConfig,
     ServerFactory,
 )
-from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, BaseServer
+from ansys.dpf.core.server_types import DPF_DEFAULT_PORT, LOCALHOST, RUNNING_DOCKER, AnyServerType
 
 
 def shutdown_global_server():
@@ -73,7 +73,7 @@ def has_local_server():
     return dpf.core.SERVER is not None
 
 
-def _global_server() -> BaseServer:
+def _global_server() -> AnyServerType:
     """Retrieve the global server if it exists.
 
     If the global server has not been specified, check the expected server type in
@@ -162,7 +162,7 @@ def start_local_server(
     config=None,
     use_pypim_by_default=True,
     context=None,
-) -> BaseServer:
+) -> AnyServerType:
     """Start a new local DPF server at a given port and IP address.
 
     This method requires Windows and ANSYS 2021 R1 or later. If ``as_global=True``, which is
@@ -399,12 +399,12 @@ def connect():
             raise e
 
 
-def get_or_create_server(server: BaseServer | None) -> Union[BaseServer, None]:
+def get_or_create_server(server: AnyServerType | None) -> Union[AnyServerType, None]:
     """Return the given server or if None, creates a new one.
 
     Parameters
     ----------
-    server: BaseServer, None
+    server:
 
     Returns
     -------
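> **Editor's note.** The switch from `BaseServer` to `AnyServerType`, re-exported from `ansys.dpf.core` by the `__init__.py` change above, gives downstream code a single alias for server type hints. A sketch, assuming a local DPF installation is available:

```python
from ansys.dpf import core as dpf
from ansys.dpf.core import AnyServerType  # re-exported by this change
from ansys.dpf.core.dpf_operator import available_operator_names


def count_operators(server: AnyServerType) -> int:
    # Works for any concrete server type behind the alias.
    return len(available_operator_names(server))


print(count_operators(dpf.start_local_server()))
```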
diff --git a/tests/test_documentation.py b/tests/test_documentation.py
new file mode 100644
index 00000000000..7b42017b3ee
--- /dev/null
+++ b/tests/test_documentation.py
@@ -0,0 +1,47 @@
+# Copyright (C) 2020 - 2025 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from pathlib import Path
+
+import ansys.dpf.core as dpf
+from ansys.dpf.core.documentation.generate_operators_doc import generate_operators_doc
+
+
+def test_generate_operators_doc(tmp_path: Path):
+    generate_operators_doc(ansys_path=dpf.SERVER.ansys_path, output_path=tmp_path, verbose=False)
+    file_to_test = tmp_path / "toc.yml"
+    assert file_to_test.exists()
+    file_to_test = tmp_path / "operator-specifications" / "utility" / "forward.md"
+    assert file_to_test.exists()
+
+
+def test_generate_operators_doc_plugin(tmp_path: Path):
+    generate_operators_doc(
+        ansys_path=dpf.SERVER.ansys_path,
+        output_path=tmp_path,
+        verbose=False,
+        desired_plugin="core",
+    )
+    file_to_test = tmp_path / "toc.yml"
+    assert file_to_test.exists()
+    file_to_test = tmp_path / "operator-specifications" / "utility" / "forward.md"
+    assert file_to_test.exists()
diff --git a/tox.ini b/tox.ini
index 96aa9c5663d..192885679a1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -29,12 +29,12 @@
 [tox]
 description = Default tox environment list and core configurations
 
-envlist = pretest,test-{api,launcher,server,local_server,multi_server,api_entry,custom_type_field,operators,workflow,remote_workflow,remote_operator,service},posttest,kill-servers
+envlist = pretest,test-{api,documentation,launcher,server,local_server,multi_server,api_entry,custom_type_field,operators,workflow,remote_workflow,remote_operator,service},posttest,kill-servers
 
 labels =
-    localparalleltests = pretest,test-{api,launcher,server,local_server,multi_server,custom_type_field,operators},posttest,kill-servers
+    localparalleltests = pretest,test-{api,documentation,launcher,server,local_server,multi_server,custom_type_field,operators},posttest,kill-servers
     othertests = pretest,test-{workflow,remote_workflow,remote_operator,service},posttest,kill-servers
-    ciparalleltests = test-{api,launcher,local_server,multi_server,custom_type_field,operators},kill-servers
+    ciparalleltests = test-{api,documentation,launcher,local_server,multi_server,custom_type_field,operators},kill-servers
 
 isolated_build_env = build
 
@@ -95,14 +95,14 @@ commands_pre =
 commands =
     python -c "\
     import os, shutil; \
-    test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
+    test_data=['test_documentation','test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
    [(os.makedirs(d, exist_ok=True), shutil.copy('tests/conftest.py', d), shutil.copy(f'tests/\{d}.py', d) if os.path.exists(f'tests/\{d}.py') else None) for d in test_data]; \
    [os.remove(f'tests/\{d}.py') for d in test_data if os.path.exists(f'tests/\{d}.py')]"
 
 [testenv:posttest]
 description = Environment to revert test files to original state after testing
 
-depends = pretest, test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}
+depends = pretest, test-{api,documentation,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}
 
 skip_install = True
 
@@ -113,14 +113,15 @@ commands_pre =
 commands =
     python -c "\
     import os, shutil; \
-    test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service', 'test_custom_type_field']; \
+    test_data=['test_documentation','test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service', 'test_custom_type_field']; \
     [shutil.move(f'\{d}/\{d}.py', f'tests/\{d}.py') for d in test_data if os.path.exists(f'\{d}/\{d}.py')]; \
     [shutil.rmtree(d) for d in test_data if os.path.exists(d)]"
 
-[testenv:test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}]
+[testenv:test-{api,documentation,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}]
 description = Environment for running
     api: api tests
+    documentation: documentation tests
     launcher: launcher tests
     server: server tests
     local_server: local server tests
@@ -154,6 +155,7 @@ setenv =
     api_entry: JUNITXML = --junitxml=tests/junit/test-results10.xml -o junit_family=legacy
     custom_type_field: JUNITXML = --junitxml=tests/junit/test-results11.xml -o junit_family=legacy
     operators: JUNITXML = --junitxml=tests/junit/test-results12.xml -o junit_family=legacy
+    documentation: JUNITXML = --junitxml=tests/junit/test-results13.xml -o junit_family=legacy
 
     # Tests sets
     api: PYTEST_PYTHON_FILES = tests
@@ -168,6 +170,7 @@ setenv =
     api_entry: PYTEST_PYTHON_FILES = tests/entry
     custom_type_field: PYTEST_PYTHON_FILES = test_custom_type_field
     operators: PYTEST_PYTHON_FILES = tests/operators
+    documentation: PYTEST_PYTHON_FILES = test_documentation
 
     TEMP = {env_tmp_dir}
     TMP = {env_tmp_dir}
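> **Editor's note.** Once the package is installed, the `dpf_generate_operators_doc` console script declared in the `pyproject.toml` change above exposes the same pipeline to CI and shell users. A minimal sketch of driving it from Python; both flags are declared in `run_with_args()` and the output path is hypothetical:

```python
import subprocess

subprocess.run(
    [
        "dpf_generate_operators_doc",
        "--output_path", "doc/source/operators_doc",
        "--include_private",
    ],
    check=True,  # raise CalledProcessError if generation fails
)
```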