diff --git a/.github/workflows/pr-validate.yml b/.github/workflows/pr-validate.yml new file mode 100644 index 0000000000..7c6f7dddbe --- /dev/null +++ b/.github/workflows/pr-validate.yml @@ -0,0 +1,253 @@ +name: PR Validation + +on: + pull_request: + types: [opened, synchronize, reopened] + push: + branches: + - main +env: + DOCS_FOLDERS: "gh_action_testing" # space-separated list of folders to check + VALE_CONFIG_PATH: "vale.config/.vale.ini.default" + ## DOCS_FOLDERS: "zips/Sound-24R2-md markdown" + + +jobs: + validate-files: + runs-on: ubuntu-latest + + steps: + - name: Checkout PR files + uses: actions/checkout@v4 + + - name: Get changed files + id: changed + uses: tj-actions/changed-files@v45 + with: + files: | + **/*.zip + **/*.md + **/*.yaml + **/*.yml + **/*.json + + - name: Install dependencies + run: | + sudo apt-get update + sudo apt-get install -y unzip + npm install -g swagger-cli + npm install -g @stoplight/spectral-cli + curl -sL https://github.com/errata-ai/vale/releases/download/v3.12.0/vale_3.12.0_Linux_64-bit.tar.gz | tar -xz + sudo mv vale /usr/local/bin/ + vale --version + spectral --version + + # --- Check links in DOCS_FOLDER --- + - name: Run lychee link checker + id: lychee + uses: lycheeverse/lychee-action@v2 + with: + args: ${{ env.DOCS_FOLDERS }} + continue-on-error: true + - name: Save lychee result + if: always() # run even if lychee failed + run: | + if [[ ${{ steps.lychee.outputs.exit_code }} != 0 ]]; then + echo "lychee_fails=1" >> $GITHUB_ENV + else + echo "lychee_fails=0" >> $GITHUB_ENV + fi + # --- Check ZIP files --- + - name: Validate ZIP files + if: steps.changed.outputs.any_changed == 'true' + run: | + fails=0 + for file in ${{ steps.changed.outputs.all_changed_files }}; do + if [[ "$file" == *.zip ]]; then + echo "πŸ” Checking $file" + if ! 
unzip -t "$file" > /dev/null; then + echo "❌ $file is corrupted" + fails=1 + else + while read entry; do + if [[ "$entry" == */* ]]; then + echo "❌ $file contains files inside folders: $entry" + fails=1 + fi + done < <(unzip -l "$file" | awk '{print $4}' | grep -v '/$') + fi + if [[ $fails -eq 0 ]]; then + echo "βœ… $file passed" + fi + fi + done + echo "zip_fails=$fails" >> $GITHUB_ENV + + # --- Check Markdown files with Vale --- + - name: Run Vale on Markdown files + if: steps.changed.outputs.any_changed == 'true' + run: | + vale sync + fails=0 + for file in ${{ steps.changed.outputs.all_changed_files }}; do + # Skip any files under .github/ + if [[ $file == *toc.yml || $file == *docfx.json || $file == .github/* || $file == .spectral.yaml ]]; then + echo "⏭️ Skipping non documentation file: $file" + continue + fi + if [[ "$file" == *.md || "$file" == *.yml || "$file" == *.yaml || "$file" == *.json ]]; then + echo "πŸ” Checking $file with Vale" + if ! vale "$file"; then + echo "❌ Vale failed for $file" + fails=1 + else + echo "βœ… $file passed" + fi + fi + done + echo "vale_fails=$fails" >> $GITHUB_ENV + + # --- Check Swagger compatibility for YAML/JSON --- + - name: Validate YAML/JSON with Swagger and Spectral + if: steps.changed.outputs.any_changed == 'true' + run: | + fails=0 + for file in ${{ steps.changed.outputs.all_changed_files }}; do + # Skip any files under .github/ + if [[ $file == *toc.yml || $file == *docfx.json || $file == .github/* || $file == .spectral.yaml ]]; then + echo "⏭️ Skipping non-swagger file: $file" + continue + fi + if [[ "$file" == *.yaml || "$file" == *.yml || "$file" == *.json ]]; then + echo "πŸ” Checking $file with swagger-cli" + if ! swagger-cli validate "$file"; then + echo "❌ Invalid OpenAPI spec: $file" + fails=1 + else + echo "βœ… $file passed swagger-cli validation" + fi + echo "πŸ” Linting $file with Spectral" + if ! 
spectral lint "$file"; then + echo "❌ Spectral found issues in $file" + fails=1 + else + echo "βœ… $file passed Spectral linting" + fi + fi + done + echo "swagger_fails=$fails" >> $GITHUB_ENV + + # --- Check for required markdown files --- + - name: Check for required markdown files + run: | + has_index=false + has_changelog=false + toc_has_introduction=true + toc_has_changelog=true + echo "checking changed files ${{ steps.changed.outputs.all_changed_files }}" + for file in ${{ steps.changed.outputs.all_changed_files }}; do + echo "Checking $file" + if [[ $file == *index.md ]]; then + echo "inside index check" + has_index=true + fi + if [[ $file == *changelog.md ]]; then + echo "inside changelog check" + has_changelog=true + fi + if [[ $file == *toc.yml ]]; then + echo "Found toc.yml, validating..." + first_two=$(head -n 2 "$file") + last_two=$(tail -n 2 "$file") + + expected_first=$'- name: Introduction\n href: index.md' + expected_last=$'- name: Changelog\n href: changelog.md' + + if [[ "$first_two" != "$expected_first" ]]; then + echo "First two lines do not match expected Introduction entry" + toc_has_introduction=false + fi + + if [[ "$last_two" != "$expected_last" ]]; then + echo "Last two lines do not match expected Changelog entry" + toc_has_changelog=false + fi + fi + done + echo "has_index=$has_index" >> $GITHUB_ENV + echo "has_changelog=$has_changelog" >> $GITHUB_ENV + echo "toc_has_introduction=$toc_has_introduction" >> $GITHUB_ENV + echo "toc_has_changelog=$toc_has_changelog" >> $GITHUB_ENV + + # --- Final summary & exit --- + - name: Report summary + run: | + echo "==== Validation Summary ====" + if [[ $zip_fails -ne 0 ]]; then + echo "❌ ZIP file validation failed" + else + echo "βœ… ZIP file validation passed" + fi + + if [[ $vale_fails -ne 0 ]]; then + echo "❌ Vale checks failed" + else + echo "βœ… Vale checks passed" + fi + + if [[ $swagger_fails -ne 0 ]]; then + echo "❌ Swagger validation failed" + else + echo "βœ… Swagger validation passed" + fi 
+ if [[ $lychee_fails -ne 0 ]]; then + echo "❌ Link check failed" + else + echo "βœ… Link check passed" + fi + if [[ "$has_index" == "false" ]]; then + echo "❌ index.md is missing" + else + echo "βœ… index.md is present" + fi + if [[ $has_changelog == false ]]; then + echo "❌ changelog.md is missing" + else + echo "βœ… changelog.md is present" + fi + if [[ $toc_has_introduction == false ]]; then + echo "❌ toc.yml is missing Introduction entry" + else + echo "βœ… toc.yml has Introduction entry" + fi + if [[ $toc_has_changelog == false ]]; then + echo "❌ toc.yml is missing Changelog entry" + else + echo "βœ… toc.yml has Changelog entry" + fi + + # fail job if any failed + if [[ $zip_fails -ne 0 || $vale_fails -ne 0 || $swagger_fails -ne 0 || $lychee_fails -ne 0 || $has_index == 'false' || $has_changelog == 'false' || $toc_has_introduction == 'false' || $toc_has_changelog == 'false' ]]; then + exit 1 + fi + + # --- Add PR validation summary as comment --- + - name: Post summary as PR comment + if: always() + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: validation-summary + message: | + ## βœ… PR Validation Summary + + - ZIP validation: ${{ env.zip_fails == 0 && 'βœ… Passed' || '❌ Failed' }} + - Vale markdown checks: ${{ env.vale_fails == 0 && 'βœ… Passed' || '❌ Failed' }} + - Swagger checks: ${{ env.swagger_fails == 0 && 'βœ… Passed' || '❌ Failed' }} + - Link checking: ${{ env.lychee_fails == 0 && 'βœ… Passed' || '❌ Failed' }} + - index.md present: ${{ env.has_index == 'true' && 'βœ… Yes' || '❌ No' }} + - changelog.md present: ${{ env.has_changelog == 'true' && 'βœ… Yes' || '❌ No' }} + - toc.yml has Introduction entry: ${{ env.toc_has_introduction == 'true' && 'βœ… Yes' || '❌ No' }} + - toc.yml has Changelog entry: ${{ env.toc_has_changelog == 'true' && 'βœ… Yes' || '❌ No' }} + + _See full logs in [Actions run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})_ + \ No newline at end of file diff --git 
a/.github/workflows/vale_ai_fixer.yml b/.github/workflows/vale_ai_fixer.yml new file mode 100644 index 0000000000..6efb271699 --- /dev/null +++ b/.github/workflows/vale_ai_fixer.yml @@ -0,0 +1,50 @@ +name: Vale + AI Fixer + +on: + workflow_dispatch: + +jobs: + vale_ai_fixer: + runs-on: ubuntu-latest + + env: + AZURE_OPENAI_KEY: ${{ secrets.AZURE_OPENAI_KEY }} + AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} + OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install openai + + - name: Install Vale + run: | + curl -sL https://github.com/errata-ai/vale/releases/download/v3.12.0/vale_3.12.0_Linux_64-bit.tar.gz | tar -xz + sudo mv vale /usr/local/bin/ + vale sync + + - name: Run Vale + run: | + vale gh_action_testing/ --output=JSON > vale_report.json || true + + - name: Run AI fixer + run: | + python ai_fixer_azureopenai.py vale_report.json gh_action_testing + + - name: Diff changes and create patch (do NOT push) + run: | + git diff + git diff > ai_fixer.patch + - uses: actions/upload-artifact@v4 + with: + name: ai-fixer-patch + path: ai_fixer.patch \ No newline at end of file diff --git a/.spectral.yaml b/.spectral.yaml new file mode 100644 index 0000000000..84eecefb85 --- /dev/null +++ b/.spectral.yaml @@ -0,0 +1,30 @@ +extends: ["spectral:oas"] + +rules: + operation-summary-required: + description: "Every operation must have a summary." + given: "$.paths[*][*]" + then: + field: summary + function: truthy + + operation-description-required: + description: "Every operation must have a description." + given: "$.paths[*][*]" + then: + field: description + function: truthy + + response-description-required: + description: "Every response must have a description." 
+ given: "$.paths[*][*].responses[*]" + then: + field: description + function: truthy + + response-examples-required: + description: "Responses should have examples." + given: "$.paths[*][*].responses[*].content.*" + then: + field: example + function: truthy \ No newline at end of file diff --git a/ai_fixer_azureopenai.py b/ai_fixer_azureopenai.py new file mode 100644 index 0000000000..c5c3158aab --- /dev/null +++ b/ai_fixer_azureopenai.py @@ -0,0 +1,108 @@ +import json +import os +from pathlib import Path +from openai import AzureOpenAI + +DEPLOYMENT_NAME = "gpt-5" + +AZURE_OPENAI_KEY = os.getenv("AZURE_OPENAI_KEY") +AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT") +OPENAI_API_VERSION = os.getenv("OPENAI_API_VERSION") + +if not AZURE_OPENAI_KEY: + raise RuntimeError("❌ Missing environment variable: AZURE_OPENAI_KEY") +if not AZURE_OPENAI_ENDPOINT: + raise RuntimeError("❌ Missing environment variable: AZURE_OPENAI_ENDPOINT") +if not OPENAI_API_VERSION: + raise RuntimeError("❌ Missing environment variable: OPENAI_API_VERSION") + +# Initialize AzureOpenAI client +client = AzureOpenAI( + api_key=AZURE_OPENAI_KEY, + azure_endpoint=AZURE_OPENAI_ENDPOINT, + api_version=OPENAI_API_VERSION, +) + +def get_azure_openai_suggestion(message, context, file, line, match_text): + """ + Requests a corrected version of the flagged text from Azure OpenAI. + Returns only the corrected line with heading markers; no explanation. + """ + user_prompt = f""" +Error: {message} +File: {file}, line {line} +Problematic text: "{match_text}" + +Context: +--- +{context} +--- + +Please return **only the corrected heading line**, including the correct Markdown heading marker (#, ##, ###). +Do not add any explanation, quotes, extra characters, or duplicate the content. +The output must be exactly what should appear in the Markdown file. 
+""" + + response = client.chat.completions.create( + model=DEPLOYMENT_NAME, + messages=[ + {"role": "system", "content": "You are a helpful writing assistant that improves documentation style."}, + {"role": "user", "content": user_prompt} + ], + max_completion_tokens=4096 + ) + + suggestion = response.choices[0].message.content.strip() + return suggestion.strip('"').strip("'").strip() + +def main(report_file, docs_dir, dry_run=False): + with open(report_file, "r", encoding="utf-8-sig") as f: + report = json.load(f) + + for filepath, issues in report.items(): + path = Path(filepath) + if not path.exists(): + path = Path(docs_dir) / filepath + + with open(path, "r", encoding="utf-8") as md: + lines = md.readlines() + + modified = False + + for issue in issues: + # Only fix headings + if issue["Check"] != "Google.Headings": + continue + + line_num = issue["Line"] + message = issue["Message"] + match_text = issue["Match"] + + context = "".join(lines[max(0, line_num-3): line_num+2]) + suggestion = get_azure_openai_suggestion(message, context, path, line_num, match_text) + + if dry_run: + print("="*60) + print(f"File: {path}, Line: {line_num}") + print(f"Vale Message: {message}") + print(f"Flagged Text: {match_text}") + print(f"Context:\n{context.strip()}") + print(f"πŸ’‘ Suggestion: {suggestion}\n") + else: + # Replace the entire line to avoid duplicate heading markers + lines[line_num-1] = suggestion + "\n" + modified = True + + if modified and not dry_run: + with open(path, "w", encoding="utf-8") as md: + md.writelines(lines) + print(f"βœ… Updated {path}") + +if __name__ == "__main__": + import sys + if len(sys.argv) < 3: + print("Usage: python ai_fixer_azureopenai.py [--dry-run]") + exit(1) + + dry_run = "--dry-run" in sys.argv + main(sys.argv[1], sys.argv[2], dry_run) \ No newline at end of file diff --git a/gh_action_testing/Ansys_Speos_API_2023R2.zip b/gh_action_testing/Ansys_Speos_API_2023R2.zip new file mode 100644 index 0000000000..87a9896254 Binary files 
/dev/null and b/gh_action_testing/Ansys_Speos_API_2023R2.zip differ diff --git a/gh_action_testing/asm_script_examples.md b/gh_action_testing/asm_script_examples.md new file mode 100644 index 0000000000..25c3afafea --- /dev/null +++ b/gh_action_testing/asm_script_examples.md @@ -0,0 +1,6 @@ +# SpeosASM script examples + +The following section provides you with a list of script samples to help you create your own script. + + - CADUpdate – [Importing a geometry](method_cadupdate_importing_geometry.md) + - CADUpdate – [Updating a geometry](method_cadupdate_updating_geometry.md) diff --git a/gh_action_testing/changelog.md b/gh_action_testing/changelog.md new file mode 100644 index 0000000000..669d0f9cf2 --- /dev/null +++ b/gh_action_testing/changelog.md @@ -0,0 +1 @@ +Simple file to test changelog.md presence \ No newline at end of file diff --git a/gh_action_testing/client-speos-asm.md b/gh_action_testing/client-speos-asm.md new file mode 100644 index 0000000000..bfaf10b115 --- /dev/null +++ b/gh_action_testing/client-speos-asm.md @@ -0,0 +1,21 @@ +# Speos asm + +## CAD update + +### Static method + +Name | Description | Syntax +--- | --- | --- +Import | Import an external CAD file (CATProduct, CATPart, prt...) under the target Part's Component. Use `null` value for the `targetPart` parameter to import in the active Part. | `bool Import(string externalCadFilePath, object targetPart)` +Import | Import an external CAD file (CATProduct, CATPart, prt...) under the target Part's Component. Use `null` value for the `targetPart` parameter to import in the active Part. | `bool Import(string externalCadFilePath, SpaceClaim.Api.V251.Part targetPart)` +GetImportedPartsUnder | Get all Parts that are associated to a CAD-Import process, starting from the given Part. | `IEnumerable GetImportedPartsUnder(object targetPart)` +GetImportedPartsUnder | Get all Parts that are associated to a CAD-Import process, starting from the given Part. 
| `IEnumerable GetImportedPartsUnder(SpaceClaim.Api.V251.Part targetPart)` +GetImportedPartsUnder | Get all Parts that are associated to a CAD-Import process, starting from the given Component. | `IEnumerable GetImportedPartsUnder(SpaceClaim.Api.V251.Component component)` +GetLastImportedFilePath | Get the file path previously used to import the given Part. Returns 'null' if no CAD-Import information is found for this Part. | `string GetLastImportedFilePath(object targetPart)` +GetLastImportedFilePath | Get the file path previously used to import the given Part. Returns 'null' if no CAD-Import information is found for this Part. | `string GetLastImportedFilePath(SpaceClaim.Api.V251.Part targetPart)` +GetLastImportedFileDateTime | Get the last write DateTime of the imported file at the time when the part was imported or updated. Returns `DateTime.MinValue` if no CAD-Import information is found for this Part. | `System.DateTime GetLastImportedFileDateTime(object targetPart)` +GetLastImportedFileDateTime | Get the last write DateTime of the imported file at the time when the part was imported or updated. Returns `DateTime.MinValue` if no CAD-Import information is found for this Part. | `System.DateTime GetLastImportedFileDateTime(SpaceClaim.Api.V251.Part targetPart)` +Update | Update a Part resulting from a previous CAD-Import with the same file path. Returns 'true' if and only if the Part was updated. | `bool Update(object targetPart, bool skipPartsWithUnknownPath, bool skipUnmodifiedFiles)` +Update | Update a Part resulting from a previous CAD-Import with the same file path. Returns 'true' if and only if the Part was updated. | `bool Update(SpaceClaim.Api.V251.Part targetPart, bool skipPartsWithUnknownPath, bool skipUnmodifiedFiles)` +UpdateAll | Update all previously imported Parts from a given root Part. Returns 'true' if and only if at least one Part has been updated. 
| `bool UpdateAll(object targetPart, bool skipPartsWithUnknownPath, bool skipUnmodifiedFiles)` +UpdateAll | Update all previously imported Parts from a given root Part. Returns 'true' if and only if at least one Part has been updated. | `bool UpdateAll(SpaceClaim.Api.V251.Part targetPart, bool skipPartsWithUnknownPath, bool skipUnmodifiedFiles)` diff --git a/gh_action_testing/index.md b/gh_action_testing/index.md new file mode 100644 index 0000000000..c8f98c46cc --- /dev/null +++ b/gh_action_testing/index.md @@ -0,0 +1,2 @@ +# Placeholder +TODO: Add .NET projects to the *src* folder and run `docfx` to generate **REAL** *API Documentation*! diff --git a/gh_action_testing/method_cadupdate_importing_geometry.md b/gh_action_testing/method_cadupdate_importing_geometry.md new file mode 100644 index 0000000000..7933d5e58c --- /dev/null +++ b/gh_action_testing/method_cadupdate_importing_geometry.md @@ -0,0 +1,16 @@ +# CADUpdate – importing a geometry + +The following script sample shows you an example on how to import a geometry. + +```ironpython +from System.IO import Path + +currentFilePath = GetRootPart().Document.Path +currentPath = Path.GetDirectoryName(currentFilePath) + +speFile1 = currentPath + "\\" + "lguide.prt" +speFile2 = currentPath + "\\" + "led.prt" + +stest1 = SpeosAsm.CADUpdate.Import(speFile1) +test2 = SpeosAsm.CADUpdate.Import(speFile2) +``` \ No newline at end of file diff --git a/gh_action_testing/speos_changelogs_asp_252.md b/gh_action_testing/speos_changelogs_asp_252.md new file mode 100644 index 0000000000..24d8545f85 --- /dev/null +++ b/gh_action_testing/speos_changelogs_asp_252.md @@ -0,0 +1,86 @@ +# Speos changelog 2025 R2 + +## New features + +### Ambient source MODTRAN + +**BETA feature**: As of version 2025 R2, you can create a Speos MODTRAN source using as input a radiance map file (.tp5, .json) generated out of the MODTRAN software. 
Therefore, you can simulate radiative transfer in the Earth's atmosphere by modeling the transmission of light through the atmosphere considering a complete spectrum from ultraviolet (UV) to far-infrared (FIR). + +- Added [SourceAmbientModtran](../sim/client-speos-sim.md#sourceambientmodtran) API +- Added [SourceAmbientModtran](../sim/client-speos-sim.md#sensorradiance) method to the **SensorRadiance** API + +### glTF import + +As of version 2025 R2, Speos can import glTF file formats. Thanks to glTF file formats, you can import and access assets (geometries + appearance properties) in Speos allowing you to create and simulate scenes with highly detailed models. + +- Added [ImportGltf](../sim/client-speos-sim.md#static-method) static methods to the **Command** API + +## Improvements + +### Optical Design Exchange: material management + +As of version 2025 R2, the Optical Design Exchange workflow has been improved to provide you with a better user experience. + +- The optical properties information are no longer integrated to the ODX feature. Now upon compute of an ODX feature, imported optical properties information are created as Material features in the Speos Simulation tree, and imported geometries are directly referenced in their respective Material features. +- The meshing information are no longer integrated to the ODX feature. Now upon compute of an ODX feature, imported meshing information are created as a Local Meshing feature in the Speos Simulation tree, and imported geometries are directly referenced the Local Meshing feature. + +As a consequence, the worklow has been standardized to correspond to a classical Speos workflow: indeed, you can now directly add to a Speos simulation the imported geometries from the Structure tree, as Materials and Local Meshing reference the geometries. 
+ +The following changes have been made to the Speos API, impacting different APIs: + +- Sop API + - Removed [Sop](../sim/client-speos-sim.md) API +- Vop API + - Removed [Vop](../sim/client-speos-sim.md) API +- ComponentOpticStudio API + - Added [UpdateMaterialsOnly](../sim/client-speos-sim.md#componentopticstudio) property to the **ComponentOpticStudio** API + - Removed [GetGeometrySettings](../sim/client-speos-sim.md#componentopticstudio) static methods from the **ComponentOpticStudio** API +- GeometryOpticsStudio API + - Removed [FrontFaces](../sim/client-speos-sim.md#geometryopticsstudio) properties from the **GeometryOpticsStudio** API + - Removed [BackFaces](../sim/client-speos-sim.md#geometryopticsstudio) properties from the **GeometryOpticsStudio** API + - Removed [EdgeFaces](../sim/client-speos-sim.md#geometryopticsstudio) properties from the **GeometryOpticsStudio** API + - Removed [NewFaces](../sim/client-speos-sim.md#geometryopticsstudio) properties from the **GeometryOpticsStudio** API +- Lenses API + - Removed [MoveToFront](../sim/client-speos-sim.md#lenses) method from the **Lenses** API + - Removed [MoveToBack](../sim/client-speos-sim.md#lenses) method from the **Lenses** API + - Removed [MoveToEdge](../sim/client-speos-sim.md#lenses) method from the **Lenses** API + - Removed [FrontFaceSOP](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [BackFaceSOP](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [EdgeFaceSOP](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [FrontFaces](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [BackFaces](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [EdgeFaces](../sim/client-speos-sim.md#lenses) property from the **Lenses** API + - Removed [NewFaces](../sim/client-speos-sim.md#lenses) property from the **Lenses** API +- Material API + - Added 
[SopAppearancePath](../sim/client-speos-sim.md#material) properties to the **Material** API +- Surfaces API + - Removed [SurfaceOpticalProperties](../sim/client-speos-sim.md#surfaces) property from the **Surfaces** API + - Removed [FrontFaces](../sim/client-speos-sim.md#surfaces) property from the **Surfaces** API + - Removed [BackFaces](../sim/client-speos-sim.md#surfaces) property from the **Surfaces** API + - Removed [EdgeFaces](../sim/client-speos-sim.md#surfaces) property from the **Surfaces** API + - Removed [NewFaces](../sim/client-speos-sim.md#surfaces) property from the **Surfaces** API + +### Optical Design Exchange: lenses with apertures + +As of version 2025 R2, the Optical Design Exchange feature supports *.odx file exported from Ansys Zemax OpticStudio containing lenses with rectangular, elliptical, or circular apertures defined on surface. + +As a consequence, the following changes have been made to the Speos API, impacting different APIs: + +- Lenses API + - Added [BackFaceAperture](../sim/client-speos-sim.md#lenses) property to the **Lenses** API + - Added [FrontFaceAperture](../sim/client-speos-sim.md#lenses) property to the **Lenses** API +- SurfaceAperture API + - Added new [SurfaceAperture](../sim/client-speos-sim.md#surfaceaperture) API +- Surfaces API + - Added [SpeosOpticStudioSurfaceAperture](../sim/client-speos-sim.md#surfaces) property to the **Surfaces** API + +### Light guide + +Light Guide milling parameters now allow you to apply a fillet on the Side of the prisms +(Side milling) and the Border between the Light Guide body and the prisms (Border milling). Previously +only Top and Bottom prism millings were available. 
+ +- Added [GetSideMillingTypePossibleValues](../des/client-speos-des.md#lightguide) method to the **LightGuide** API +- Added [GetBorderMillingTypePossibleValues](../des/client-speos-des.md#lightguide) method to the **LightGuide** API +- Added [SideMilling](../des/client-speos-des.md#lightguide) properties to the **LightGuide** API +- Added [BorderMilling](../des/client-speos-des.md#lightguide) properties to the **LightGuide** API diff --git a/gh_action_testing/toc.yml b/gh_action_testing/toc.yml new file mode 100644 index 0000000000..61734c93b2 --- /dev/null +++ b/gh_action_testing/toc.yml @@ -0,0 +1,4 @@ +- name: Introduction + href: index.md +- name: Changelog + href: changelog.md \ No newline at end of file diff --git a/vale.config/.vale.ini.default b/vale.config/.vale.ini.default new file mode 100644 index 0000000000..f87abbdce3 --- /dev/null +++ b/vale.config/.vale.ini.default @@ -0,0 +1,37 @@ +# Core settings +# ============= + +# Location of our `styles` +StylesPath = "_styles" + +# The options are `suggestion`, `warning`, or `error` (defaults to β€œwarning”). +MinAlertLevel = warning + +Packages = Hugo + +# By default, `code` and `tt` are ignored. +IgnoredScopes = code, tt + +# By default, `script`, `style`, `pre`, and `figure` are ignored. +SkippedScopes = script, style, pre, figure + +# WordTemplate specifies what Vale will consider to be an individual word. 
+WordTemplate = \b(?:%s)\b
+
+# List of Packages to be used for our guidelines
+Packages = Hugo, Google
+
+# Define the Ansys vocabulary
+Vocab = ANSYS
+
+[*.{md,rst}]
+
+# Accept HUGO shortcuts/commands
+BlockIgnores = (?s) *({{[<%] [^>%]* [>%]}})
+
+# Apply the following styles
+BasedOnStyles = Vale, Google
+
+# Removing Google-specific rule - Not applicable under some circumstances
+Google.WordList = NO
+Google.Colons = NO
diff --git a/vale.config/.vale.ini.speos b/vale.config/.vale.ini.speos
new file mode 100644
index 0000000000..678a07291e
--- /dev/null
+++ b/vale.config/.vale.ini.speos
@@ -0,0 +1,37 @@
+# Core settings
+# =============
+
+# Location of our `styles`
+StylesPath = "_styles_speos"
+
+# The options are `suggestion`, `warning`, or `error` (defaults to β€œwarning”).
+MinAlertLevel = warning
+
+Packages = Hugo
+
+# By default, `code` and `tt` are ignored.
+IgnoredScopes = code, tt
+
+# By default, `script`, `style`, `pre`, and `figure` are ignored.
+SkippedScopes = script, style, pre, figure
+
+# WordTemplate specifies what Vale will consider to be an individual word.
+WordTemplate = \b(?:%s)\b
+
+# List of Packages to be used for our guidelines
+Packages = Hugo, Google
+
+# Define the Ansys vocabulary
+Vocab = ANSYS
+
+[*.{md,rst}]
+
+# Accept HUGO shortcuts/commands
+BlockIgnores = (?s) *({{[<%] [^>%]* [>%]}})
+
+# Apply the following styles
+BasedOnStyles = Vale, Google
+
+# Removing Google-specific rule - Not applicable under some circumstances
+Google.WordList = NO
+Google.Colons = NO
diff --git a/vale.config/_styles/config/vocabularies/ANSYS/accept.txt b/vale.config/_styles/config/vocabularies/ANSYS/accept.txt
new file mode 100644
index 0000000000..6359eb819b
--- /dev/null
+++ b/vale.config/_styles/config/vocabularies/ANSYS/accept.txt
@@ -0,0 +1,31 @@
+[Aa]nsys
+Docsy
+HUGO
+GitHub Pages
+Dev
+Doxygen
+Fortran
+APIs
+VSCode
+repo
+Markdownlint
+Doxyfile
+doxyconfig
+hrefs
+css
+usergroup
+metatag
+[Dd]ocfx
+[Nn]amespace
+Pandoc
+bookmap
+subpage
+PowerShell
+Writage
+Proto
+mainpage
+Docling
+
+
+
+
diff --git a/vale.config/_styles/config/vocabularies/ANSYS/reject.txt b/vale.config/_styles/config/vocabularies/ANSYS/reject.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/vale.config/_styles_speos/config/vocabularies/ANSYS/accept.txt b/vale.config/_styles_speos/config/vocabularies/ANSYS/accept.txt
new file mode 100644
index 0000000000..7df011bf98
--- /dev/null
+++ b/vale.config/_styles_speos/config/vocabularies/ANSYS/accept.txt
@@ -0,0 +1,30 @@
+[Aa]nsys
+Docsy
+HUGO
+GitHub Pages
+Dev
+Doxygen
+Fortran
+APIs
+VSCode
+repo
+Markdownlint
+Doxyfile
+doxyconfig
+hrefs
+css
+usergroup
+metatag
+[Nn]amespace
+Pandoc
+bookmap
+subpage
+PowerShell
+Writage
+Proto
+mainpage
+Docling
+
+
+
+
diff --git a/vale.config/_styles_speos/config/vocabularies/ANSYS/reject.txt b/vale.config/_styles_speos/config/vocabularies/ANSYS/reject.txt
new file mode 100644
index 0000000000..e69de29bb2