diff --git a/.github/workflows/1_run_interoperability_tests.yml b/.github/workflows/1_run_interoperability_tests.yml index 2929ef3a..720883cf 100644 --- a/.github/workflows/1_run_interoperability_tests.yml +++ b/.github/workflows/1_run_interoperability_tests.yml @@ -1,401 +1,106 @@ name: 1 - Run Interoperability Tests -run-name: ${{ github.actor }} is testing out GitHub Actions 🚀 -env: - # comma separated list of tests to skip - SKIP_EXEC: "connext_dds-6.1.2_shape_main_linux" -on: workflow_dispatch +run-name: Run Interoperability Tests +on: + workflow_dispatch: + inputs: + publishers: + description: Publishers to use + type: string + default: '["connext_dds","dust_dds","eprosima_fastdds","intercom_dds","opendds","toc_coredx_dds"]' + subscribers: + description: Subscribers to use + type: string + default: '["connext_dds","dust_dds","eprosima_fastdds","intercom_dds","opendds","toc_coredx_dds"]' jobs: generate_timestamp: runs-on: ubuntu-latest steps: - - name: Generate timestamp file - run: date '+%Y-%m-%d-%H_%M_%S' > timestamp - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: timestamp - path: | - ./timestamp - connext_dds: + - name: Generate timestamp file + run: date '+%Y-%m-%d-%H_%M_%S' > timestamp + - name: Upload timestamp file + uses: actions/upload-artifact@v4 + with: + name: timestamp + path: | + ./timestamp + + run_tests: runs-on: ubuntu-latest needs: generate_timestamp + strategy: + matrix: + publisher: ${{ fromJson(github.event.inputs.publishers) }} + subscriber: ${{ fromJson(github.event.inputs.subscribers) }} + steps: - - name: Checkout - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: timestamp - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. 
- # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in connext_dds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - extra_args=""; \ - if [[ "${subscriber,,}" == *opendds* ]]; then \ - extra_args="--periodic-announcement 5000"; \ - fi; \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml $extra_args; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_connext_dds - path: | - ./junit_interoperability_report.xml - ./timestamp - dust_dds: - runs-on: ubuntu-latest - needs: connext_dds - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: interoperability_report_connext_dds - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. - # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in dust_dds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_dust_dds - path: | - ./junit_interoperability_report.xml - ./timestamp - eprosima_fastdds: - runs-on: ubuntu-latest - needs: dust_dds - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: interoperability_report_dust_dds - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. 
- # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in eprosima_fastdds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_eprosima_fastdds - path: | - ./junit_interoperability_report.xml - ./timestamp - intercom_dds: - runs-on: ubuntu-latest - needs: eprosima_fastdds - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: interoperability_report_eprosima_fastdds - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. - # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in intercom_dds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_intercom_dds - path: | - ./junit_interoperability_report.xml - ./timestamp - opendds: - runs-on: ubuntu-latest - needs: intercom_dds - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: interoperability_report_intercom_dds - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. 
- # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in opendds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_opendds - path: | - ./junit_interoperability_report.xml - ./timestamp - toc_coredx_dds: - runs-on: ubuntu-latest - needs: opendds - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: interoperability_report_opendds - - uses: actions/setup-python@v5 - with: - python-version: '3.11.4' - - name: Downloads assets - uses: robinraju/release-downloader@v1.10 - with: - latest: true - fileName: "*" - - name: Skip tests - run: | - echo "Skipping the following tests: $SKIP_EXEC" - clean_list="${SKIP_EXEC//[[:space:]]/}" - for name in ${clean_list//,/ }; do - rm -f "$name.zip" - done - - name: Unzip - run: unzip '*.zip' -d executables - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt - - name: Run Interoperability script - # The test descriptions used are the generated for the last execution. - # This shouldn't be an issue because all test are run always - run: | - source .venv/bin/activate - cd executables - for publisher in toc_coredx_dds-* ; do \ - if [ -e "$publisher" ]; then \ - for subscriber in * ; do \ - echo "Testing Publisher $publisher --- Subscriber $subscriber"; \ - python3 ./../interoperability_report.py -P ./$publisher -S ./$subscriber -o=./../junit_interoperability_report.xml; \ - if [ -d "./OpenDDS-durable-data-dir" ]; then \ - echo Deleting OpenDDS-durable-data-dir; \ - rm -rf ./OpenDDS-durable-data-dir; \ - fi; \ - done \ - fi; \ - done - - name: Attach the report - if: always() - uses: actions/upload-artifact@v4 - with: - name: interoperability_report_toc_coredx_dds - path: | - ./junit_interoperability_report.xml - ./timestamp + - name: Checkout sources + uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.11.4' + - name: Download shape_main executables + uses: robinraju/release-downloader@v1.10 + with: + repository: omg-dds/dds-rtps + latest: true + fileName: "*" + out-file-path: zipped_executables + - name: Unzip executables + run: unzip 'zipped_executables/*.zip' -d executables + - name: Install Python requirements + run: pip install --requirement requirements.txt + - name: Run Interoperability script + timeout-minutes: 60 + run: | + publisher_exe=executables/${{ matrix.publisher }}*shape_main_linux + subscriber_exe=executables/${{ matrix.subscriber }}*shape_main_linux + output_file=junit_report-${{ matrix.publisher }}-${{ matrix.subscriber }}.xml + extra_args="" + if [[ "${subscriber_exe,,}" == *opendds* && "${publisher_exe,,}" == *connext_dds* ]]; then + extra_args="--periodic-announcement 5000" + fi + python3 interoperability_report.py --publisher $publisher_exe --subscriber $subscriber_exe --output-name $output_file $extra_args + - name: Download timestamp + uses: actions/download-artifact@v4 + with: + name: 
timestamp + - name: Upload report + uses: actions/upload-artifact@v4 + with: + name: junit_report-${{ matrix.publisher }}-${{ matrix.subscriber }} + path: | + ./junit_report-${{ matrix.publisher }}-${{ matrix.subscriber }}.xml + ./timestamp + generate_report: runs-on: ubuntu-latest - needs: toc_coredx_dds + needs: run_tests steps: - name: Checkout uses: actions/checkout@v4 - - name: Download artifact + - name: Download artifacts uses: actions/download-artifact@v4 with: - name: interoperability_report_toc_coredx_dds + pattern: junit_report-* + merge-multiple: true - uses: actions/setup-python@v5 with: python-version: '3.11.4' - - name: Setting up environment - run: | - python3 -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt + - name: Install Python requirements + run: pip install --requirement requirements.txt + - name: merge reports + run: junitparser merge *.xml junit_interoperability_report.xml - name: Generate xlsx report - run: | - source .venv/bin/activate - python3 generate_xlsx_report.py --input junit_interoperability_report.xml --output interoperability_report.xlsx - - name: XUnit Viewer - id: xunit-viewer + run: python3 generate_xlsx_report.py --input junit_interoperability_report.xml --output interoperability_report.xlsx + - name: XUnit uses: AutoModality/action-xunit-viewer@v1 with: results: ./junit_interoperability_report.xml + fail: false + - name: Download timestamp + uses: actions/download-artifact@v4 + with: + name: timestamp - name: Attach the report if: always() uses: actions/upload-artifact@v4 diff --git a/.github/workflows/ci_dustdds.yml b/.github/workflows/ci_dustdds.yml new file mode 100644 index 00000000..65bebd3c --- /dev/null +++ b/.github/workflows/ci_dustdds.yml @@ -0,0 +1,31 @@ +name: CI DustDDS + +on: + workflow_dispatch: + pull_request: + paths: + - 'srcRs/DustDDS/**' + +jobs: + create_bin_release: + name: Create binary release + runs-on: ubuntu-latest + defaults: + run: + working-directory: srcRs/DustDDS + steps: + - name: Checkout sources + uses: actions/checkout@v4 + - name: Build executable + run: cargo build --package dust_dds_shape_main_linux --release + - name: Rename executable + run: | + version=$( cargo tree --package dust_dds --depth 0 --prefix none | tr -d 'dust_dds v' ) + cp ./target/release/dust_dds_shape_main_linux ./dust_dds-${version}_shape_main_linux + mkdir artifacts + zip --junk-paths artifacts/dust_dds-${version}_shape_main_linux.zip ./dust_dds-${version}_shape_main_linux + - name: Upload executable artifact + uses: actions/upload-artifact@v4 + with: + name: interoperability_executable + path: srcRs/DustDDS/artifacts/ diff --git a/.gitignore b/.gitignore index de5f6736..4897f53d 100644 --- a/.gitignore +++ b/.gitignore @@ -48,9 +48,16 @@ GeneratedCode # Generated files srcCxx/shape.* +!srcCxx/shape.idl srcCxx/shapePlugin.* srcCxx/shapeSupport.* +# Generated files Connext Micro +srcCxx/shape_bounded.* +!srcCxx/shape_bounded.idl +srcCxx/shape_boundedPlugin.* +srcCxx/shape_boundedSupport.* + # VSCode default folders .vscode/ build/ diff --git a/generate_xlsx_report.py b/generate_xlsx_report.py index 9a4351e7..af54edca 100644 --- a/generate_xlsx_report.py +++ b/generate_xlsx_report.py @@ -19,6 +19,22 @@ import datetime from rtps_test_utilities import log_message import test_suite +from enum import Enum + +class TestStatus(Enum): + """ + Enumeration of the test status. 
+ PASSED: The test has passed + FAILED: The test has failed + PUB_UNSUPPORTED: The test is unsupported for the Publisher + SUB_UNSUPPORTED: The test is unsupported for the Subscriber + PUB_SUB_UNSUPPORTED: The test is unsupported for both Publisher and Subscriber + """ + PASSED = 1 + FAILED = 2 + PUB_UNSUPPORTED = 3 + SUB_UNSUPPORTED = 4 + PUB_SUB_UNSUPPORTED = 5 class XlxsReportArgumentParser: """Class that parse the arguments of the application.""" @@ -70,6 +86,8 @@ def get_company_name(product:str) -> str: def get_product_name(product:str) -> str: """Returns a beautified product name and version""" # set the beautified name and version + if 'connext' in product.lower() and 'micro' in product.lower(): + return 'Connext DDS Micro ' + re.search(r'([\d.]+)', product).group(1) if 'connext' in product.lower(): return 'Connext DDS ' + re.search(r'([\d.]+)', product).group(1) elif 'opendds' in product.lower(): @@ -88,14 +106,15 @@ def get_product_name(product:str) -> str: class JunitAggregatedData: """ - Class that contains the JUnit aggregated data as a tuple of 2 integers - [tests_passed, total_tests]. This identifies one cell in the summary - table that shows the product and the amount of tests passed and total. + Class that contains the JUnit aggregated data as a tuple of 3 integers + [tests_passed, total_tests, tests_unsupported]. This identifies one cell in + the summary table that shows the product and the amount of tests passed, + total and unsupported. """ - data: tuple[int,int] # [tests_passed, total_tests] + data: tuple[int,int, int] # [tests_passed, total_tests, tests_unsupported] - def __init__(self, passed_tests: int, total_tests: int) -> None: - self.data = [passed_tests, total_tests] + def __init__(self, passed_tests: int, total_tests: int, unsupported_tests: int) -> None: + self.data = [passed_tests, total_tests, unsupported_tests] def get_passed_tests(self): return self.data[0] @@ -103,8 +122,14 @@ def get_passed_tests(self): def get_total_tests(self): return self.data[1] + def get_unsupported_tests(self): + return self.data[2] + + def get_supported_tests(self): + return self.data[1] - self.data[2] + def __str__(self) -> str: - return f'({self.data[0]}, {self.data[1]})' + return f'({self.data[0]}, {self.data[1]}, {self.data[2]})' class JunitTestCaseAggregatedData: """ @@ -113,14 +138,13 @@ class JunitTestCaseAggregatedData: Publisher or Subscriber) and with all other products (as Subscribers or Publishers, the opposite). This tuple is composed by 2 strings that identifies the other product - (Publisher or Subscriber), the test name and whether the test was - successful or not. + (Publisher or Subscriber), the test name and the status of the test. 
""" - # [publisher or subscriber name, test_name, passed_tests] - data: tuple[str,str,bool] = None + # [publisher or subscriber name, test_name, status] + data: tuple[str,str,TestStatus] = None - def __init__(self, product: str, test_name: str, passed: bool) -> None: - self.data = (product, test_name, passed) + def __init__(self, product: str, test_name: str, status: TestStatus) -> None: + self.data = (product, test_name, status) def get_product_name(self): return self.data[0] @@ -128,7 +152,7 @@ def get_product_name(self): def get_test_name(self): return self.data[1] - def get_passed(self): + def get_status(self): return self.data[2] def __str__(self) -> str: @@ -166,6 +190,7 @@ def __init__(self, input: pathlib.Path): @staticmethod def xml_parser(file): """Function to parse the XML file""" + parser = lxml.etree.XMLParser(huge_tree=True) return lxml.etree.parse(file, parser) @@ -182,6 +207,7 @@ def update_value_aggregated_data_dict(self, updated_data = JunitAggregatedData( dictionary[key].get_passed_tests() + value.get_passed_tests(), dictionary[key].get_total_tests() + value.get_total_tests(), + dictionary[key].get_unsupported_tests() + value.get_unsupported_tests() ) dictionary[key] = updated_data else: @@ -218,47 +244,47 @@ def get_info(self, input: pathlib.Path = None): publisher_name = ProductUtils.get_product_name(product_names.group(1)) subscriber_name = ProductUtils.get_product_name(product_names.group(2)) - # get the value of the passed_tests and total_tests as a - # JunitAggregatedData - element = JunitAggregatedData( - suite.tests - suite.failures - suite.skipped - suite.errors, - suite.tests - ) - - # update the information of the product in the summary_dict with - # the information of the publisher and the subscriber - self.update_value_aggregated_data_dict( - self.summary_dict, publisher_name, element) - # do not add duplicated data if the publisher and subscriber names - # are the same - if publisher_name != subscriber_name: - self.update_value_aggregated_data_dict( - self.summary_dict, subscriber_name, element) - - # Get table with the summary of the test passed/total_tests for - # every product as publisher and as subscriber - product_dict_key = (publisher_name, subscriber_name) - product_test_data = JunitAggregatedData( - suite.tests - suite.failures - suite.skipped - suite.errors, - suite.tests) - self.update_value_aggregated_data_dict( - self.product_summary_dict, - product_dict_key, - product_test_data) - # for each test case in the test suite, fill out the dictionaries # that contains information about the product as publisher and # subscriber + unsupported_tests_count = 0 for case in list(iter(suite)): + is_pub_unsupported = False + is_sub_unsupported = False + status = None test_name = re.search(r'((?:Test_)[\S]+_\d+)', case.name).group(1) + # count number of unsupported tests for the summary + # result array is not empty and the message contains 'UNSUPPORTED_FEATURE' + if case.result and len(case.result) > 0: + if 'PUB_UNSUPPORTED_FEATURE' in case.result[0].message.upper(): + is_pub_unsupported = True + if 'SUB_UNSUPPORTED_FEATURE' in case.result[0].message.upper(): + is_sub_unsupported = True + + if is_pub_unsupported or is_sub_unsupported: + unsupported_tests_count += 1 + + # Get test status + if case.is_passed: + status = TestStatus.PASSED + elif is_pub_unsupported and is_sub_unsupported: + status = TestStatus.PUB_SUB_UNSUPPORTED + elif is_pub_unsupported: + status = TestStatus.PUB_UNSUPPORTED + elif is_sub_unsupported: + status = TestStatus.SUB_UNSUPPORTED + 
else: + status = TestStatus.FAILED + + # update the value of the publisher_name as publisher with # all products as subscribers. - # the tuple is (subscriber_name, test_name, is_passed) + # the tuple is (subscriber_name, test_name, status) publisher_test_result = JunitTestCaseAggregatedData( product=subscriber_name, test_name=test_name, - passed=case.is_passed + status=status ) # add the resulting tuple to the publisher dictionary, the key @@ -272,11 +298,11 @@ def get_info(self, input: pathlib.Path = None): # update the value of the subscriber_name as subscriber with # all products as publishers. - # the tuple is (publisher_name, test_name, is_passed) + # the tuple is (publisher_name, test_name, status) subscriber_test_result = JunitTestCaseAggregatedData( product=publisher_name, test_name=test_name, - passed=case.is_passed + status=status ) # add the resulting tuple to the subscriber dictionary, the key @@ -288,6 +314,37 @@ def get_info(self, input: pathlib.Path = None): product_dict=self.subscriber_product_dict ) + # get the value of the passed_tests, total_tests and + # unsupported_tests as a JunitAggregatedData + element = JunitAggregatedData( + suite.tests - suite.failures - suite.skipped - suite.errors, + suite.tests, + unsupported_tests_count + ) + + # update the information of the product in the summary_dict with + # the information of the publisher and the subscriber + self.update_value_aggregated_data_dict( + self.summary_dict, publisher_name, element) + # do not add duplicated data if the publisher and subscriber names + # are the same + if publisher_name != subscriber_name: + self.update_value_aggregated_data_dict( + self.summary_dict, subscriber_name, element) + + # Get table with the summary of the test + # passed/total_tests/unsupported_tests for every product as + # publisher and as subscriber + product_dict_key = (publisher_name, subscriber_name) + product_test_data = JunitAggregatedData( + suite.tests - suite.failures - suite.skipped - suite.errors, + suite.tests, + unsupported_tests_count) + self.update_value_aggregated_data_dict( + self.product_summary_dict, + product_dict_key, + product_test_data) + class ColorUtils: """Set specific colors""" GREEN = '#4EB168' @@ -422,7 +479,13 @@ def get_format_color(self, index: int, num_elements: int): Return the corresponding color format depending on the ratio of passed_tests/total_tests """ + # this might only happen for supported tests when the total supported + # scenarios is 0 + if num_elements == 0: + return self.__formats['result_red'] + ratio = index / num_elements + if ratio < 0.25: return self.__formats['result_red'] elif ratio < 0.5: @@ -434,17 +497,20 @@ def get_format_color(self, index: int, num_elements: int): else: # ratio == 1 return self.__formats['result_green'] - def get_format_color_bool(self, passed: bool): + def get_format_color_test_status(self, status: TestStatus): """ - Get the corresponding color format depending on 'passed'. - Green if passed is True, Red otherwise + Get the corresponding color format depending on 'status'. 
+ Green if status is PASSED, Red if FAILED, Yellow if UNSUPPORTED """ - if passed: + if status == TestStatus.PASSED: # Return GREEN - return self.get_format_color(1,1) + return self.__formats['result_green'] + elif status == TestStatus.FAILED: + # Return RED + return self.__formats['result_red'] else: - # Return FALSE - return self.get_format_color(0,1) + # Return YELLOW + return self.__formats['result_yellow'] def add_static_data_test(self, worksheet: xlsxwriter.Workbook.worksheet_class, @@ -604,7 +670,7 @@ def add_product_table(self, 'Test', self.__formats['bold_w_border']) - # This column dictionary will keep the colum for the subscriber product + # This column dictionary will keep the column for the subscriber product column_dict = {} row_dict = {} # for all elements (test results), add the corresponding value to the @@ -645,13 +711,26 @@ def add_product_table(self, element.get_test_name(), self.__formats['bold_w_border']) - # set OK or ERROR if the test passed or not - str_result = 'OK' if element.get_passed() else 'ERROR' + # get status string of the test result + if element.get_status() == TestStatus.PASSED: + str_result = 'OK' + elif element.get_status() == TestStatus.FAILED: + str_result = 'ERROR' + elif element.get_status() == TestStatus.PUB_UNSUPPORTED: + str_result = 'PUB UNSUPPORTED' + elif element.get_status() == TestStatus.SUB_UNSUPPORTED: + str_result = 'SUB UNSUPPORTED' + elif element.get_status() == TestStatus.PUB_SUB_UNSUPPORTED: + str_result = 'PUB/SUB UNSUPPORTED' + else: + str_result = 'UNKNOWN' + + # write status string to the test result worksheet.write( process_row, process_column, str_result, - self.get_format_color_bool(element.get_passed())) + self.get_format_color_test_status(element.get_status())) return (current_row, current_column) def add_data_summary_worksheet(self, @@ -673,33 +752,60 @@ def add_data_summary_worksheet(self, 'Product', self.__formats['bold_w_border']) worksheet.write( current_row, current_column + 2, - 'Test Passed', self.__formats['bold_w_border']) + 'Tests Passed', self.__formats['bold_w_border']) + worksheet.write( + current_row, current_column + 3, + 'Supported Tests', self.__formats['bold_w_border']) + worksheet.write( + current_row, current_column + 4, + 'Supported Tests Passed', self.__formats['bold_w_border']) current_row += 1 # Create table with the total passed_tests/total_tests per product for product_name, value in self.__data.summary_dict.items(): + # company name worksheet.write( current_row, current_column, ProductUtils.get_company_name(product_name), self.__formats['bold_w_border']) + # product name worksheet.write( current_row, current_column + 1, product_name, self.__formats['bold_w_border']) + # test passed worksheet.write( current_row, current_column + 2, str(value.get_passed_tests()) + ' / ' + - str(value.get_total_tests()), + str(value.get_total_tests()), self.get_format_color(value.get_passed_tests(), value.get_total_tests())) + # supported tests + worksheet.write( + current_row, current_column + 3, + str(value.get_supported_tests()) + ' / ' + + str(value.get_total_tests()), + self.__formats['result_yellow'] if value.get_unsupported_tests() > 0 + else self.__formats['result_green']) + # supported tests passed + worksheet.write( + current_row, current_column + 4, + str(value.get_passed_tests()) + ' / ' + + str(value.get_supported_tests()), + self.get_format_color(value.get_passed_tests(), + value.get_supported_tests())) current_row += 1 # Add 2 rows of gap for the next table current_row += 2 worksheet.write( 
current_row, current_column, - 'Publisher/Subscriber', self.__formats['bold_w_border']) + 'Test Result: passed / supported / total', self.__formats['bold_w_border']) + current_row += 1 + worksheet.write( + current_row, current_column, + 'Publisher (row)/Subscriber (column)', self.__formats['bold_w_border']) # create a dictionary to store the row/column of the product name # for example, row_dict['Connext DDS 6.1.2'] = 30 means that the @@ -743,9 +849,10 @@ def add_data_summary_worksheet(self, process_column = column_dict[subscriber_name] worksheet.write(process_row, process_column, - str(value.get_passed_tests()) + ' / ' + - str(value.get_total_tests()), - self.get_format_color(value.get_passed_tests(), value.get_total_tests())) + str(value.get_passed_tests()) + ' / ' + + str(value.get_supported_tests()) + ' / ' + + str(value.get_total_tests()), + self.get_format_color(value.get_passed_tests(), value.get_supported_tests())) def add_static_data_summary_worksheet(self, worksheet: xlsxwriter.Workbook.worksheet_class, diff --git a/interoperability_report.py b/interoperability_report.py index 51c2eaaa..9c1bfedb 100644 --- a/interoperability_report.py +++ b/interoperability_report.py @@ -24,7 +24,7 @@ if __name__ == "__main__" and platform.system() == "Darwin": multiprocessing.set_start_method('fork') -from rtps_test_utilities import ReturnCode, log_message, no_check, remove_ansi_colors +from rtps_test_utilities import ReturnCode, log_message, basic_check, remove_ansi_colors # This parameter is used to save the samples the Publisher sends. # MAX_SAMPLES_SAVED is the maximum number of samples saved. @@ -51,16 +51,19 @@ def stop_process(child_process, timeout=30, poll_interval=0.2): else: return True # Process already exited - start_time = time.time() + return_value = True + start_time = time.time() while child_process.isalive() and (time.time() - start_time < timeout): time.sleep(poll_interval) if child_process.isalive(): child_process.terminate(force=True) - return False # Process was forcefully terminated + return_value = False # Process was forcefully terminated + + child_process.expect(pexpect.EOF, timeout=5) - return True + return return_value def run_subscriber_shape_main( name_executable: str, @@ -138,14 +141,17 @@ def run_subscriber_shape_main( index = child_sub.expect( [ 'Create topic:', # index = 0 - pexpect.TIMEOUT, # index = 1 - pexpect.EOF # index = 2 + re.compile('not supported', re.IGNORECASE), # index = 1 + pexpect.TIMEOUT, # index = 2 + pexpect.EOF # index = 3 ], timeout ) - if index == 1 or index == 2: + if index == 2 or index == 3: produced_code[produced_code_index] = ReturnCode.TOPIC_NOT_CREATED + elif index == 1: + produced_code[produced_code_index] = ReturnCode.SUB_UNSUPPORTED_FEATURE elif index == 0: # Step 3: Check if the reader is created log_message(f'Subscriber {subscriber_index}: Waiting for DataReader ' @@ -153,25 +159,33 @@ def run_subscriber_shape_main( index = child_sub.expect( [ 'Create reader for topic:', # index = 0 - pexpect.TIMEOUT, # index = 1 - 'failed to create content filtered topic' # index = 2 + 'failed to create content filtered topic', # index = 1 + re.compile('not supported', re.IGNORECASE), # index = 2 + pexpect.TIMEOUT, # index = 3 + pexpect.EOF # index = 4 + ], timeout ) - if index == 1: + if index == 3 or index == 4: produced_code[produced_code_index] = ReturnCode.READER_NOT_CREATED - elif index == 2: + elif index == 1: produced_code[produced_code_index] = ReturnCode.FILTER_NOT_CREATED + elif index == 2: + produced_code[produced_code_index] = 
ReturnCode.SUB_UNSUPPORTED_FEATURE elif index == 0: # Step 4: Read data or incompatible qos or deadline missed log_message(f'Subscriber {subscriber_index}: Waiting for data', verbosity) index = child_sub.expect( [ - '\[[0-9]+\]', # index = 0 + r'\[[0-9]+\]', # index = 0 'on_requested_incompatible_qos()', # index = 1 'on_requested_deadline_missed()', # index = 2 - pexpect.TIMEOUT, # index = 3 + re.compile('not supported', re.IGNORECASE), # index = 3 + pexpect.TIMEOUT, # index = 4 + pexpect.EOF # index = 5 + ], timeout ) @@ -180,8 +194,10 @@ def run_subscriber_shape_main( produced_code[produced_code_index] = ReturnCode.INCOMPATIBLE_QOS elif index == 2: produced_code[produced_code_index] = ReturnCode.DEADLINE_MISSED - elif index == 3: + elif index == 4 or index == 5: produced_code[produced_code_index] = ReturnCode.DATA_NOT_RECEIVED + elif index == 3: + produced_code[produced_code_index] = ReturnCode.SUB_UNSUPPORTED_FEATURE elif index == 0: # Step 5: Receiving samples log_message(f'Subscriber {subscriber_index}: Receiving samples', @@ -277,14 +293,17 @@ def run_publisher_shape_main( index = child_pub.expect( [ 'Create topic:', # index == 0 - pexpect.TIMEOUT, # index == 1 - pexpect.EOF # index == 2 + re.compile('not supported', re.IGNORECASE), # index = 1 + pexpect.TIMEOUT, # index == 2 + pexpect.EOF # index == 3 ], timeout ) - if index == 1 or index == 2: + if index == 2 or index == 3: produced_code[produced_code_index] = ReturnCode.TOPIC_NOT_CREATED + elif index == 1: + produced_code[produced_code_index] = ReturnCode.PUB_UNSUPPORTED_FEATURE elif index == 0: # Step 3: Check if the writer is created log_message(f'Publisher {publisher_index}: Waiting for DataWriter ' @@ -292,12 +311,16 @@ def run_publisher_shape_main( index = child_pub.expect( [ 'Create writer for topic', # index = 0 - pexpect.TIMEOUT # index = 1 + re.compile('not supported', re.IGNORECASE), # index = 1 + pexpect.TIMEOUT, # index = 2 + pexpect.EOF # index == 3 ], timeout ) - if index == 1: + if index == 2 or index == 3: produced_code[produced_code_index] = ReturnCode.WRITER_NOT_CREATED + elif index == 1: + produced_code[produced_code_index] = ReturnCode.PUB_UNSUPPORTED_FEATURE elif index == 0: # Step 4: Check if the writer matches the reader log_message(f'Publisher {publisher_index}: Waiting for matching ' @@ -305,15 +328,19 @@ def run_publisher_shape_main( index = child_pub.expect( [ 'on_publication_matched()', # index = 0 - pexpect.TIMEOUT, # index = 1 - 'on_offered_incompatible_qos' # index = 2 + 'on_offered_incompatible_qos', # index = 1 + re.compile('not supported', re.IGNORECASE), # index = 2 + pexpect.TIMEOUT, # index = 3 + pexpect.EOF # index == 4 ], timeout ) - if index == 1: + if index == 3 or index == 4: produced_code[produced_code_index] = ReturnCode.READER_NOT_MATCHED - elif index == 2: + elif index == 1: produced_code[produced_code_index] = ReturnCode.INCOMPATIBLE_QOS + elif index == 2: + produced_code[produced_code_index] = ReturnCode.PUB_UNSUPPORTED_FEATURE elif index == 0: # In the case that the option -w is selected, the Publisher # saves the samples sent in order, so the Subscriber can check @@ -324,15 +351,19 @@ def run_publisher_shape_main( if '-w ' in parameters or parameters.endswith('-w'): # Step 5: Check whether the writer sends the samples index = child_pub.expect([ - '\[[0-9]+\]', # index = 0 + r'\[[0-9]+\]', # index = 0 'on_offered_deadline_missed()', # index = 1 - pexpect.TIMEOUT # index = 2 + re.compile('not supported', re.IGNORECASE), # index = 2 + pexpect.TIMEOUT, # index = 3 + pexpect.EOF # index 
== 4 ], timeout) if index == 1: produced_code[produced_code_index] = ReturnCode.DEADLINE_MISSED - elif index == 2: + elif index == 3 or index == 4: produced_code[produced_code_index] = ReturnCode.DATA_NOT_SENT + elif index == 2: + produced_code[produced_code_index] = ReturnCode.PUB_UNSUPPORTED_FEATURE elif index == 0: produced_code[produced_code_index] = ReturnCode.OK log_message(f'Publisher {publisher_index}: Sending ' @@ -341,20 +372,24 @@ def run_publisher_shape_main( for x in range(0, MAX_SAMPLES_SAVED, 1): # At this point, at least one sample has been printed # Therefore, that sample is added to samples_sent. - pub_string = re.search('[0-9]+ [0-9]+ \[[0-9]+\]', + pub_string = re.search(r'[0-9]+ [0-9]+ \[[0-9]+\]', child_pub.before + child_pub.after) last_sample = pub_string.group(0) samples_sent.put(last_sample) index = child_pub.expect([ - '\[[0-9]+\]', # index = 0 + r'\[[0-9]+\]', # index = 0 'on_offered_deadline_missed()', # index = 1 - pexpect.TIMEOUT # index = 2 + re.compile('not supported', re.IGNORECASE), # index = 2 + pexpect.TIMEOUT # index = 3 ], timeout) if index == 1: produced_code[produced_code_index] = ReturnCode.DEADLINE_MISSED break elif index == 2: + produced_code[produced_code_index] = ReturnCode.PUB_UNSUPPORTED_FEATURE + break + elif index == 3: produced_code[produced_code_index] = ReturnCode.DATA_NOT_SENT break last_sample_saved.put(last_sample) @@ -811,7 +846,7 @@ def main(): raise RuntimeError('Cannot process function of ' f'test case: {test_case_name}') else: - check_function = no_check + check_function = basic_check assert(len(parameters) == len(expected_codes)) diff --git a/rtps_test_utilities.py b/rtps_test_utilities.py index 7e9068ea..4b2e746b 100644 --- a/rtps_test_utilities.py +++ b/rtps_test_utilities.py @@ -28,6 +28,8 @@ class ReturnCode(Enum): DEADLINE_MISSED : Publisher/Subscriber missed the deadline period ORDERED_ACCESS_INSTANCE : Subscriber reading with ordered access and access scope INSTANCE ORDERED_ACCESS_TOPIC : Subscriber reading with ordered access and access scope TOPIC + PUB_UNSUPPORTED_FEATURE : The test requires a feature not supported by the publisher implementation + SUB_UNSUPPORTED_FEATURE : The test requires a feature not supported by the subscriber implementation """ OK = 0 TOPIC_NOT_CREATED = 1 @@ -44,6 +46,8 @@ class ReturnCode(Enum): DEADLINE_MISSED = 14 ORDERED_ACCESS_INSTANCE = 15 ORDERED_ACCESS_TOPIC = 16 + PUB_UNSUPPORTED_FEATURE = 17 + SUB_UNSUPPORTED_FEATURE = 18 def log_message(message, verbosity): if verbosity: @@ -56,3 +60,18 @@ def remove_ansi_colors(text): def no_check(child_sub, samples_sent, last_sample_saved, timeout): return ReturnCode.OK + +def basic_check(child_sub, samples_sent, last_sample_saved, timeout): + """ Only checks that the data is well formed and size is not zero.""" + sub_string = re.search('\w\s+\w+\s+[0-9]+ [0-9]+ \[([0-9]+)\]', + child_sub.before + child_sub.after) + + if sub_string is None: + return ReturnCode.DATA_NOT_RECEIVED + + sample_size = int(sub_string.group(1)) + + if sample_size == 0: + return ReturnCode.DATA_NOT_CORRECT + + return ReturnCode.OK diff --git a/run_tests.sh b/run_tests.sh index be90102e..e12659d6 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -81,7 +81,7 @@ for i in $publisher; do subscriber_name=$(basename "$j" _shape_main_linux) echo "Testing Publisher $publisher_name --- Subscriber $subscriber_name" extra_args="" - if [[ "${subscriber,,}" == *opendds* && "${publisher,,}" == *connext* ]]; then + if [[ "${subscriber_name,,}" == *opendds* && "${publisher_name,,}" == 
*connext_dds* ]]; then
            extra_args="--periodic-announcement 5000"
        fi;
        if [[ -n $output ]]; then
diff --git a/srcCxx/makefile_rti_connext_micro_linux b/srcCxx/makefile_rti_connext_micro_linux
new file mode 100644
index 00000000..1475c322
--- /dev/null
+++ b/srcCxx/makefile_rti_connext_micro_linux
@@ -0,0 +1,89 @@
+######################################################################
+# To compile, type:
+#   make -f makefile_rti_connext_micro_linux
+# To compile with the Debug option, use:
+#   make -f makefile_rti_connext_micro_linux DEBUG=1
+#
+# This makefile assumes that your build environment is already correctly
+# configured. (For example, the correct version of your compiler and
+# linker should be on your PATH.)
+#
+# You should set the environment variable RTIMEHOME to point to where
+# RTI Connext Micro is installed.
+#
+######################################################################
+
+# RTIMEHOME must be set in the environment; fail early if it is not
+ifndef RTIMEHOME
+$(error RTIMEHOME not defined)
+endif
+
+COMPILER_FLAGS = -m64
+LINKER_FLAGS = -m64 -static-libgcc
+
+split_path_name = $(subst /rti_, , $(RTIMEHOME))
+# from connext_dds_micro-x.y.z remove _dds to get connext_micro-x.y.z
+product_name = $(notdir $(split_path_name))
+product_name := $(subst _dds,,$(product_name))
+
+version_name = $(lastword $(product_name))
+common_name = "_shape_main_linux"
+executable_name = $(version_name)$(common_name)
+
+RTIMEARCH = x64Linux4gcc7.3.0
+
+ifndef COMPILER
+COMPILER = g++
+endif
+
+ifndef LINKER
+LINKER = g++
+endif
+
+SYSLIBS = -ldl -lnsl -lm -lpthread -lrt
+
+ifeq ($(DEBUG),1)
+COMPILER_FLAGS += -g -O0
+LINKER_FLAGS += -g
+LIBS = -L$(RTIMEHOME)/lib/$(RTIMEARCH) \
+       -lrti_me_cppzd -lrti_me_netiosdmzd \
+       -lrti_me_discdpdezd -lrti_me_ddsfilterzd -lrti_me_rhsmzd \
+       -lrti_me_whsmzd -lrti_mezd -lrti_me_ddsxtypeszd $(SYSLIBS)
+else
+# This option strips the executable symbols
+LINKER_FLAGS += -s
+LIBS = -L$(RTIMEHOME)/lib/$(RTIMEARCH) \
+       -lrti_me_cppz -lrti_me_netiosdmz \
+       -lrti_me_discdpdez -lrti_me_ddsfilterz -lrti_me_rhsmz \
+       -lrti_me_whsmz -lrti_mez -lrti_me_ddsxtypesz $(SYSLIBS)
+endif
+
+DEFINES = -DRTI_UNIX -DRTI_LINUX -DRTI_CONNEXT_MICRO
+
+INCLUDES = -I. -I$(RTIMEHOME)/include -I$(RTIMEHOME)/include/rti_me
+
+OBJDIR := objs/$(RTIMEARCH)_micro
+
+CDRSOURCES := shape_bounded.idl
+AUTOGENSOURCES := shape_boundedSupport.cxx shape_boundedPlugin.cxx shape_bounded.cxx
+
+EXEC := $(executable_name)
+AUTOGENOBJS := $(addprefix $(OBJDIR)/, $(AUTOGENSOURCES:%.cxx=%.o))
+
+$(OBJDIR)/$(EXEC) : $(AUTOGENSOURCES) $(AUTOGENOBJS) $(OBJDIR)/shape_main.o
+	$(LINKER) $(LINKER_FLAGS) -o $@ $(OBJDIR)/shape_main.o $(AUTOGENOBJS) $(LIBS)
+
+$(OBJDIR)/%.o : %.cxx
+	$(COMPILER) $(COMPILER_FLAGS) -o $@ $(DEFINES) $(INCLUDES) -c $<
+
+shape_main.cxx : shape_configurator_rti_connext_micro.h
+
+# Generate type-specific sources
+$(AUTOGENSOURCES) : $(CDRSOURCES)
+	$(RTIMEHOME)/rtiddsgen/scripts/rtiddsgen $(CDRSOURCES) -replace -micro -language C++
+
+$(AUTOGENOBJS): | objs/$(RTIMEARCH)_micro
+
+objs/$(RTIMEARCH)_micro:
+	echo "Making directory objs/$(RTIMEARCH)_micro";
+	mkdir -p objs/$(RTIMEARCH)_micro
diff --git a/srcCxx/makefile_rti_connext_micro_macos b/srcCxx/makefile_rti_connext_micro_macos
new file mode 100644
index 00000000..25ca9b5c
--- /dev/null
+++ b/srcCxx/makefile_rti_connext_micro_macos
@@ -0,0 +1,89 @@
+######################################################################
+# To compile, type:
+#   make -f makefile_rti_connext_micro_macos
+# To compile with the Debug option, use:
+#   make -f makefile_rti_connext_micro_macos DEBUG=1
+#
+# This makefile assumes that your build environment is already correctly
+# configured. (For example, the correct version of your compiler and
+# linker should be on your PATH.)
+#
+# You should set the environment variable RTIMEHOME to point to where
+# RTI Connext Micro is installed.
+#
+######################################################################
+
+# RTIMEHOME must be set in the environment; fail early if it is not
+ifndef RTIMEHOME
+$(error RTIMEHOME not defined)
+endif
+
+COMPILER_FLAGS = -std=c++11
+LINKER_FLAGS =
+
+split_path_name = $(subst /rti_, , $(RTIMEHOME))
+# from connext_dds_micro-x.y.z remove _dds to get connext_micro-x.y.z
+product_name = $(notdir $(split_path_name))
+product_name := $(subst _dds,,$(product_name))
+
+version_name = $(lastword $(product_name))
+common_name = "_shape_main_macos"
+executable_name = $(version_name)$(common_name)
+
+RTIMEARCH = arm64Darwin23clang15.0
+
+ifndef COMPILER
+COMPILER = clang++
+endif
+
+ifndef LINKER
+LINKER = clang++
+endif
+
+#SYSLIBS =
+
+ifeq ($(DEBUG),1)
+COMPILER_FLAGS += -g -O0
+LINKER_FLAGS += -g
+LIBS = -L$(RTIMEHOME)/lib/$(RTIMEARCH) \
+       -lrti_me_cppzd -lrti_me_netiosdmzd \
+       -lrti_me_discdpdezd -lrti_me_ddsfilterzd -lrti_me_rhsmzd \
+       -lrti_me_whsmzd -lrti_mezd -lrti_me_ddsxtypeszd $(SYSLIBS)
+else
+# This option strips the executable symbols
+#LINKER_FLAGS += -s
+LIBS = -L$(RTIMEHOME)/lib/$(RTIMEARCH) \
+       -lrti_me_cppz -lrti_me_netiosdmz \
+       -lrti_me_discdpdez -lrti_me_ddsfilterz -lrti_me_rhsmz \
+       -lrti_me_whsmz -lrti_mez -lrti_me_ddsxtypesz $(SYSLIBS)
+endif
+
+DEFINES = -DRTI_UNIX -DRTI_DARWIN -DRTI_CONNEXT_MICRO
+
+INCLUDES = -I.
-I$(RTIMEHOME)/include -I$(RTIMEHOME)/include/rti_me + +OBJDIR := objs/$(RTIMEARCH)_micro + +CDRSOURCES := shape_bounded.idl +AUTOGENSOURCES := shape_boundedSupport.cxx shape_boundedPlugin.cxx shape_bounded.cxx + +EXEC := $(executable_name) +AUTOGENOBJS := $(addprefix $(OBJDIR)/, $(AUTOGENSOURCES:%.cxx=%.o)) + +$(OBJDIR)/$(EXEC) : $(AUTOGENSOURCES) $(AUTOGENOBJS) $(OBJDIR)/shape_main.o + $(LINKER) $(LINKER_FLAGS) -o $@ $(OBJDIR)/shape_main.o $(AUTOGENOBJS) $(LIBS) + +$(OBJDIR)/%.o : %.cxx + $(COMPILER) $(COMPILER_FLAGS) -o $@ $(DEFINES) $(INCLUDES) -c $< + +shape_main.cxx : shape_configurator_rti_connext_micro.h + +# Generate type-specific sources +$(AUTOGENSOURCES) : $(CDRSOURCES) + $(RTIMEHOME)/rtiddsgen/scripts/rtiddsgen $(CDRSOURCES) -replace -micro -language C++ + +$(AUTOGENOBJS): | objs/$(RTIMEARCH)_micro + +objs/$(RTIMEARCH)_micro: + echo "Making directory objs/$(RTIMEARCH)_micro"; + mkdir -p objs/$(RTIMEARCH)_micro diff --git a/srcCxx/shape_bounded.idl b/srcCxx/shape_bounded.idl new file mode 100644 index 00000000..3ffde7e5 --- /dev/null +++ b/srcCxx/shape_bounded.idl @@ -0,0 +1,9 @@ +@appendable +struct ShapeType { + @key + string<128> color; + long x; + long y; + long shapesize; + sequence additional_payload_size; +}; diff --git a/srcCxx/shape_configurator_rti_connext_dds.h b/srcCxx/shape_configurator_rti_connext_dds.h index 590cbd05..e2a905fb 100644 --- a/srcCxx/shape_configurator_rti_connext_dds.h +++ b/srcCxx/shape_configurator_rti_connext_dds.h @@ -1,3 +1,5 @@ +#include + #include "shape.h" #include "shapeSupport.h" #include "ndds/ndds_namespace_cpp.h" @@ -15,14 +17,43 @@ const char *get_qos_policy_name(DDS_QosPolicyId_t policy_id) return DDS_QosPolicyId_to_string(policy_id); // not standard... } +bool configure_datafrag_size( + DDS::DomainParticipantQos &dp_qos, + size_t datafrag_size) { + bool ok = false; + if (datafrag_size == 0) { + ok = false; + } else { + DDS_PropertyQosPolicyHelper_add_property( + &dp_qos.property, + "dds.transport.UDPv4.builtin.parent.message_size_max", + std::to_string(datafrag_size).c_str(), + DDS_BOOLEAN_FALSE); + ok = true; + } + return ok; +} + void configure_participant_announcements_period( DDS::DomainParticipantQos &dp_qos, useconds_t announcement_period_us) { if (announcement_period_us == 0) { return; } + dp_qos.discovery_config.participant_liveliness_assert_period.sec = announcement_period_us / 1000000; dp_qos.discovery_config.participant_liveliness_assert_period.nanosec = (announcement_period_us % 1000000) * 1000; } + +void configure_large_data(DDS::DataWriterQos &dw_qos) { + if (DDS::PropertyQosPolicyHelper::assert_property( + dw_qos.property, + "dds.data_writer.history.memory_manager.fast_pool.pool_buffer_max_size", + "65536", + DDS_BOOLEAN_FALSE) != DDS_RETCODE_OK) { + printf("failed to set property pool_buffer_max_size\n"); + } + dw_qos.publish_mode.kind = DDS::ASYNCHRONOUS_PUBLISH_MODE_QOS; +} diff --git a/srcCxx/shape_configurator_rti_connext_micro.h b/srcCxx/shape_configurator_rti_connext_micro.h new file mode 100644 index 00000000..46dae675 --- /dev/null +++ b/srcCxx/shape_configurator_rti_connext_micro.h @@ -0,0 +1,285 @@ +#include "shape_bounded.h" +#include "shape_boundedSupport.h" + +#include "rti_me_cpp.hxx" +#include "dds_cpp/dds_cpp_netio.hxx" + +#include +#include + +#define LISTENER_STATUS_MASK_ALL (DDS_STATUS_MASK_ALL) + +#ifndef XCDR_DATA_REPRESENTATION + #define XCDR_DATA_REPRESENTATION DDS_XCDR_DATA_REPRESENTATION +#endif + +#ifndef XCDR2_DATA_REPRESENTATION + #define XCDR2_DATA_REPRESENTATION 
DDS_XCDR2_DATA_REPRESENTATION +#endif + +#ifndef PresentationQosPolicyAccessScopeKind + #define PresentationQosPolicyAccessScopeKind DDS_PresentationQosPolicyAccessScopeKind +#endif + +#ifndef INSTANCE_PRESENTATION_QOS + #define INSTANCE_PRESENTATION_QOS DDS_INSTANCE_PRESENTATION_QOS +#endif + +#ifndef TOPIC_PRESENTATION_QOS + #define TOPIC_PRESENTATION_QOS DDS_TOPIC_PRESENTATION_QOS +#endif + +#ifndef GROUP_PRESENTATION_QOS + #define GROUP_PRESENTATION_QOS DDS_GROUP_PRESENTATION_QOS +#endif + + +#define DataRepresentationId_t DDS_DataRepresentationId_t +#define DataRepresentationIdSeq DDS_DataRepresentationIdSeq + +typedef CDR_StringSeq StringSeq; + +const DDS::DurabilityQosPolicyKind TRANSIENT_DURABILITY_QOS = DDS_TRANSIENT_DURABILITY_QOS; +const DDS::DurabilityQosPolicyKind PERSISTENT_DURABILITY_QOS = DDS_PERSISTENT_DURABILITY_QOS; + +void StringSeq_push(StringSeq &string_seq, const char *elem) +{ + string_seq.ensure_length(string_seq.length()+1, string_seq.length()+1); + string_seq[string_seq.length()-1] = DDS_String_dup(elem); +} + + +const char* get_qos_policy_name(DDS::QosPolicyId_t policy_id) +{ + //case DDS::USERDATA_QOS_POLICY_ID) { return "USERDATA"; + if (policy_id == DDS::DURABILITY_QOS_POLICY_ID) { return "DURABILITY"; } + else if (policy_id == DDS::PRESENTATION_QOS_POLICY_ID) { return "PRESENTATION"; } + else if (policy_id == DDS::DEADLINE_QOS_POLICY_ID) { return "DEADLINE"; } + else if (policy_id == DDS::LATENCYBUDGET_QOS_POLICY_ID) { return "LATENCYBUDGET"; } + else if (policy_id == DDS::OWNERSHIP_QOS_POLICY_ID) { return "OWNERSHIP"; } + else if (policy_id == DDS::OWNERSHIPSTRENGTH_QOS_POLICY_ID) { return "OWNERSHIPSTRENGTH"; } + else if (policy_id == DDS::LIVELINESS_QOS_POLICY_ID) { return "LIVELINESS"; } + else if (policy_id == DDS::TIMEBASEDFILTER_QOS_POLICY_ID) { return "TIMEBASEDFILTER"; } + else if (policy_id == DDS::PARTITION_QOS_POLICY_ID) { return "PARTITION"; } + else if (policy_id == DDS::RELIABILITY_QOS_POLICY_ID) { return "RELIABILITY"; } + else if (policy_id == DDS::DESTINATIONORDER_QOS_POLICY_ID) { return "DESTINATIONORDER"; } + else if (policy_id == DDS::HISTORY_QOS_POLICY_ID) { return "HISTORY"; } + else if (policy_id == DDS::RESOURCELIMITS_QOS_POLICY_ID) { return "RESOURCELIMITS"; } + else if (policy_id == DDS::ENTITYFACTORY_QOS_POLICY_ID) { return "ENTITYFACTORY"; } + else if (policy_id == DDS::WRITERDATALIFECYCLE_QOS_POLICY_ID) { return "WRITERDATALIFECYCLE"; } + else if (policy_id == DDS::READERDATALIFECYCLE_QOS_POLICY_ID) { return "READERDATALIFECYCLE"; } + else if (policy_id == DDS::TOPICDATA_QOS_POLICY_ID) { return "TOPICDATA"; } + else if (policy_id == DDS::GROUPDATA_QOS_POLICY_ID) { return "GROUPDATA"; } + else if (policy_id == DDS::TRANSPORTPRIORITY_QOS_POLICY_ID) { return "TRANSPORTPRIORITY"; } + else if (policy_id == DDS::LIFESPAN_QOS_POLICY_ID) { return "LIFESPAN"; } + else if (policy_id == DDS::DURABILITYSERVICE_QOS_POLICY_ID) { return "DURABILITYSERVICE"; } + else { return "Unknown"; } +} + +static bool config_micro() +{ + bool ok = false; + RT::Registry *registry = NULL; + DPDE::DiscoveryPluginProperty *discovery_plugin_properties = NULL; + UDP_InterfaceFactoryProperty *udp_property = NULL; + + OSAPI_Log_set_verbosity(OSAPI_LOG_VERBOSITY_SILENT); + + registry = DDSTheParticipantFactory->get_registry(); + + /* Register Writer History */ + if (!registry->register_component("wh", WHSMHistoryFactory::get_interface(), NULL, NULL)) + { + printf("ERROR: unable to register writer history\n"); + goto done; + } + + /* Register Reader History */ + if 
(!registry->register_component("rh", RHSMHistoryFactory::get_interface(), NULL, NULL)) + { + printf("ERROR: unable to register reader history\n"); + goto done; + } + + /* Configure UDP transport's allowed interfaces */ + if (!registry->unregister(NETIO_DEFAULT_UDP_NAME, NULL, NULL)) + { + printf("ERROR: unable to unregister udp\n"); + goto done; + } + + udp_property = new UDP_InterfaceFactoryProperty(); + if (udp_property == NULL) + { + printf("ERROR: unable to allocate udp properties\n"); + goto done; + } + + udp_property->max_message_size = 64 * 1024; //64KB + + if (!registry->register_component( + NETIO_DEFAULT_UDP_NAME, + UDPInterfaceFactory::get_interface(), + &udp_property->_parent._parent, + NULL)) { + printf("ERROR: unable to register udp\n"); + goto done; + } + + discovery_plugin_properties = new DPDE::DiscoveryPluginProperty(); + + /* Configure properties */ + discovery_plugin_properties->participant_liveliness_assert_period.sec = 5; + discovery_plugin_properties->participant_liveliness_assert_period.nanosec = 0; + discovery_plugin_properties->participant_liveliness_lease_duration.sec = 30; + discovery_plugin_properties->participant_liveliness_lease_duration.nanosec = 0; + + + if (!registry->register_component( + "dpde", + DPDEDiscoveryFactory::get_interface(), + &discovery_plugin_properties->_parent, + NULL)) { + printf("ERROR: unable to register dpde\n"); + goto done; + } + + ok = true; +done: + if (!ok) { + if (udp_property != NULL) { + delete udp_property; + } + if (discovery_plugin_properties != NULL) { + delete discovery_plugin_properties; + } + } + return ok; +} + +static bool configure_datafrag_size(unsigned int datafrag_size) { + + bool ok = false; + RT::Registry *registry = NULL; + UDP_InterfaceFactoryProperty *udp_property = NULL; + + registry = DDSTheParticipantFactory->get_registry(); + + if (!registry->unregister(NETIO_DEFAULT_UDP_NAME, NULL, NULL)) { + printf("ERROR: unable to unregister udp\n"); + goto done; + } + + udp_property = new UDP_InterfaceFactoryProperty(); + if (udp_property == NULL) { + printf("ERROR: unable to allocate udp properties\n"); + goto done; + } + + udp_property->max_message_size = datafrag_size; + + if (!registry->register_component( + NETIO_DEFAULT_UDP_NAME, + UDPInterfaceFactory::get_interface(), + &udp_property->_parent._parent, + NULL)) { + printf("ERROR: unable to register udp\n"); + goto done; + } + ok = true; +done: + if (!ok) { + if (udp_property != NULL) { + delete udp_property; + } + } + return ok; +} + +static bool configure_dp_qos(DDS::DomainParticipantQos &dp_qos) +{ + if (!dp_qos.discovery.discovery.name.set_name("dpde")) + { + printf("ERROR: unable to set discovery plugin name\n"); + return false; + } + + dp_qos.discovery.initial_peers.maximum(2); + dp_qos.discovery.initial_peers.length(2); + dp_qos.discovery.initial_peers[0] = DDS_String_dup("127.0.0.1"); + dp_qos.discovery.initial_peers[1] = DDS_String_dup("_udp://239.255.0.1"); + + /* if there are more remote or local endpoints, you need to increase these limits */ + dp_qos.resource_limits.max_destination_ports = 32; + dp_qos.resource_limits.max_receive_ports = 32; + dp_qos.resource_limits.local_topic_allocation = 8; + dp_qos.resource_limits.local_type_allocation = 8; + + dp_qos.resource_limits.local_reader_allocation = 8; + dp_qos.resource_limits.local_writer_allocation = 8; + dp_qos.resource_limits.remote_participant_allocation = 16; + dp_qos.resource_limits.remote_reader_allocation = 16; + dp_qos.resource_limits.remote_writer_allocation = 16; + return true; +} + +void 
config_dw_qos(DDS::DataWriterQos &dw_qos) { + dw_qos.resource_limits.max_instances = 500; + dw_qos.resource_limits.max_samples = 500; + dw_qos.resource_limits.max_samples_per_instance = 500; +} + +void config_dr_qos(DDS::DataReaderQos &dr_qos) { + dr_qos.resource_limits.max_instances = 500; + dr_qos.resource_limits.max_samples = 500; + dr_qos.resource_limits.max_samples_per_instance = 500; + dr_qos.reader_resource_limits.max_remote_writers = 16; + dr_qos.reader_resource_limits.max_samples_per_remote_writer = 500; + dr_qos.reader_resource_limits.max_fragmented_samples = 64; + dr_qos.reader_resource_limits.max_fragmented_samples_per_remote_writer = 32; +} + +uint64_t DDS_UInt8Seq_get_length(DDS_OctetSeq * seq) +{ + return seq->length(); +} + +void DDS_UInt8Seq_ensure_length(DDS_OctetSeq * seq, uint64_t length, uint64_t max) +{ + seq->ensure_length(length, max); +} + +unsigned char* DDS_UInt8Seq_get_reference(DDS_OctetSeq * seq, uint64_t index) +{ + return DDS_OctetSeq_get_reference(seq, index); +} + +const unsigned char* DDS_UInt8Seq_get_reference(const DDS_OctetSeq * seq, uint64_t index) +{ + return DDS_OctetSeq_get_reference(seq, index); +} + +void set_instance_color( + std::vector>& vec, + const DDS::InstanceHandle_t handle, + const std::string& color) { + // Check if the handle already exists + for (auto& p : vec) { + if (DDS_InstanceHandle_equals(&p.first, &handle)) { + return; + } + } + // If it doesn't exist, add it + vec.push_back(std::make_pair(handle, color)); +} + +std::string get_instance_color( + const std::vector>& vec, + const DDS::InstanceHandle_t handle) { + for (const auto& p : vec) { + if (DDS_InstanceHandle_equals(&p.first, &handle)) { + return p.second; + } + } + return ""; +} \ No newline at end of file diff --git a/srcCxx/shape_main.cxx b/srcCxx/shape_main.cxx index 0fbcb340..88c092ae 100644 --- a/srcCxx/shape_main.cxx +++ b/srcCxx/shape_main.cxx @@ -19,8 +19,12 @@ #include #include +#include + #if defined(RTI_CONNEXT_DDS) #include "shape_configurator_rti_connext_dds.h" +#elif defined(RTI_CONNEXT_MICRO) +#include "shape_configurator_rti_connext_micro.h" #elif defined(TWINOAKS_COREDX) #include "shape_configurator_toc_coredx_dds.h" #elif defined(OPENDDS) @@ -276,6 +280,11 @@ class ShapeOptions { useconds_t periodic_announcement_period_us; + unsigned int datafrag_size; + char* cft_expression; + + int size_modulo; + public: //------------------------------------------------------------- ShapeOptions() @@ -330,6 +339,11 @@ class ShapeOptions { take_read_next_instance = true; periodic_announcement_period_us = 0; + + datafrag_size = 0; // Default: 0 (means not set) + cft_expression = NULL; + + size_modulo = 0; // 0 means disabled } //------------------------------------------------------------- @@ -338,6 +352,7 @@ class ShapeOptions { STRING_FREE(topic_name); STRING_FREE(color); STRING_FREE(partition); + STRING_FREE(cft_expression); } //------------------------------------------------------------- @@ -403,6 +418,15 @@ class ShapeOptions { printf(" read_next_instance()\n"); printf(" --periodic-announcement : indicates the periodic participant\n"); printf(" announcement period in ms. Default 0 (off)\n"); + printf(" --datafrag-size : set the data fragment size (default: 0, means\n"); + printf(" not set)\n"); + printf(" --cft : ContentFilteredTopic filter expression (quotes\n"); + printf(" required around the expression). 
Cannot be used with\n"); + printf(" -c on subscriber applications\n"); + printf(" --size-modulo : If set, the modulo operation is applied to the\n"); + printf(" shapesize. This will make that shapesize is in the\n"); + printf(" range [1,N]. This only applies if shapesize is\n"); + printf(" increased (-z 0)\n"); } //------------------------------------------------------------- @@ -415,7 +439,7 @@ class ShapeOptions { logger.log_message("please specify publish [-P] or subscribe [-S]", Verbosity::ERROR); return false; } - if ( publish && subscribe ) { + if (publish && subscribe) { logger.log_message("please specify only one of: publish [-P] or subscribe [-S]", Verbosity::ERROR); return false; } @@ -432,6 +456,9 @@ class ShapeOptions { if (publish && take_read_next_instance == false ) { logger.log_message("warning: --take-read ignored on publisher applications", Verbosity::ERROR); } + if (publish && cft_expression != NULL) { + logger.log_message("warning: --cft ignored on publisher applications", Verbosity::ERROR); + } if (subscribe && shapesize != 20) { logger.log_message("warning: shapesize [-z] ignored on subscriber applications", Verbosity::ERROR); } @@ -456,6 +483,27 @@ class ShapeOptions { if (!coherent_set_enabled && !ordered_access_enabled && coherent_set_access_scope_set) { logger.log_message("warning: --access-scope ignored because not coherent, or ordered access enabled", Verbosity::ERROR); } + if (size_modulo > 0 && shapesize != 0) { + logger.log_message("warning: --size-modulo has no effect unless shapesize (-z) is set to 0", Verbosity::ERROR); + } + if (subscribe && color != NULL && cft_expression != NULL) { + logger.log_message("error: cannot specify both --cft and -c for subscriber applications", Verbosity::ERROR); + return false; + } + +#if defined(RTI_CONNEXT_MICRO) + if (subscribe && (color != NULL || cft_expression != NULL)) { + STRING_FREE(color); + color = NULL; + STRING_FREE(cft_expression); + cft_expression = NULL; + logger.log_message("warning: content filtered topic not supported, normal topic used", Verbosity::ERROR); + } + if (subscribe && take_read_next_instance) { + take_read_next_instance = false; + logger.log_message("warning: use of take/read_next_instance() not available, using take/read()", Verbosity::ERROR); + } +#endif return true; } @@ -483,6 +531,9 @@ class ShapeOptions { {"take-read", no_argument, NULL, 'K'}, {"time-filter", required_argument, NULL, 'i'}, {"periodic-announcement", required_argument, NULL, 'N'}, + {"datafrag-size", required_argument, NULL, 'Z'}, + {"cft", required_argument, NULL, 'F'}, + {"size-modulo", required_argument, NULL, 'Q'}, {NULL, 0, NULL, 0 } }; @@ -867,6 +918,38 @@ class ShapeOptions { periodic_announcement_period_us = (useconds_t) converted_param * 1000; break; } + case 'Z': { + unsigned int converted_param = 0; + if (sscanf(optarg, "%u", &converted_param) == 0) { + logger.log_message("unrecognized value for datafrag-size " + + std::string(1, optarg[0]), + Verbosity::ERROR); + parse_ok = false; + } + // the spec mentions that the fragment size must satisfy: + // fragment size <= 65535 bytes. 
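+                    // 0 is the 'not set' sentinel; configure_datafrag_size() only runs later when datafrag_size > 0.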
+ if (converted_param > 65535) { + logger.log_message("incorrect value for datafrag-size, " + "it must be <= 65535 bytes" + + std::to_string(converted_param), + Verbosity::ERROR); + parse_ok = false; + } + datafrag_size = converted_param; + } + case 'F': + cft_expression = strdup(optarg); + break; + case 'Q': { + int converted_param = 0; + if (sscanf(optarg, "%d", &converted_param) == 0 || converted_param < 1) { + logger.log_message("incorrect value for size-modulo, must be >=1", Verbosity::ERROR); + parse_ok = false; + } else { + size_modulo = converted_param; + } + break; + } case '?': parse_ok = false; break; @@ -892,7 +975,9 @@ class ShapeOptions { "\n TimeBasedFilterInterval = " + std::to_string(timebasedfilter_interval_us / 1000) + "ms" + "\n DeadlineInterval = " + std::to_string(deadline_interval_us / 1000) + "ms" + "\n Shapesize = " + std::to_string(shapesize) + - "\n Reading method = " + (use_read ? "read_next_instance" : "take_next_instance") + + "\n Reading method = " + (use_read + ? (take_read_next_instance ? "read_next_instance" : "read") + : (take_read_next_instance ? "take_next_instance" : "take")) + "\n Write period = " + std::to_string(write_period_us / 1000) + "ms" + "\n Read period = " + std::to_string(read_period_us / 1000) + "ms" + "\n Lifespan = " + std::to_string(lifespan_us / 1000) + "ms" + @@ -907,7 +992,8 @@ class ShapeOptions { "\n Final Instance State = " + (unregister ? "Unregister" : (dispose ? "Dispose" : "not specified")) + "\n Periodic Announcement Period = " - + std::to_string(periodic_announcement_period_us / 1000) + "ms", + + std::to_string(periodic_announcement_period_us / 1000) + "ms" + + "\n Data Fragmentation Size = " + std::to_string(datafrag_size) + " bytes", Verbosity::DEBUG); if (topic_name != NULL){ logger.log_message(" Topic = " + std::string(topic_name), @@ -1044,6 +1130,10 @@ class ShapeApplication { pub = NULL; sub = NULL; color = NULL; + + topics = NULL; + drs = NULL; + dws = NULL; } //------------------------------------------------------------- @@ -1072,23 +1162,26 @@ class ShapeApplication { topics[i] = NULL; } - drs = (ShapeTypeDataReader**) malloc(sizeof(ShapeTypeDataReader*) * options->num_topics); - if (drs == NULL) { - logger.log_message("Error allocating memory for DataReaders", Verbosity::ERROR); - return false; - } - for (unsigned int i = 0; i < options->num_topics; ++i) { - drs[i] = NULL; + if (options->publish) { + dws = (ShapeTypeDataWriter**) malloc(sizeof(ShapeTypeDataWriter*) * options->num_topics); + if (dws == NULL) { + logger.log_message("Error allocating memory for DataWriters", Verbosity::ERROR); + return false; + } + for (unsigned int i = 0; i < options->num_topics; ++i) { + dws[i] = NULL; + } + } else { + drs = (ShapeTypeDataReader**) malloc(sizeof(ShapeTypeDataReader*) * options->num_topics); + if (drs == NULL) { + logger.log_message("Error allocating memory for DataReaders", Verbosity::ERROR); + return false; + } + for (unsigned int i = 0; i < options->num_topics; ++i) { + drs[i] = NULL; + } } - dws = (ShapeTypeDataWriter**) malloc(sizeof(ShapeTypeDataWriter*) * options->num_topics); - if (dws == NULL) { - logger.log_message("Error allocating memory for DataWriters", Verbosity::ERROR); - return false; - } - for (unsigned int i = 0; i < options->num_topics; ++i) { - dws[i] = NULL; - } #ifndef OBTAIN_DOMAIN_PARTICIPANT_FACTORY #define OBTAIN_DOMAIN_PARTICIPANT_FACTORY DomainParticipantFactory::get_instance() @@ -1105,9 +1198,40 @@ class ShapeApplication { CONFIGURE_PARTICIPANT_FACTORY #endif - DomainParticipantQos 
dp_qos; +#ifdef RTI_CONNEXT_MICRO + if (!config_micro()) { + logger.log_message("Error configuring Connext Micro", Verbosity::ERROR); + return false; + } +#endif + + DDS::DomainParticipantQos dp_qos; dpf->get_default_participant_qos(dp_qos); + if (options->datafrag_size > 0) { + bool result = false; + #if defined(RTI_CONNEXT_DDS) + result = configure_datafrag_size(dp_qos, options->datafrag_size); + #elif defined(RTI_CONNEXT_MICRO) + result = configure_datafrag_size(options->datafrag_size); + #endif + + if (!result) { + logger.log_message("Error configuring Data Fragmentation Size = " + + std::to_string(options->datafrag_size), Verbosity::ERROR); + return false; + } else { + logger.log_message("Data Fragmentation Size = " + + std::to_string(options->datafrag_size), Verbosity::DEBUG); + } + } + +#ifdef RTI_CONNEXT_MICRO + if (!configure_dp_qos(dp_qos)) { + return false; + } +#endif + #ifdef RTI_CONNEXT_DDS configure_participant_announcements_period(dp_qos, options->periodic_announcement_period_us); #endif @@ -1118,6 +1242,7 @@ class ShapeApplication { return false; } logger.log_message("Participant created", Verbosity::DEBUG); + #ifndef REGISTER_TYPE #define REGISTER_TYPE ShapeTypeTypeSupport::register_type #endif @@ -1139,7 +1264,7 @@ class ShapeApplication { logger.log_message("Topics created:", Verbosity::DEBUG); for (unsigned int i = 0; i < options->num_topics; ++i) { if (logger.verbosity() == Verbosity::DEBUG) { - printf(" topic[%d]=%p\n",i,(void*)topics[i]); + printf(" topic(%d)=%p\n",i,(void*)topics[i]); } } @@ -1193,7 +1318,8 @@ class ShapeApplication { { logger.log_message(" Presentation Access Scope " + QosUtils::to_string(pub_qos.presentation.access_scope) - + std::string(" : Not supported"), Verbosity::ERROR); + + std::string(" : not supported"), Verbosity::ERROR); + return false; } #endif #if defined(INTERCOM_DDS) @@ -1201,22 +1327,31 @@ class ShapeApplication { { logger.log_message(" Coherent Access with Presentation Access Scope " + QosUtils::to_string(pub_qos.presentation.access_scope) - + std::string(" : Not supported"), Verbosity::ERROR); + + std::string(" : not supported"), Verbosity::ERROR); + return false; } #endif } - logger.log_message(" Presentation Coherent Access = " + std::string(pub_qos.presentation.coherent_access ? "true" : "false"), Verbosity::DEBUG); logger.log_message(" Presentation Ordered Access = " + std::string(pub_qos.presentation.ordered_access ? 
"true" : "false"), Verbosity::DEBUG); logger.log_message(" Presentation Access Scope = " + QosUtils::to_string(pub_qos.presentation.access_scope), Verbosity::DEBUG); - #else - logger.log_message(" Presentation Coherent Access = Not supported", Verbosity::ERROR); - logger.log_message(" Presentation Ordered Access = Not supported", Verbosity::ERROR); - logger.log_message(" Presentation Access Scope = Not supported", Verbosity::ERROR); + if (options->coherent_set_enabled) { + logger.log_message(" Presentation Coherent Access = not supported", Verbosity::ERROR); + return false; + } + if (options->ordered_access_enabled) { + logger.log_message(" Presentation Ordered Access = not supported", Verbosity::ERROR); + return false; + } + if ((options->coherent_set_enabled || options->ordered_access_enabled) + && (options->coherent_set_access_scope != INSTANCE_PRESENTATION_QOS)) { + logger.log_message(" Presentation Access Scope = not supported", Verbosity::ERROR); + return false; + } #endif pub = dp->create_publisher(pub_qos, NULL, LISTENER_STATUS_MASK_NONE); @@ -1227,12 +1362,26 @@ class ShapeApplication { logger.log_message("Publisher created", Verbosity::DEBUG); logger.log_message("Data Writer QoS:", Verbosity::DEBUG); pub->get_default_datawriter_qos( dw_qos ); + +#if defined (RTI_CONNEXT_MICRO) + config_dw_qos(dw_qos); +#endif + dw_qos.reliability FIELD_ACCESSOR.kind = options->reliability_kind; logger.log_message(" Reliability = " + QosUtils::to_string(dw_qos.reliability FIELD_ACCESSOR.kind), Verbosity::DEBUG); dw_qos.durability FIELD_ACCESSOR.kind = options->durability_kind; +#if defined(RTI_CONNEXT_MICRO) + if (dw_qos.durability FIELD_ACCESSOR.kind == TRANSIENT_DURABILITY_QOS) { + logger.log_message(" Durability = TRANSIENT_DURABILITY_QOS : not supported", Verbosity::ERROR); + return false; + } else if (dw_qos.durability FIELD_ACCESSOR.kind == PERSISTENT_DURABILITY_QOS) { + logger.log_message(" Durability = PERSISTENT_DURABILITY_QOS : not supported", Verbosity::ERROR); + return false; + } +#endif logger.log_message(" Durability = " + QosUtils::to_string(dw_qos.durability FIELD_ACCESSOR.kind), Verbosity::DEBUG); -#if defined(RTI_CONNEXT_DDS) +#if defined(RTI_CONNEXT_DDS) || defined (RTI_CONNEXT_MICRO) DataRepresentationIdSeq data_representation_seq; data_representation_seq.ensure_length(1,1); data_representation_seq[0] = options->data_representation; @@ -1279,9 +1428,9 @@ class ShapeApplication { dw_qos.deadline FIELD_ACCESSOR.period.SECONDS_FIELD_NAME = options->deadline_interval_us / 1000000; dw_qos.deadline FIELD_ACCESSOR.period.nanosec = (options->deadline_interval_us % 1000000) * 1000; } - logger.log_message(" DeadlinePeriod = " + std::to_string(dw_qos.deadline FIELD_ACCESSOR.period.SECONDS_FIELD_NAME) + "secs", + logger.log_message(" DeadlinePeriod = " + std::to_string(dw_qos.deadline FIELD_ACCESSOR.period.SECONDS_FIELD_NAME) + " secs", Verbosity::DEBUG); - logger.log_message(" " + std::to_string(dw_qos.deadline FIELD_ACCESSOR.period.nanosec) + "nanosecs", + logger.log_message(" " + std::to_string(dw_qos.deadline FIELD_ACCESSOR.period.nanosec) + " nanosecs", Verbosity::DEBUG); // options->history_depth < 0 means leave default value @@ -1298,38 +1447,39 @@ class ShapeApplication { } if (options->lifespan_us > 0) { -#if defined(RTI_CONNEXT_DDS) || defined(OPENDDS) || defined(TWINOAKS_COREDX) || defined(INTERCOM_DDS) +#if defined (RTI_CONNEXT_MICRO) + logger.log_message(" Lifespan = not supported", Verbosity::ERROR); + return false; +#elif defined(RTI_CONNEXT_DDS) || defined(OPENDDS) || 
defined(TWINOAKS_COREDX) || defined(INTERCOM_DDS) dw_qos.lifespan FIELD_ACCESSOR.duration.SECONDS_FIELD_NAME = options->lifespan_us / 1000000; dw_qos.lifespan FIELD_ACCESSOR.duration.nanosec = (options->lifespan_us % 1000000) * 1000; #elif defined(EPROSIMA_FAST_DDS) dw_qos.lifespan FIELD_ACCESSOR.duration = Duration_t(options->lifespan_us * 1e-6); #endif } - logger.log_message(" Lifespan = " + std::to_string(dw_qos.lifespan FIELD_ACCESSOR.duration.SECONDS_FIELD_NAME) + " secs", Verbosity::DEBUG); - logger.log_message(" " + std::to_string(dw_qos.lifespan FIELD_ACCESSOR.duration.nanosec) + " nanosecs", Verbosity::DEBUG); +#if !defined(RTI_CONNEXT_MICRO) + logger.log_message(" Lifespan = " + std::to_string(dw_qos.lifespan FIELD_ACCESSOR.duration.SECONDS_FIELD_NAME) + " secs", + Verbosity::DEBUG); + logger.log_message(" " + std::to_string(dw_qos.lifespan FIELD_ACCESSOR.duration.nanosec) + " nanosecs", + Verbosity::DEBUG); +#endif #if defined(RTI_CONNEXT_DDS) - // usage of large data - if (PropertyQosPolicyHelper::assert_property( - dw_qos.property, - "dds.data_writer.history.memory_manager.fast_pool.pool_buffer_max_size", - "65536", - DDS_BOOLEAN_FALSE) != DDS_RETCODE_OK) { - logger.log_message("failed to set property pool_buffer_max_size", Verbosity::ERROR); - } if (options->additional_payload_size > 64000) { - dw_qos.publish_mode.kind = ASYNCHRONOUS_PUBLISH_MODE_QOS; + configure_large_data(dw_qos); } logger.log_message(" Publish Mode kind = " + std::string(dw_qos.publish_mode.kind == ASYNCHRONOUS_PUBLISH_MODE_QOS ? "ASYNCHRONOUS_PUBLISH_MODE_QOS" : "SYNCHRONOUS_PUBLISH_MODE_QOS"), Verbosity::DEBUG); #endif +#if !defined(RTI_CONNEXT_MICRO) if (options->unregister) { dw_qos.writer_data_lifecycle FIELD_ACCESSOR .autodispose_unregistered_instances = DDS_BOOLEAN_FALSE; } logger.log_message(" Autodispose_unregistered_instances = " + std::string(dw_qos.writer_data_lifecycle FIELD_ACCESSOR .autodispose_unregistered_instances ? "true" : "false"), Verbosity::DEBUG); +#endif // Create different DataWriters (depending on the number of entities) // The DWs are attached to the same array index of the topics. 
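For Connext DDS, the inline large-data setup above is now delegated to configure_large_data(), which is defined in shape_configurator_rti_connext_dds.h and is not part of this diff. A minimal sketch, assuming the helper simply keeps the behavior of the removed inline block (raise the writer's fast-pool buffer size and switch to asynchronous publishing) and that the Connext C++ headers are already pulled in by the configurator; the signature and error handling below are illustrative only:

/* Illustrative sketch only: the real configure_large_data() lives in
 * shape_configurator_rti_connext_dds.h and may differ. */
static void configure_large_data(DDS::DataWriterQos &dw_qos)
{
    /* Allow samples larger than the default pool buffer
     * (the removed inline code asserted 64 KiB). */
    if (PropertyQosPolicyHelper::assert_property(
            dw_qos.property,
            "dds.data_writer.history.memory_manager.fast_pool.pool_buffer_max_size",
            "65536",
            DDS_BOOLEAN_FALSE) != DDS_RETCODE_OK) {
        printf("failed to set property pool_buffer_max_size\n");
    }
    /* Write large payloads asynchronously, as the removed code did. */
    dw_qos.publish_mode.kind = ASYNCHRONOUS_PUBLISH_MODE_QOS;
}

As the hunk above shows, shape_main.cxx only invokes the helper when options->additional_payload_size > 64000.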
@@ -1345,7 +1495,7 @@ class ShapeApplication { logger.log_message("DataWriters created:", Verbosity::DEBUG); for (unsigned int i = 0; i < options->num_topics; ++i) { if (logger.verbosity() == Verbosity::DEBUG) { - printf(" dws[%d]=%p\n",i,(void*)dws[i]); + printf(" dws(%d)=%p\n",i,(void*)dws[i]); } } @@ -1392,7 +1542,8 @@ class ShapeApplication { { logger.log_message(" Presentation Access Scope " + QosUtils::to_string(sub_qos.presentation.access_scope) - + std::string(" : Not supported"), Verbosity::ERROR); + + std::string(" : not supported"), Verbosity::ERROR); + return false; } #endif #if defined(INTERCOM_DDS) @@ -1400,7 +1551,8 @@ class ShapeApplication { { logger.log_message(" Coherent Access with Presentation Access Scope " + QosUtils::to_string(sub_qos.presentation.access_scope) - + std::string(" : Not supported"), Verbosity::ERROR); + + std::string(" : not supported"), Verbosity::ERROR); + return false; } #endif } @@ -1413,9 +1565,19 @@ class ShapeApplication { QosUtils::to_string(sub_qos.presentation.access_scope), Verbosity::DEBUG); #else - logger.log_message(" Presentation Coherent Access = Not supported", Verbosity::ERROR); - logger.log_message(" Presentation Ordered Access = Not supported", Verbosity::ERROR); - logger.log_message(" Presentation Access Scope = Not supported", Verbosity::ERROR); + if (options->coherent_set_enabled) { + logger.log_message(" Presentation Coherent Access = not supported", Verbosity::ERROR); + return false; + } + if (options->ordered_access_enabled) { + logger.log_message(" Presentation Ordered Access = not supported", Verbosity::ERROR); + return false; + } + if ((options->coherent_set_enabled || options->ordered_access_enabled) + && (options->coherent_set_access_scope != INSTANCE_PRESENTATION_QOS)) { + logger.log_message(" Presentation Access Scope = not supported", Verbosity::ERROR); + return false; + } #endif sub = dp->create_subscriber( sub_qos, NULL, LISTENER_STATUS_MASK_NONE ); @@ -1423,15 +1585,30 @@ class ShapeApplication { logger.log_message("failed to create subscriber", Verbosity::ERROR); return false; } + logger.log_message("Subscriber created", Verbosity::DEBUG); logger.log_message("Data Reader QoS:", Verbosity::DEBUG); sub->get_default_datareader_qos( dr_qos ); + +#if defined (RTI_CONNEXT_MICRO) + config_dr_qos(dr_qos); +#endif + dr_qos.reliability FIELD_ACCESSOR.kind = options->reliability_kind; logger.log_message(" Reliability = " + QosUtils::to_string(dr_qos.reliability FIELD_ACCESSOR.kind), Verbosity::DEBUG); dr_qos.durability FIELD_ACCESSOR.kind = options->durability_kind; +#if defined(RTI_CONNEXT_MICRO) + if (dr_qos.durability FIELD_ACCESSOR.kind == TRANSIENT_DURABILITY_QOS) { + logger.log_message(" Durability = TRANSIENT_DURABILITY_QOS : not supported", Verbosity::ERROR); + return false; + } else if (dr_qos.durability FIELD_ACCESSOR.kind == PERSISTENT_DURABILITY_QOS) { + logger.log_message(" Durability = PERSISTENT_DURABILITY_QOS : not supported", Verbosity::ERROR); + return false; + } +#endif logger.log_message(" Durability = " + QosUtils::to_string(dr_qos.durability FIELD_ACCESSOR.kind), Verbosity::DEBUG); -#if defined(RTI_CONNEXT_DDS) +#if defined(RTI_CONNEXT_DDS) || defined (RTI_CONNEXT_MICRO) DataRepresentationIdSeq data_representation_seq; data_representation_seq.ensure_length(1,1); data_representation_seq[0] = options->data_representation; @@ -1464,20 +1641,27 @@ class ShapeApplication { dr_qos.ownership FIELD_ACCESSOR.kind = EXCLUSIVE_OWNERSHIP_QOS; } logger.log_message(" Ownership = " + 
QosUtils::to_string(dr_qos.ownership FIELD_ACCESSOR.kind), Verbosity::DEBUG); + + if ( options->timebasedfilter_interval_us > 0) { -#if defined(EPROSIMA_FAST_DDS) - logger.log_message(" Time based filter not supported", Verbosity::ERROR); +#if defined(EPROSIMA_FAST_DDS) || defined(RTI_CONNEXT_MICRO) + logger.log_message(" TimeBasedFilter = not supported", Verbosity::ERROR); + return false; #else dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.SECONDS_FIELD_NAME = options->timebasedfilter_interval_us / 1000000; dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.nanosec = (options->timebasedfilter_interval_us % 1000000) * 1000; #endif } + +#if !defined(EPROSIMA_FAST_DDS) && !defined(RTI_CONNEXT_MICRO) logger.log_message(" TimeBasedFilter = " + - std::to_string(dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.SECONDS_FIELD_NAME) + "secs", - Verbosity::DEBUG); + std::to_string(dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.SECONDS_FIELD_NAME) + "secs", + Verbosity::DEBUG); logger.log_message(" " + - std::to_string(dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.nanosec) + "nanosecs", - Verbosity::DEBUG); + std::to_string(dr_qos.time_based_filter FIELD_ACCESSOR.minimum_separation.nanosec) + "nanosecs", + Verbosity::DEBUG); +#endif + if ( options->deadline_interval_us > 0 ) { dr_qos.deadline FIELD_ACCESSOR.period.SECONDS_FIELD_NAME = options->deadline_interval_us / 1000000; dr_qos.deadline FIELD_ACCESSOR.period.nanosec = (options->deadline_interval_us % 1000000) * 1000;; @@ -1500,56 +1684,69 @@ class ShapeApplication { logger.log_message(" HistoryDepth = " + std::to_string(dr_qos.history FIELD_ACCESSOR.depth), Verbosity::DEBUG); } - if ( options->color != NULL ) { + if ( options->cft_expression != NULL || options->color != NULL) { + /* For Connext Micro color and cft_expression will be always NULL */ +#if !defined(RTI_CONNEXT_MICRO) /* filter on specified color */ ContentFilteredTopic *cft = NULL; - StringSeq cf_params; + StringSeq cf_params; - for (unsigned int i = 0; i < options->num_topics; ++i) { - const std::string filtered_topic_name_str = + for (unsigned int i = 0; i < options->num_topics; ++i) { + const std::string filtered_topic_name_str = std::string(options->topic_name) + (i > 0 ? 
std::to_string(i) : "") + "_filtered"; - const char* filtered_topic_name = filtered_topic_name_str.c_str(); -#if defined(RTI_CONNEXT_DDS) - char parameter[64]; - snprintf(parameter, 64, "'%s'", options->color); - StringSeq_push(cf_params, parameter); + const char* filtered_topic_name = filtered_topic_name_str.c_str(); + const char* filter_expr = nullptr; + + if (options->cft_expression != NULL) { + filter_expr = options->cft_expression; + cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], filter_expr, cf_params); + logger.log_message(" ContentFilterTopic = \"" + std::string(filter_expr) + "\"", Verbosity::DEBUG); + } else if (options->color != NULL) { + #if defined(RTI_CONNEXT_DDS) + char parameter[64]; + snprintf(parameter, 64, "'%s'", options->color); + StringSeq_push(cf_params, parameter); + + cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color MATCH %0", cf_params); + logger.log_message(" ContentFilterTopic = \"color MATCH " + + std::string(parameter) + std::string("\""), Verbosity::DEBUG); + #elif defined(INTERCOM_DDS) + char parameter[64]; + snprintf(parameter, 64, "'%s'", options->color); + StringSeq_push(cf_params, parameter); + + cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); + logger.log_message(" ContentFilterTopic = \"color = " + + std::string(parameter) + std::string("\""), Verbosity::DEBUG); + #elif defined(TWINOAKS_COREDX) || defined(OPENDDS) + StringSeq_push(cf_params, options->color); + cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); + logger.log_message(" ContentFilterTopic = \"color = " + + std::string(options->color) + std::string("\""), Verbosity::DEBUG); + #elif defined(EPROSIMA_FAST_DDS) + cf_params.push_back(std::string("'") + options->color + std::string("'")); + cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); + logger.log_message(" ContentFilterTopic = \"color = " + + cf_params[0] + std::string("\""), Verbosity::DEBUG); + #endif + } - cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color MATCH %0", cf_params); - logger.log_message(" ContentFilterTopic = \"color MATCH " - + std::string(parameter) + std::string("\""), Verbosity::DEBUG); -#elif defined(INTERCOM_DDS) - char parameter[64]; - snprintf(parameter, 64, "'%s'", options->color); - StringSeq_push(cf_params, parameter); - - cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); - logger.log_message(" ContentFilterTopic = \"color = " - + std::string(parameter) + std::string("\""), Verbosity::DEBUG); -#elif defined(TWINOAKS_COREDX) || defined(OPENDDS) - StringSeq_push(cf_params, options->color); - cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); - logger.log_message(" ContentFilterTopic = \"color = " - + std::string(options->color) + std::string("\""), Verbosity::DEBUG); -#elif defined(EPROSIMA_FAST_DDS) - cf_params.push_back(std::string("'") + options->color + std::string("'")); - cft = dp->create_contentfilteredtopic(filtered_topic_name, topics[i], "color = %0", cf_params); - logger.log_message(" ContentFilterTopic = \"color = " - + cf_params[0] + std::string("\""), Verbosity::DEBUG); -#endif if (cft == NULL) { logger.log_message("failed to create content filtered topic", Verbosity::ERROR); return false; } - printf("Create reader for topic: %s color: %s\n", cft->get_name() NAME_ACCESSOR, options->color ); + 
printf("Create reader for topic: %s\n", cft->get_name() NAME_ACCESSOR); drs[i] = dynamic_cast(sub->create_datareader(cft, dr_qos, NULL, LISTENER_STATUS_MASK_NONE)); if (drs[i] == NULL) { logger.log_message("failed to create datareader[" + std::to_string(i) + "] topic: " + topics[i]->get_name(), Verbosity::ERROR); return false; } } +#endif + } else { // Create different DataReaders (depending on the number of entities) // The DRs are attached to the same array index of the topics. @@ -1565,7 +1762,7 @@ class ShapeApplication { logger.log_message("DataReaders created:", Verbosity::DEBUG); for (unsigned int i = 0; i < options->num_topics; ++i) { if (logger.verbosity() == Verbosity::DEBUG) { - printf(" drs[%d]=%p\n",i,(void*)drs[i]); + printf(" drs(%d)=%p\n",i,(void*)drs[i]); } } @@ -1584,7 +1781,7 @@ class ShapeApplication { static void shape_initialize_w_color(ShapeType &shape, const char * color_value) { -#if defined(RTI_CONNEXT_DDS) +#if defined(RTI_CONNEXT_DDS) || defined(RTI_CONNEXT_MICRO) ShapeType_initialize(&shape); #endif @@ -1616,13 +1813,15 @@ class ShapeApplication { #if defined(EPROSIMA_FAST_DDS) // TODO: Remove when Fast DDS supports `get_key_value()` std::map instance_handle_color; +#elif defined(RTI_CONNEXT_MICRO) + std::vector> instance_handle_color; #endif while ( ! all_done ) { ReturnCode_t retval; SampleInfoSeq sample_infos; -#if defined(RTI_CONNEXT_DDS) || defined(OPENDDS) || defined(INTERCOM_DDS) +#if defined(RTI_CONNEXT_DDS) || defined(RTI_CONNEXT_MICRO) || defined(OPENDDS) || defined(INTERCOM_DDS) ShapeTypeSeq samples; #elif defined(TWINOAKS_COREDX) ShapeTypePtrSeq samples; @@ -1631,6 +1830,7 @@ class ShapeApplication { DataSeq samples; #endif +#if defined(RTI_CONNEXT_DDS) || defined(TWINOAKS_COREDX) || defined(INTERCOM_DDS) if (options->coherent_set_enabled) { printf("Reading coherent sets, iteration %d\n",n); } @@ -1640,12 +1840,14 @@ class ShapeApplication { if (options->coherent_set_enabled || options->ordered_access_enabled) { sub->begin_access(); } +#endif for (unsigned int i = 0; i < options->num_topics; ++i) { previous_handles[i] = HANDLE_NIL; do { if (!options->use_read) { if (options->take_read_next_instance) { logger.log_message("Calling take_next_instance() function", Verbosity::DEBUG); +#if !defined(RTI_CONNEXT_MICRO) retval = drs[i]->take_next_instance ( samples, sample_infos, LENGTH_UNLIMITED, @@ -1653,6 +1855,7 @@ class ShapeApplication { ANY_SAMPLE_STATE, ANY_VIEW_STATE, ANY_INSTANCE_STATE ); +#endif } else { logger.log_message("Calling take() function", Verbosity::DEBUG); retval = drs[i]->take ( samples, @@ -1664,6 +1867,7 @@ class ShapeApplication { } } else { /* Use read_next_instance*/ if (options->take_read_next_instance) { +#if !defined(RTI_CONNEXT_MICRO) logger.log_message("Calling read_next_instance() function", Verbosity::DEBUG); retval = drs[i]->read_next_instance ( samples, sample_infos, @@ -1672,6 +1876,7 @@ class ShapeApplication { ANY_SAMPLE_STATE, ANY_VIEW_STATE, ANY_INSTANCE_STATE ); +#endif } else { logger.log_message("Calling read() function", Verbosity::DEBUG); retval = drs[i]->read ( samples, @@ -1694,7 +1899,7 @@ class ShapeApplication { for (decltype(n_samples) n_sample = 0; n_sample < n_samples; n_sample++) { logger.log_message("Processing sample " + std::to_string(n_sample), Verbosity::DEBUG); -#if defined(RTI_CONNEXT_DDS) +#if defined(RTI_CONNEXT_DDS) || defined(RTI_CONNEXT_MICRO) ShapeType *sample = &samples[n_sample]; SampleInfo *sample_info = &sample_infos[n_sample]; #elif defined(TWINOAKS_COREDX) @@ -1718,18 +1923,26 @@ 
class ShapeApplication { #else if (DDS_UInt8Seq_get_length(&sample->additional_payload_size FIELD_ACCESSOR) > 0) { int additional_payload_index = DDS_UInt8Seq_get_length(&sample->additional_payload_size FIELD_ACCESSOR) - 1; - printf(" {%u}", sample->additional_payload_size FIELD_ACCESSOR [additional_payload_index]); + printf(" {%u}", sample->additional_payload_size FIELD_ACCESSOR [additional_payload_index]); } #endif printf("\n"); #if defined(EPROSIMA_FAST_DDS) instance_handle_color[sample_info->instance_handle] = sample->color FIELD_ACCESSOR STRING_IN; +#elif defined(RTI_CONNEXT_MICRO) + set_instance_color(instance_handle_color, sample_info->instance_handle, sample->color); #endif - } else { + } + + if (sample_info->instance_state != ALIVE_INSTANCE_STATE) { ShapeType shape_key; shape_initialize_w_color(shape_key, NULL); + #if defined(EPROSIMA_FAST_DDS) shape_key.color FIELD_ACCESSOR = instance_handle_color[sample_info->instance_handle] NAME_ACCESSOR; +#elif defined(RTI_CONNEXT_MICRO) + // 128 is the max length of the color string + strncpy(shape_key.color, get_instance_color(instance_handle_color, sample_info->instance_handle).c_str(), 128); #else drs[i]->get_key_value(shape_key, sample_info->instance_handle); #endif @@ -1745,7 +1958,7 @@ class ShapeApplication { } } -#if defined(RTI_CONNEXT_DDS) || defined(OPENDDS) || defined(EPROSIMA_FAST_DDS) || defined(INTERCOM_DDS) +#if defined(RTI_CONNEXT_DDS) || defined(RTI_CONNEXT_MICRO) || defined(OPENDDS) || defined(EPROSIMA_FAST_DDS) || defined(INTERCOM_DDS) previous_handles[i] = sample_infos[0].instance_handle; #elif defined(TWINOAKS_COREDX) previous_handles[i] = sample_infos[0]->instance_handle; @@ -1756,9 +1969,11 @@ class ShapeApplication { } while (retval == RETCODE_OK); } +#if defined(RTI_CONNEXT_DDS) || defined(TWINOAKS_COREDX) || defined(INTERCOM_DDS) if (options->coherent_set_enabled || options->ordered_access_enabled) { sub->end_access(); } +#endif // increasing number of iterations n++; @@ -1831,9 +2046,16 @@ class ShapeApplication { moveShape(&shape); if (options->shapesize == 0) { - shape.shapesize FIELD_ACCESSOR += 1; + if (options->size_modulo > 0) { + // Size cannot be 0, so increase it after modulo operation + shape.shapesize FIELD_ACCESSOR = + (shape.shapesize FIELD_ACCESSOR % options->size_modulo) + 1; + } else { + shape.shapesize FIELD_ACCESSOR += 1; + } } +#if !defined(RTI_CONNEXT_MICRO) if (options->coherent_set_enabled || options->ordered_access_enabled) { // n also represents the number of samples written per publisher per instance if (options->coherent_set_sample_count != 0 && n % options->coherent_set_sample_count == 0) { @@ -1841,6 +2063,7 @@ class ShapeApplication { pub->begin_coherent_changes(); } } +#endif for (unsigned int i = 0; i < options->num_topics; ++i) { for (unsigned int j = 0; j < options->num_instances; ++j) { @@ -1850,7 +2073,7 @@ class ShapeApplication { shape_set_color(shape, instance_color.c_str()); } -#if defined(RTI_CONNEXT_DDS) || defined(OPENDDS) || defined(INTERCOM_DDS) || defined(TWINOAKS_COREDX) +#if defined(RTI_CONNEXT_DDS) || defined(RTI_CONNEXT_MICRO) || defined(OPENDDS) || defined(INTERCOM_DDS) || defined(TWINOAKS_COREDX) dws[i]->write( shape, HANDLE_NIL ); #elif defined(EPROSIMA_FAST_DDS) dws[i]->write( &shape, HANDLE_NIL ); @@ -1871,6 +2094,7 @@ class ShapeApplication { } } +#if !defined(RTI_CONNEXT_MICRO) if (options->coherent_set_enabled || options->ordered_access_enabled) { // n also represents the number of samples written per publisher per instance if 
(options->coherent_set_sample_count != 0 @@ -1879,6 +2103,7 @@ class ShapeApplication { pub->end_coherent_changes(); } } +#endif usleep(options->write_period_us); // increase number of iterations @@ -1922,13 +2147,15 @@ class ShapeApplication { /* ensure that all updates have been acked by reader[s] */ /* otherwise the app may terminate before reader has seen all updates */ -#if defined(RTI_CONNEXT_DDS) || defined (OPENDDS) +#if defined(RTI_CONNEXT_DDS) || defined (RTI_CONNEXT_MICRO) || defined (OPENDDS) Duration_t max_wait = {1, 0}; /* should not take long... */ #else Duration_t max_wait( 1, 0 ); /* should not take long... */ #endif for (unsigned int i = 0; i < options->num_topics; ++i) { +#if !defined(RTI_CONNEXT_MICRO) dws[i]->wait_for_acknowledgments( max_wait ); +#endif } return true; diff --git a/srcRs/DustDDS/Cargo.lock b/srcRs/DustDDS/Cargo.lock index 69cc90dc..d545fa54 100644 --- a/srcRs/DustDDS/Cargo.lock +++ b/srcRs/DustDDS/Cargo.lock @@ -1,21 +1,12 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 - -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] +version = 4 [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", @@ -28,43 +19,55 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", ] [[package]] name = "bitflags" -version = "2.6.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "block-buffer" @@ -76,25 +79,29 @@ dependencies = [ ] [[package]] -name = "byteorder" -version = "1.5.0" +name = "block2" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] [[package]] name = "cc" -version = "1.1.15" +version = "1.2.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6" +checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07" dependencies = [ + "find-msvc-tools", "shlex", ] [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" @@ -104,9 +111,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "clap" -version = "4.5.16" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", @@ -114,9 +121,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", @@ -126,9 +133,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck", "proc-macro2", @@ -138,30 +145,45 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "concurrent-queue" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] [[package]] name = "cpufeatures" -version = "0.2.13" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad" +checksum = 
"59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "typenum", @@ -169,12 +191,13 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.4.5" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" +checksum = "73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790" dependencies = [ + "dispatch2", "nix", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -187,36 +210,38 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "dispatch2" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags", + "block2", + "libc", + "objc2", +] + [[package]] name = "dust_dds" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee155270750643d8586cfe5d0e7cdf148cf4754e268f361a977cd1c6b23f5979" +checksum = "e0c41e5b28f155aec2860a5485db193485e57649bfce59258aa02e8b4e93f778" dependencies = [ + "async-lock", "dust_dds_derive", - "fnmatch-regex", "md5", "network-interface", + "regex", "socket2", "tracing", ] -[[package]] -name = "dust_dds__shape_main_linux" -version = "0.1.0" -dependencies = [ - "clap", - "ctrlc", - "dust_dds", - "dust_dds_gen", - "rand", -] - [[package]] name = "dust_dds_derive" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2f6c1e0113699edc14a6a2700fd1d3979f14f0c6a4b80006d7805bc158abdde" +checksum = "ad07e59a9f8ff513bba952dfa82c4667e6162ba5b1e159e020845eded535b489" dependencies = [ "proc-macro2", "quote", @@ -226,31 +251,52 @@ dependencies = [ [[package]] name = "dust_dds_gen" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5c4218bce243cfa7c272d0ef8775b09b75c9a918d9a031434263385047b369c" +checksum = "916c6f88f9d4989e4d62d0dd51717f28376c44f4402dee54a122efcb4f65cc98" dependencies = [ "pest", "pest_derive", ] [[package]] -name = "either" -version = "1.13.0" +name = "dust_dds_shape_main_linux" +version = "0.1.0" +dependencies = [ + "clap", + "ctrlc", + "dust_dds", + "dust_dds_gen", + "rand", +] + +[[package]] +name = "event-listener" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] [[package]] -name = "fnmatch-regex" -version = "0.2.0" +name = "event-listener-strategy" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d36d4d9558f46504ee286d7211e463d813cee1b778976c8bde5e3270ac67186" +checksum = 
"8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "itertools", - "quick-error", - "regex", + "event-listener", + "pin-project-lite", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + [[package]] name = "generic-array" version = "0.14.7" @@ -263,9 +309,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", @@ -280,24 +326,15 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "md5" @@ -307,9 +344,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "network-interface" @@ -325,9 +362,9 @@ dependencies = [ [[package]] name = "nix" -version = "0.29.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ "bitflags", "cfg-if", @@ -336,27 +373,47 @@ dependencies = [ ] [[package]] -name = "once_cell" -version = "1.19.0" +name = "objc2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "parking" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name 
= "pest" -version = "2.7.11" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" dependencies = [ "memchr", - "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.11" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" dependencies = [ "pest", "pest_generator", @@ -364,9 +421,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.11" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" dependencies = [ "pest", "pest_meta", @@ -377,50 +434,43 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.7.11" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" dependencies = [ - "once_cell", "pest", "sha2", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quote" -version = "1.0.37" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] @@ -457,38 +507,34 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ - "aho-corasick", - "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = 
"5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ - "aho-corasick", - "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -503,9 +549,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", @@ -519,9 +565,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.76" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", "quote", @@ -530,18 +576,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", @@ -550,9 +596,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -561,9 +607,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", @@ -572,30 +618,27 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" -dependencies = [ - "once_cell", -] +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" [[package]] name = "typenum" -version = "1.17.0" +version = "1.19.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "utf8parse" @@ -611,9 +654,9 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "winapi" @@ -637,6 +680,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.52.0" @@ -648,11 +697,11 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets", + "windows-link", ] [[package]] @@ -721,25 +770,24 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "xml-rs" -version = "0.8.21" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "539a77ee7c0de333dcc6da69b177380a0b81e0dacfa4f7344c465a36871ee601" +checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "43fa6694ed34d6e57407afbccdeecfa268c470a7d2a5b0cf49ce9fcc345afb90" dependencies = [ - "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "c640b22cd9817fae95be82f0d2f90b11f7605f6c319d16705c459b27ac2cbc26" dependencies = [ "proc-macro2", "quote", diff --git a/srcRs/DustDDS/Cargo.toml b/srcRs/DustDDS/Cargo.toml index 6f477a4d..7b824806 100644 --- a/srcRs/DustDDS/Cargo.toml +++ b/srcRs/DustDDS/Cargo.toml @@ -1,19 +1,23 @@ [package] -name = "dust_dds__shape_main_linux" +name = "dust_dds_shape_main_linux" version = "0.1.0" +description = "OMG DDS_RTPS Interoperability" +edition = 
"2024" +rust-version = "1.85" authors = [ - "Joao Rebelo ", - "Stefan Kimmer ", + "Joao Rebelo ", + "Stefan Kimmer ", ] -license = "Apache-2.0" -edition = "2021" -description = "OMG DDS_RTPS Interoperability" +homepage = "https://s2e-systems.com/products/dust-dds" +repository = "https://github.com/s2e-systems/dust-dds.git" +readme = false +publish = false [dependencies] -dust_dds = { version = "0.11.0" } -clap = { version = "4.4.11", features = ["derive", "string"] } +clap = { version = "4.5.47", features = ["derive", "string"] } rand = "0.8.5" ctrlc = "3.4" +dust_dds = "0.14.0" [build-dependencies] -dust_dds_gen = { version = "0.11.0" } \ No newline at end of file +dust_dds_gen = "0.14.0" diff --git a/srcRs/DustDDS/build.rs b/srcRs/DustDDS/build.rs index ca1d6f67..f30a196e 100644 --- a/srcRs/DustDDS/build.rs +++ b/srcRs/DustDDS/build.rs @@ -10,9 +10,8 @@ fn main() { let cargo_target_path = Path::new(&cargo_target_dir); let cargo_manifest_path = Path::new(&cargo_manifest_dir); let build_path = cargo_target_path.join("idl"); - let idl_path = cargo_manifest_path.join("..").join("..").join("srcCxx").join("shape.idl"); - let idl_src = std::fs::read_to_string(idl_path).expect("Couldn't read IDL source file!"); - let compiled_idl = dust_dds_gen::compile_idl(&idl_src).expect("Couldn't parse IDL file"); + let idl_path = cargo_manifest_path.join("..").join("..").join("srcCxx").join("shape.idl"); + let compiled_idl = dust_dds_gen::compile_idl(&idl_path).expect("Couldn't parse IDL file"); let compiled_idl_path = build_path.as_path().join("shape.rs"); fs::create_dir_all(build_path).expect("Creating build path failed"); let mut file = File::create(compiled_idl_path).expect("Failed to create file"); diff --git a/srcRs/DustDDS/src/main.rs b/srcRs/DustDDS/src/main.rs index c94be0e1..2b69116f 100644 --- a/srcRs/DustDDS/src/main.rs +++ b/srcRs/DustDDS/src/main.rs @@ -1,6 +1,7 @@ -use clap::Parser; +use clap::{Parser, ValueEnum}; use ctrlc; use dust_dds::{ + dds_async::topic::TopicAsync, domain::{ domain_participant::DomainParticipant, domain_participant_factory::DomainParticipantFactory, @@ -13,19 +14,19 @@ use dust_dds::{ self, DataRepresentationQosPolicy, DurabilityQosPolicy, HistoryQosPolicy, HistoryQosPolicyKind, OwnershipQosPolicy, OwnershipQosPolicyKind, OwnershipStrengthQosPolicy, PartitionQosPolicy, ReliabilityQosPolicy, - XCDR2_DATA_REPRESENTATION, XCDR_DATA_REPRESENTATION, + XCDR_DATA_REPRESENTATION, XCDR2_DATA_REPRESENTATION, }, - status::{InconsistentTopicStatus, StatusKind, NO_STATUS}, - time::{Duration, DurationKind}, - }, - publication::data_writer::DataWriter, - subscription::{ - data_reader::DataReader, sample_info::{ANY_INSTANCE_STATE, ANY_SAMPLE_STATE, ANY_VIEW_STATE}, + status::{InconsistentTopicStatus, NO_STATUS, StatusKind}, + time::DurationKind, }, - topic_definition::topic::Topic, + listener::NO_LISTENER, + publication::data_writer::DataWriter, + runtime::DdsRuntime, + std_runtime::StdRuntime, + subscription::data_reader::DataReader, }; -use rand::{random, thread_rng, Rng}; +use rand::{Rng, random, thread_rng}; use std::{ fmt::Debug, io::Write, @@ -33,8 +34,18 @@ use std::{ sync::mpsc::Receiver, }; -// ShapeType generated by build.rs from idl include!(concat!(env!("OUT_DIR"), "/idl/shape.rs")); +impl Clone for ShapeType { + fn clone(&self) -> Self { + Self { + color: self.color.clone(), + x: self.x, + y: self.y, + shapesize: self.shapesize, + additional_payload_size: self.additional_payload_size.clone(), + } + } +} fn qos_policy_name(id: i32) -> String { match id { @@ -65,6 +76,26 @@ 
fn qos_policy_name(id: i32) -> String { .to_string() } +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] +#[clap(rename_all = "kebab_case")] +enum FinalInstanceState { + /// unregister + U, + /// dispose + D, +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] +#[clap(rename_all = "kebab_case")] +enum AccessScope { + /// INSTANCE + I, + /// TOPIC + T, + /// GROUP + G, +} + #[derive(Parser, Clone)] #[command(author, version, about, long_about = None)] struct Options { @@ -92,13 +123,9 @@ struct Options { #[clap(short = 'k', default_value_t = -1, allow_negative_numbers = true)] history_depth: i32, - /// set a 'deadline' with interval (seconds) (0: OFF) + /// set a 'deadline' with interval (ms) (0: OFF) #[clap(short = 'f', default_value_t = 0)] - deadline_interval: i32, - - /// apply 'time based filter' with interval (seconds) (0: OFF) - #[clap(short = 'i', default_value_t = 0)] - timebasedfilter_interval: i32, + deadline_interval: u64, /// set ownership strength (-1: SHARED) #[clap(short = 's', default_value_t = -1, allow_negative_numbers = true)] @@ -147,6 +174,59 @@ struct Options { /// set log message verbosity (e: ERROR, d: DEBUG) #[clap(short = 'v', default_value_t = 'e')] log_message_verbosity: char, + + /// apply 'time based filter' with interval in ms [0: OFF] + #[clap(short = 'i', long = "time-filter")] + time_filter: Option, + + /// indicates the lifespan of a sample in ms + #[clap(short = 'l', long = "lifespan")] + lifespan: Option, + + /// indicates the number of iterations of the main loop. /// After that, the application will exit. Default (0): infinite + #[clap(short = 'n', long = "num-iterations")] + num_iterations: Option, + + /// indicates the number of instances a DataWriter writes. If the value is > 1, the additional instances are + /// created by appending a number. For example, if the original color is "BLUE", the instances used are + /// "BLUE", "BLUE1", "BLUE2"... + #[clap(short = 'I', long = "num-instances")] + num_instances: Option, + + /// indicates the number of topics created (using the same type). This also creates a DataReader or DataWriter per + /// topic. If the value is > 1, the additional topic names are created by appending a number. For example, if the + /// original topic name is "Square", the topics created are "Square", "Square1", "Square2"...
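// NOTE (editorial sketch, not part of the patch): with the ValueEnum derives and the kebab-case
// renaming configured above, the single-letter variants are expected to be accepted on the command
// line by their lowercase names. Assuming the executable is named shape_main, the options declared
// below would be invoked roughly as:
//   shape_main -P -t Square -M u    // FinalInstanceState::U -> unregister the instance on exit
//   shape_main -P -t Square -M d    // FinalInstanceState::D -> dispose the instance on exit
//   shape_main -S -t Square -C g    // AccessScope::G -> GROUP presentation access scope
// The long forms (--final-instance-state, --access-scope) accept the same values.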
+ #[clap(short = 'E', long = "num-topics")] + num_topics: Option, + + /// indicates the action performed after the DataWriter finishes its execution (before deleting it): unregister or dispose + #[clap(short = 'M', long = "final-instance-state")] + final_instance_state: Option, + + /// sets Presentation.access_scope + #[clap(short = 'C', long = "access-scope")] + access_scope: Option, + + /// sets Presentation.coherent_access = true + #[clap(short = 'T', long = "coherent")] + coherent: bool, + + /// sets Presentation.ordered_access = true + #[clap(short = 'O', long = "ordered")] + ordered: bool, + + /// number of samples sent for each DataWriter and instance that are grouped in a coherent set + #[clap(short = 'H', long = "coherent-sample-count")] + coherent_sample_count: Option, + + /// indicates the number of bytes added to the samples written (for example to use large data) + #[clap(short = 'B', long = "additional-payload-size")] + additional_payload_size: Option, + + /// uses take()/read() instead of take_next_instance()/read_next_instance() + #[clap(short = 'K', long = "take-read")] + take_read: bool, } impl Options { @@ -251,8 +331,12 @@ impl Options { } struct Listener; -impl DomainParticipantListener for Listener { - fn on_inconsistent_topic(&mut self, the_topic: Topic, _status: InconsistentTopicStatus) { +impl DomainParticipantListener for Listener { + async fn on_inconsistent_topic( + &mut self, + the_topic: TopicAsync, + _status: InconsistentTopicStatus, + ) { println!( "on_inconsistent_topic() topic: '{}' type: '{}'", the_topic.get_name(), @@ -260,9 +344,9 @@ impl DomainParticipantListener for Listener { ); } - fn on_offered_incompatible_qos( + async fn on_offered_incompatible_qos( &mut self, - the_writer: dust_dds::publication::data_writer::DataWriter<()>, + the_writer: dust_dds::dds_async::data_writer::DataWriterAsync, status: dust_dds::infrastructure::status::OfferedIncompatibleQosStatus, ) { let policy_name = qos_policy_name(status.last_policy_id); @@ -275,25 +359,25 @@ impl DomainParticipantListener for Listener { ); } - fn on_publication_matched( + async fn on_publication_matched( &mut self, - the_writer: dust_dds::publication::data_writer::DataWriter<()>, + the_writer: dust_dds::dds_async::data_writer::DataWriterAsync, status: dust_dds::infrastructure::status::PublicationMatchedStatus, ) { if !the_writer.get_topic().get_name().starts_with("DCPS") { println!( - "on_publication_matched() topic: '{}' type: '{}' : matched readers {} (change = {})", - the_writer.get_topic().get_name(), - the_writer.get_topic().get_type_name(), - status.current_count, - status.current_count_change - ); + "on_publication_matched() topic: '{}' type: '{}' : matched readers {} (change = {})", + the_writer.get_topic().get_name(), + the_writer.get_topic().get_type_name(), + status.current_count, + status.current_count_change + ); } } - fn on_offered_deadline_missed( + async fn on_offered_deadline_missed( &mut self, - the_writer: dust_dds::publication::data_writer::DataWriter<()>, + the_writer: dust_dds::dds_async::data_writer::DataWriterAsync, status: dust_dds::infrastructure::status::OfferedDeadlineMissedStatus, ) { println!( @@ -305,9 +389,9 @@ impl DomainParticipantListener for Listener { ); } - fn on_liveliness_lost( + async fn on_liveliness_lost( &mut self, - the_writer: dust_dds::publication::data_writer::DataWriter<()>, + the_writer: dust_dds::dds_async::data_writer::DataWriterAsync, status: dust_dds::infrastructure::status::LivelinessLostStatus, ) { println!( @@ -319,9 +403,9 @@ impl DomainParticipantListener for
Listener { ); } - fn on_requested_incompatible_qos( + async fn on_requested_incompatible_qos( &mut self, - the_reader: DataReader<()>, + the_reader: dust_dds::dds_async::data_reader::DataReaderAsync, status: dust_dds::infrastructure::status::RequestedIncompatibleQosStatus, ) { let policy_name = qos_policy_name(status.last_policy_id); @@ -334,9 +418,9 @@ impl DomainParticipantListener for Listener { ); } - fn on_subscription_matched( + async fn on_subscription_matched( &mut self, - the_reader: DataReader<()>, + the_reader: dust_dds::dds_async::data_reader::DataReaderAsync, status: dust_dds::infrastructure::status::SubscriptionMatchedStatus, ) { if !the_reader @@ -345,18 +429,18 @@ impl DomainParticipantListener for Listener { .starts_with("DCPS") { println!( - "on_subscription_matched() topic: '{}' type: '{}' : matched writers {} (change = {})", - the_reader.get_topicdescription().get_name(), - the_reader.get_topicdescription().get_type_name(), - status.current_count, - status.current_count_change - ); + "on_subscription_matched() topic: '{}' type: '{}' : matched writers {} (change = {})", + the_reader.get_topicdescription().get_name(), + the_reader.get_topicdescription().get_type_name(), + status.current_count, + status.current_count_change + ); } } - fn on_requested_deadline_missed( + async fn on_requested_deadline_missed( &mut self, - the_reader: DataReader<()>, + the_reader: dust_dds::dds_async::data_reader::DataReaderAsync, status: dust_dds::infrastructure::status::RequestedDeadlineMissedStatus, ) { println!( @@ -368,9 +452,9 @@ impl DomainParticipantListener for Listener { ); } - fn on_liveliness_changed( + async fn on_liveliness_changed( &mut self, - the_reader: DataReader<()>, + the_reader: dust_dds::dds_async::data_reader::DataReaderAsync, status: dust_dds::infrastructure::status::LivelinessChangedStatus, ) { println!( @@ -411,9 +495,9 @@ fn move_shape( } fn init_publisher( - participant: &DomainParticipant, + participant: &DomainParticipant, options: Options, -) -> Result, InitializeError> { +) -> Result, InitializeError> { let topic = participant .lookup_topicdescription(&options.topic_name) .expect("lookup_topicdescription succeeds") @@ -422,7 +506,7 @@ fn init_publisher( partition: options.partition_qos_policy(), ..Default::default() }); - let publisher = participant.create_publisher(publisher_qos, None, NO_STATUS)?; + let publisher = participant.create_publisher(publisher_qos, NO_LISTENER, NO_STATUS)?; println!( "Create writer for topic: {} color: {}", options.topic_name, @@ -438,8 +522,9 @@ fn init_publisher( ..Default::default() }; if options.deadline_interval > 0 { - data_writer_qos.deadline.period = - DurationKind::Finite(Duration::new(options.deadline_interval, 0)); + data_writer_qos.deadline.period = DurationKind::Finite( + core::time::Duration::from_millis(options.deadline_interval).into(), + ); } if options.ownership_qos_policy().kind == OwnershipQosPolicyKind::Exclusive { data_writer_qos.ownership_strength = options.ownership_strength_qos_policy(); @@ -448,7 +533,7 @@ fn init_publisher( let data_writer = publisher.create_datawriter::( &topic, QosKind::Specific(data_writer_qos), - None, + NO_LISTENER, NO_STATUS, )?; @@ -456,7 +541,7 @@ fn init_publisher( } fn run_publisher( - data_writer: &DataWriter, + data_writer: &DataWriter, options: Options, all_done: Receiver<()>, ) -> Result<(), RunningError> { @@ -469,6 +554,7 @@ fn run_publisher( x: random::() % da_width, y: random::() % da_height, shapesize: options.shapesize, + additional_payload_size: vec![], }; // get 
random non-zero velocity. @@ -489,7 +575,6 @@ fn run_publisher( } move_shape(&mut shape, &mut x_vel, &mut y_vel, da_width, da_height); - data_writer.write(&shape, None).ok(); if options.print_writer_samples { println!( "{:10} {:10} {:03} {:03} [{:}]", @@ -500,6 +585,7 @@ fn run_publisher( shape.shapesize ); } + data_writer.write(shape.clone(), None).ok(); std::thread::sleep(std::time::Duration::from_millis( options.write_period_ms as u64, )); @@ -508,9 +594,9 @@ fn run_publisher( } fn init_subscriber( - participant: &DomainParticipant, + participant: &DomainParticipant, options: Options, -) -> Result, InitializeError> { +) -> Result, InitializeError> { let topic = participant .lookup_topicdescription(&options.topic_name) .expect("lookup_topicdescription succeeds") @@ -519,7 +605,7 @@ fn init_subscriber( partition: options.partition_qos_policy(), ..Default::default() }); - let subscriber = participant.create_subscriber(subscriber_qos, None, NO_STATUS)?; + let subscriber = participant.create_subscriber(subscriber_qos, NO_LISTENER, NO_STATUS)?; let mut data_reader_qos = DataReaderQos { durability: options.durability_qos_policy(), @@ -530,22 +616,30 @@ fn init_subscriber( ..Default::default() }; if options.deadline_interval > 0 { - data_reader_qos.deadline.period = - DurationKind::Finite(Duration::new(options.deadline_interval, 0)); + data_reader_qos.deadline.period = DurationKind::Finite( + core::time::Duration::from_millis(options.deadline_interval).into(), + ); } - let data_reader = match &options.color { + let data_reader = match options.color { // filter on specified color Some(color) => { let filtered_topic_name = options.topic_name + "_filtered"; println!( "Create reader for topic: {} color: {}", - filtered_topic_name, color + filtered_topic_name, &color ); - subscriber.create_datareader::( + let content_filtered_topic = participant.create_contentfilteredtopic( + &filtered_topic_name, &topic, + String::from("color = %0"), + vec![color], + )?; + + subscriber.create_datareader::( + &content_filtered_topic, QosKind::Specific(data_reader_qos), - None, + NO_LISTENER, NO_STATUS, )? } @@ -555,7 +649,7 @@ fn init_subscriber( subscriber.create_datareader::( &topic, QosKind::Specific(data_reader_qos), - None, + NO_LISTENER, NO_STATUS, )? 
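// NOTE (editorial sketch, not part of the patch): in the filtered branch above, the filter
// expression "color = %0" uses a positional parameter bound to the first entry of the parameter
// vector, so create_contentfilteredtopic(..., String::from("color = %0"), vec![color]) with
// -c BLUE is expected to behave like the literal filter "color = 'BLUE'". This mirrors the
// --cft expressions passed by test_suite.py, e.g. --cft "color = 'RED'" in Test_Cft_0.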
} @@ -565,7 +659,7 @@ fn init_subscriber( } fn run_subscriber( - data_reader: &DataReader, + data_reader: &DataReader, options: Options, all_done: Receiver<()>, ) -> Result<(), RunningError> { @@ -593,8 +687,8 @@ fn run_subscriber( match read_result { Ok(samples) => { for sample in samples { - if sample.sample_info().valid_data { - let smaple_data = sample.data().expect("data present"); + if sample.sample_info.valid_data { + let smaple_data = sample.data.as_ref().expect("data present"); println!( "{:10} {:10} {:03} {:03} [{}]", data_reader.get_topicdescription().get_name(), @@ -605,7 +699,7 @@ fn run_subscriber( ); std::io::stdout().flush().expect("flush stdout succeeds"); } - previous_handle = Some(sample.sample_info().instance_handle); + previous_handle = Some(sample.sample_info.instance_handle); } std::thread::sleep(std::time::Duration::from_millis( @@ -619,12 +713,12 @@ fn run_subscriber( Ok(()) } -fn initialize(options: &Options) -> Result { +fn initialize(options: &Options) -> Result, InitializeError> { let participant_factory = DomainParticipantFactory::get_instance(); let participant = participant_factory.create_participant( options.domain_id, QosKind::Default, - Some(Box::new(Listener)), + Some(Listener), &[ StatusKind::InconsistentTopic, StatusKind::OfferedIncompatibleQos, @@ -642,7 +736,7 @@ fn initialize(options: &Options) -> Result { &options.topic_name, "ShapeType", QosKind::Default, - None, + NO_LISTENER, NO_STATUS, )?; @@ -713,7 +807,7 @@ fn main() -> Result<(), Return> { ctrlc::set_handler(move || tx.send(()).expect("Could not send signal on channel.")) .expect("Error setting Ctrl-C handler"); - let options = Options::parse(); + let options = Options::parse(); options.validate()?; let participant = initialize(&options)?; if options.publish { diff --git a/test_suite.py b/test_suite.py index ee0198f6..5bce2662 100644 --- a/test_suite.py +++ b/test_suite.py @@ -122,7 +122,7 @@ # RELIABILITY 'Test_Reliability_0' : { - 'apps' : ['-P -t Square -b -z 0', '-S -t Square -b -z 0'], + 'apps' : ['-P -t Square -b -z 0', '-S -t Square -b'], 'expected_codes' : [ReturnCode.OK, ReturnCode.OK], 'check_function' : tsf.test_reliability_order, 'title' : 'Communication between BEST_EFFORT publisher and subscriber', @@ -425,22 +425,39 @@ # Content Filtered Topic 'Test_Cft_0' : { - 'apps' : ['-P -t Square -r -c BLUE', '-P -t Square -r -c RED', '-S -t Square -r -c RED'], + 'apps' : ['-P -t Square -r -k 0 -c BLUE', '-P -t Square -r -k 0 -c RED', '-S -t Square -r -k 0 --cft "color = \'RED\'"'], 'expected_codes' : [ReturnCode.OK, ReturnCode.OK, ReturnCode.RECEIVING_FROM_ONE], 'check_function' : tsf.test_color_receivers, - 'title' : 'Use of Content filter to avoid receiving undesired data', - 'description' : 'Verifies a subscription using a ContentFilteredTopic does not receive data that does not pass the filter\n\n' + 'title' : 'Use of Content filter to avoid receiving undesired data (key)', + 'description' : 'Verifies a subscription using a ContentFilteredTopic does not receive data that does not ' + 'pass the filter. 
The filter is applied to the key "color"\n\n' ' * Configures a subscriber with a ContentFilteredTopic that selects only the shapes that ' 'have "color" equal to "RED"\n' ' * Configures a first publisher to publish samples with "color" equal to "BLUE"\n' ' * Configures a second publisher to publish samples with "color" equal to "RED"\n' ' * Use RELIABLE Qos in all publishers and subscriber to ensure any samples that are not ' 'received are due to filtering\n' + ' * Configures the publishers / subscriber with history KEEP_ALL\n' ' * Verifies that both publishers discover and match the subscriber and vice-versa\n' ' * Note that this test does not check whether the filtering happens in the publisher side or ' 'the subscriber side. It only checks the middleware filters the samples somewhere.\n\n' f'The test passes if the subscriber receives {tsf.MAX_SAMPLES_READ} samples of one color\n' - }, + }, + + 'Test_Cft_1': { + 'apps': ['-P -t Square -r -k 0 -z 0 --size-modulo 50', '-S -t Square -r -k 0 --cft "shapesize <= 20"'], + 'expected_codes': [ReturnCode.OK, ReturnCode.OK], + 'check_function': tsf.test_size_less_than_20, + 'title' : 'Use of Content filter to avoid receiving undesired data (non-key)', + 'description': 'Verifies a subscription using a ContentFilteredTopic does not receive data that does not ' + 'pass the filter. The filter is applied to the non-key member "shapesize".\n\n' + ' * Use RELIABLE Qos in the publisher and subscriber to avoid sample losses\n' + ' * Configures the publisher / subscriber with history KEEP_ALL\n' + ' * The publisher application sends samples with increasing values of the "shapesize" member\n' + ' * Publisher sends samples with size cycling from 1 to 50 (using --size-modulo 50 and -z 0)\n' + ' * Subscriber uses --cft "shapesize <= 20"\n' + ' * The test passes if all received samples have size between 1 and 20 (inclusive)\n' + }, # PARTITION 'Test_Partition_0' : { diff --git a/test_suite_functions.py b/test_suite_functions.py index 6baf9e3d..2fe137a1 100644 --- a/test_suite_functions.py +++ b/test_suite_functions.py @@ -305,6 +305,45 @@ def test_color_receivers(child_sub, samples_sent, last_sample_saved, timeout): print(f'Samples read: {samples_read}') return ReturnCode.RECEIVING_FROM_ONE +def test_size_less_than_20(child_sub, samples_sent, last_sample_saved, timeout): + """ + Checks that all received samples have size between 1 and 20 (inclusive). + Returns ReturnCode.OK if all samples are in range, otherwise ReturnCode.DATA_NOT_CORRECT. + """ + import re + from rtps_test_utilities import ReturnCode + + max_samples_received = MAX_SAMPLES_READ / 2 + samples_read = 0 + return_code = ReturnCode.OK + + sub_string = re.search(r'[0-9]+ [0-9]+ \[([0-9]+)\]', child_sub.before + child_sub.after) + + while sub_string is not None and samples_read < max_samples_received: + size = int(sub_string.group(1)) + if size < 1 or size > 20: + return_code = ReturnCode.DATA_NOT_CORRECT + break + + index = child_sub.expect( + [ + r'\[[0-9]+\]', # index = 0 + pexpect.TIMEOUT, # index = 1 + pexpect.EOF # index = 2 + ], + timeout + ) + if index == 1 or index == 2: + return_code = ReturnCode.DATA_NOT_RECEIVED + break + + samples_read += 1 + sub_string = re.search(r'[0-9]+ [0-9]+ \[([0-9]+)\]', child_sub.before + child_sub.after) + + print(f'Samples read: {samples_read}') + return return_code + + def test_reliability_order(child_sub, samples_sent, last_sample_saved, timeout): """ This function tests reliability, it checks whether the subscriber receives