diff --git a/.github/pr_path_labeler.yml b/.github/pr_path_labeler.yml deleted file mode 100644 index b3c0a9d89..000000000 --- a/.github/pr_path_labeler.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Labeler triggered by .github/workflows/labels.yml - -pydarshan: -- any: ["darshan-util/pydarshan/**/*"] - -performance: -- any: ["darshan-util/pydarshan/benchmarks/**/*"] - -CI: -- any: ["**/*.yml"] -- any: ["**/*.yaml"] -- any: [".github/workflows/*.yml"] -- any: [".github/workflows/*.yaml"] -- any: [".github/*.yml"] -- any: [".github/*.yaml"] diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml deleted file mode 100644 index d91da2b90..000000000 --- a/.github/workflows/build_wheels.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: Build Wheels - -on: - push: - branches: - - main - paths: - - darshan-util/** - - include/** - - .github/workflows/build_wheels.yml - pull_request: - branches: - - main - paths: - - darshan-util/** - - include/** - - .github/workflows/build_wheels.yml - workflow_dispatch: - -jobs: - get_commit_message: - name: Get commit message - runs-on: ubuntu-latest - if: github.repository == 'darshan-hpc/darshan' - outputs: - message: ${{ steps.commit_message.outputs.message }} - steps: - - name: Checkout darshan - uses: actions/checkout@v3 - # Gets the correct commit message for pull request - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Get commit message - id: commit_message - run: | - set -xe - COMMIT_MSG=$(git log --no-merges -1 --oneline) - echo "::set-output name=message::$COMMIT_MSG" - echo github.ref ${{ github.ref }} - - build_wheels: - name: Build wheels on ${{ matrix.os }} - needs: get_commit_message - if: >- - contains(needs.get_commit_message.outputs.message, '[wheel build]') || - github.event_name == 'workflow_dispatch' - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-latest, macos-latest, macos-13] - - steps: - - uses: actions/checkout@v2 - - - name: Build wheels - uses: pypa/cibuildwheel@v2.19.2 - with: - package-dir: ./darshan-util/pydarshan - - - uses: actions/upload-artifact@v4 - with: - name: pydarshan-wheels-${{ matrix.os }} - path: ./wheelhouse/*.whl - retention-days: 0 - - merge_artifacts: - name: Merge built wheel artifacts - needs: build_wheels - runs-on: ubuntu-latest - steps: - - uses: actions/upload-artifact/merge@v4 - with: - name: pydarshan-wheels-all diff --git a/.github/workflows/darshan_ldms_test_ci.yml b/.github/workflows/darshan_ldms_test_ci.yml deleted file mode 100644 index c7ff1500f..000000000 --- a/.github/workflows/darshan_ldms_test_ci.yml +++ /dev/null @@ -1,179 +0,0 @@ -#Test overview: -# Test the Darshan LDMS Integrated code (e.g. darshanConnector). 
-# Build and Compile lastest LDMS release -# Build and Compile Darshan against the latest LDMS library -# Run an MPI-IO Test from Darshan's test suites -# Check that the test completes normally and LDMS is collecting runtime timeseries data -# Check that the test completes with Darshan when LDMS variables are not set - -name: Darshan-LDMS Integration Test - Latest - -on: - push: - branches: - - main - pull_request: - branches: - - main - workflow_dispatch: - -jobs: - test: - strategy: - matrix: - platform: [ubuntu-latest] - runs-on: ${{ matrix.platform }} - steps: - - uses: actions/checkout@v3 - - name: Install dependencies - run: | - sudo apt-get update -y - sudo apt-get install openmpi-bin libopenmpi-dev - sudo apt-get install libjansson-dev - sudo apt-get install python3-docutils - - name: Clone LDMS - uses: actions/checkout@v3 - with: - repository: ovis-hpc/ldms - path: ldms - ref: main - - name: Install LDMS - run: | - cd ldms - sh autogen.sh - set -e && mkdir -p build - pushd build - ../configure --prefix=/opt/ldms-latest --enable-etc - make && make install - - name: Install Darshan - run: | - git submodule update --init - # build darshan against LDMS library - export DARSHAN_INSTALL_PREFIX=/opt/darshan_install - export DARSHAN_RUNTIME_CONFIG_ARGS="--enable-ldms-mod --with-ldms=/opt/ldms-latest --with-jobid-env=NONE" - darshan-test/automated/build-darshan.sh - - name: Test Preparation and Run - run : | - echo "---setting Darshan environment---" - export DARSHAN_INSTALL_PATH=/opt/darshan_install - export LD_PRELOAD=$DARSHAN_INSTALL_PATH/lib/libdarshan.so - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$DARSHAN_INSTALL_PATH/lib - echo "---setting Darshan-LDMS environment---" - export DARSHAN_LDMS_STREAM=darshanConnector - export DARSHAN_LDMS_PORT=10444 - export DARSHAN_LDMS_HOST=localhost - export DARSHAN_LDMS_XPRT=sock - export DARSHAN_LDMS_AUTH=none - export DARSHAN_LDMS_ENABLE= - export DARSHAN_LDMS_ENABLE_MPIIO= - export DARSHAN_LDMS_ENABLE_POSIX= - export DARSHAN_LDMS_ENABLE_STDIO= - export DARSHAN_LDMS_ENABLE_HDF5= - echo "---setting Test Case Variables---" - export PROG=mpi-io-test - export DARSHAN_TMP=/tmp/darshan-ldms-test - export DARSHAN_TESTDIR=$PWD/darshan-test/regression - export DARSHAN_LOGFILE=$DARSHAN_TMP/${PROG}.darshan - echo "---checking TMP Path---" - if [ ! -d $DARSHAN_TMP ]; then - mkdir -p $DARSHAN_TMP - fi - if [ ! -d $DARSHAN_TMP ]; then - echo "Error: unable to find or create $DARSHAN_TMP" 1>&2 - exit 1 - fi - if [ ! -w $DARSHAN_TMP ]; then - echo "Error: unable to write to $DARSHAN_TMP" 1>&2 - exit 1 - fi - echo "---generating ldmsd configuration file---" - cat > stream-samp-latest.conf << EOF - load name=hello_sampler - config name=hello_sampler producer=host1 instance=host1/hello_sampler stream=darshanConnector component_id=1 - start name=hello_sampler interval=1000000 offset=0 - load name=stream_csv_store - config name=stream_csv_store path=./streams/store container=csv stream=darshanConnector rolltype=3 rollover=500000 - EOF - echo "---starting ldmsd---" - cat > ldmsd-latest.sh << EOF - . /opt/ldms-latest/etc/profile.d/set-ovis-variables.sh - ldmsd \$@ & - EOF - chmod 755 ldmsd-latest.sh - ./ldmsd-latest.sh -x sock:10444 -c stream-samp-latest.conf -l /tmp/stream-samp-latest.log -v DEBUG - echo "---check daemon is running---" - STREAM_SAMP_LATEST_PID=$(pgrep -f stream-samp-latest.conf) - pgrep -a ldmsd - [[ -n "${STREAM_SAMP_LATEST_PID}" ]] || error "stream-samp-latest.log is not running" - cat > ldms_ls-latest.sh << EOF - . 
/opt/ldms-latest/etc/profile.d/set-ovis-variables.sh - ldms_ls \$@ & - EOF - chmod 755 ldms_ls-latest.sh - ./ldms_ls-latest.sh -p 10444 -x sock -v -v - echo "---Test Case #1: Darshan-LDMS Env Set and Connected to LDMS Streams Deamon---" - mpicc $DARSHAN_TESTDIR/test-cases/src/${PROG}.c -o $DARSHAN_TMP/${PROG} - cd $DARSHAN_TMP - ./${PROG} -f $DARSHAN_TMP/${PROG}.tmp.dat 2>> $DARSHAN_TMP/${PROG}.err - echo "---Test Case #2: Darshan-LDMS Environment NOT Set---" - unset DARSHAN_LDMS_STREAM - unset DARSHAN_LDMS_PORT - unset DARSHAN_LDMS_HOST - unset DARSHAN_LDMS_XPRT - unset DARSHAN_LDMS_AUTH - unset DARSHAN_LDMS_ENABLE_MPIIO - unset DARSHAN_LDMS_ENABLE_POSIX - unset DARSHAN_LDMS_ENABLE_STDIO - unset DARSHAN_LDMS_ENABLE_HDF5 - export DARSHAN_LOGFILE=$DARSHAN_TMP/${PROG}-no-ldms-env.darshan - ./${PROG} -f $DARSHAN_TMP/${PROG}.tmp.dat 2>> $DARSHAN_TMP/${PROG}.err - echo "---Test Case #3: LDMS Streams Daemon NOT Connected---" - export DARSHAN_LDMS_STREAM=darshanConnector - export DARSHAN_LDMS_PORT=10444 - export DARSHAN_LDMS_HOST=localhost - export DARSHAN_LDMS_XPRT=sock - export DARSHAN_LDMS_AUTH=none - export DARSHAN_LDMS_ENABLE= - export DARSHAN_LDMS_ENABLE_MPIIO= - export DARSHAN_LDMS_ENABLE_POSIX= - export DARSHAN_LDMS_ENABLE_STDIO= - export DARSHAN_LDMS_ENABLE_HDF5= - export DARSHAN_LOGFILE=$DARSHAN_TMP/${PROG}-no-ldms-daemon.darshan - echo "---killing the daemon---" - kill ${STREAM_SAMP_LATEST_PID} - ./${PROG} -f $DARSHAN_TMP/${PROG}.tmp.dat 2>> $DARSHAN_TMP/${PROG}.err - echo "---Parse Darshan log file from darshanConnector Run---" - export DARSHAN_LOGFILE=$DARSHAN_TMP/${PROG}.darshan - if [ ! -x $DARSHAN_INSTALL_PATH/bin/darshan-parser ]; then - echo "Error: $DARSHAN_PATH doesn't contain a valid Darshan install." 1>&2 - exit 1 - fi - $DARSHAN_INSTALL_PATH/bin/darshan-parser --all $DARSHAN_LOGFILE > $DARSHAN_TMP/${PROG}.darshan.txt - if [ $? -ne 0 ]; then - echo "Error: failed to parse ${DARSHAN_LOGFILE}" 1>&2 - exit 1 - fi - - name: Check Results - run: | - export DARSHAN_TMP=/tmp/darshan-ldms-test - export PROG=mpi-io-test - echo "---View LDMS Output and Darshan Log Directory---" - ls -ltrch $DARSHAN_TMP - ls -ltrch streams/store/csv - cat $DARSHAN_TMP/${PROG}.err - echo "---Check Darshan Log Files Exist---" - if [[ ! -f "$DARSHAN_TMP/${PROG}.darshan" || ! -f "$DARSHAN_TMP/${PROG}-no-ldms-env.darshan" || ! -f "$DARSHAN_TMP/${PROG}-no-ldms-daemon.darshan" ]]; then - echo "One of the darshan log files do not exist." - exit 1 - fi - echo "---Check LDMS Data Collection---" - if ! compgen -G "streams/store/csv/darshanConnector.*" > /dev/null; then - echo "CSV file does not exist." - cat /tmp/stream-samp-latest.log - exit 1 - elif [[ -z "$(cat streams/store/csv/darshanConnector.*)" ]]; then - echo "No data was stored to CSV file." - cat /tmp/stream-samp-latest.log - exit 1 - fi - echo "DONE!" 
diff --git a/.github/workflows/end_to_end_pytest.yml b/.github/workflows/end_to_end_pytest.yml deleted file mode 100644 index d0d364851..000000000 --- a/.github/workflows/end_to_end_pytest.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: End-to-end Testing (pytest) - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - end_to_end_pytest: - strategy: - matrix: - platform: [ubuntu-latest] - python-version: ["3.10"] - runs-on: ${{ matrix.platform }} - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - sudo apt-get update -y - sudo apt-get install -y hdf5-tools libhdf5-openmpi-dev openmpi-bin - python -m pip install --upgrade pip - python -m pip install --upgrade pytest mpi4py "cython<3.0.0" numpy wheel pkgconfig setuptools - # we need to build h5py with the system HDF5 lib backend - export HDF5_MPI="ON" - # Install h5py https://github.com/h5py/h5py/issues/2222 - CC=mpicc python -m pip install --no-cache-dir --no-binary=h5py h5py --no-build-isolation - - name: Install Darshan - run: | - git submodule update --init - export C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/include/hdf5/openmpi/ - export DARSHAN_INSTALL_PREFIX=$PWD/darshan_install - export DARSHAN_RUNTIME_CONFIG_ARGS="--with-jobid-env=NONE --enable-hdf5-mod --with-log-path-by-env=DARSHAN_LOGPATH" - darshan-test/automated/build-darshan.sh - - name: Install Darshan (non-MPI) - run: | - export DARSHAN_INSTALL_PREFIX=$PWD/darshan_nonmpi_install - export DARSHAN_RUNTIME_CONFIG_ARGS="--without-mpi --with-jobid-env=NONE --with-log-path-by-env=DARSHAN_LOGPATH" - darshan-test/automated/build-darshan.sh - - name: Install pydarshan - run: | - cd darshan-util/pydarshan - python -m pip install . 
- - name: Test with pytest - run: | - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$PWD/darshan_install/lib - export DARSHAN_INSTALL_PATH=$PWD/darshan_install - export DARSHAN_NONMPI_INSTALL_PATH=$PWD/darshan_nonmpi_install - export DARSHAN_ROOT_PATH=$PWD - export HDF5_LIB=/usr/lib/x86_64-linux-gnu/hdf5/openmpi/libhdf5.so - python -m pytest darshan-test/python_runtime_tests.py diff --git a/.github/workflows/end_to_end_regression.yml b/.github/workflows/end_to_end_regression.yml deleted file mode 100644 index b2de5ee89..000000000 --- a/.github/workflows/end_to_end_regression.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: End-to-end Testing (regression) - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - end_to_end_regression: - strategy: - matrix: - platform: [ubuntu-latest] - runs-on: ${{ matrix.platform }} - steps: - - uses: actions/checkout@v3 - - name: Install dependencies - run: | - sudo apt-get update -y - sudo apt-get install -y gfortran bc - - name: Install MPICH - run: | - mkdir mpich_install - export MPICH_INSTALL_PATH=$PWD/mpich_install - wget https://www.mpich.org/static/downloads/3.2.1/mpich-3.2.1.tar.gz - tar -xzvf mpich-3.2.1.tar.gz - cd mpich-3.2.1 - mkdir build - cd build - FFLAGS="-w -fallow-argument-mismatch" ../configure --disable-dependency-tracking --prefix=$MPICH_INSTALL_PATH - make - make install - - name: Install Darshan - run: | - git submodule update --init - export PATH=$PWD/mpich_install/bin:$PATH - # use automated script to build Darshan - export DARSHAN_INSTALL_PREFIX=$PWD/darshan_install - darshan-test/automated/build-darshan.sh - # try to format log directory to ensure this works - ./darshan_install/bin/darshan-mk-log-dirs.pl - - name: Run end-to-end regression tests (ld_preload) - run: | - export DARSHAN_INSTALL_PATH=$PWD/darshan_install - export PATH=$PWD/mpich_install/bin:$PATH - cd darshan-test/regression - ./run-all.sh $DARSHAN_INSTALL_PATH /tmp/darshan-ld-preload workstation-ld-preload - - name: Run end-to-end regression tests (prof conf dynamic) - run: | - export DARSHAN_INSTALL_PATH=$PWD/darshan_install - export PATH=$PWD/mpich_install/bin:$PATH - cd darshan-test/regression - ./run-all.sh $DARSHAN_INSTALL_PATH /tmp/darshan-prof-conf-dynamic workstation-profile-conf-dynamic - - name: Run end-to-end regression tests (prof conf static) - run: | - export DARSHAN_INSTALL_PATH=$PWD/darshan_install - export PATH=$PWD/mpich_install/bin:$PATH - cd darshan-test/regression - ./run-all.sh $DARSHAN_INSTALL_PATH /tmp/darshan-prof-conf-static workstation-profile-conf-static diff --git a/.github/workflows/end_to_end_regression_aurora.yml b/.github/workflows/end_to_end_regression_aurora.yml deleted file mode 100644 index 37a0581e7..000000000 --- a/.github/workflows/end_to_end_regression_aurora.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: End-to-end Testing (regression) Aurora - -on: - push: - branches: - - main - paths: - - darshan-runtime/** - - .github/workflows/end_to_end_regression_aurora.yml - workflow_dispatch: - -jobs: - trigger_alcf_aurora_regression_ci: - runs-on: ubuntu-latest - steps: - - name: Get branch/tag name - run: echo "GITHUB_REF_NAME=$(echo ${GITHUB_REF} | cut --complement -d/ -f1,2)" >> $GITHUB_ENV - - uses: eic/trigger-gitlab-ci@v3 - with: - url: https://gitlab-ci.alcf.anl.gov - project_id: 174 - token: ${{ secrets.ALCF_GITLAB_CI_TOKEN_AURORA }} - variables: | - GITHUB_REF_NAME="${{ env.GITHUB_REF_NAME }}" diff --git a/.github/workflows/end_to_end_regression_polaris.yml b/.github/workflows/end_to_end_regression_polaris.yml 
deleted file mode 100644 index 4e78fd0ea..000000000 --- a/.github/workflows/end_to_end_regression_polaris.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: End-to-end Testing (regression) Polaris - -on: - push: - branches: - - main - paths: - - darshan-runtime/** - - .github/workflows/end_to_end_regression_polaris.yml - workflow_dispatch: - -jobs: - trigger_alcf_polaris_regression_ci: - runs-on: ubuntu-latest - steps: - - name: Get branch/tag name - run: echo "GITHUB_REF_NAME=$(echo ${GITHUB_REF} | cut --complement -d/ -f1,2)" >> $GITHUB_ENV - - uses: eic/trigger-gitlab-ci@v3 - with: - url: https://gitlab-ci.alcf.anl.gov - project_id: 121 - token: ${{ secrets.ALCF_GITLAB_CI_TOKEN_POLARIS }} - variables: | - GITHUB_REF_NAME="${{ env.GITHUB_REF_NAME }}" diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml deleted file mode 100644 index 338a73543..000000000 --- a/.github/workflows/labels.yml +++ /dev/null @@ -1,16 +0,0 @@ -# Triage PRs based on modified paths -# Rely on .github/pr_path_labeler.yml -# https://github.com/marketplace/actions/labeler -name: "Pull Request Labeler" -on: -- pull_request_target - -jobs: - triage-on-file-paths: - runs-on: ubuntu-latest - steps: - - uses: actions/labeler@v3 - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" - configuration-path: ".github/pr_path_labeler.yml" - sync-labels: true # remove labels if file not modified anymore diff --git a/.github/workflows/main_ci.yml b/.github/workflows/main_ci.yml deleted file mode 100644 index a54628f49..000000000 --- a/.github/workflows/main_ci.yml +++ /dev/null @@ -1,96 +0,0 @@ -name: Python Testing - -on: - push: - branches: - - main - paths: - - darshan-util/** - - include/** - pull_request: - branches: - - main - paths: - - darshan-util/** - - include/** - - .github/workflows/** - -jobs: - test_pydarshan: - strategy: - matrix: - platform: [ubuntu-latest, - macos-latest] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] - runs-on: ${{ matrix.platform }} - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install --upgrade pyflakes asv pytest-cov "mypy<1.0.0" - - if: ${{matrix.platform == 'macos-latest'}} - name: Install MacOS deps - run: | - brew install automake - brew install libtool - - name: Install darshan-util - run: | - git submodule update --init - export DARSHAN_INSTALL_PREFIX=$PWD/darshan_install - DARSHAN_RUNTIME_SKIP=1 darshan-test/automated/build-darshan.sh - # run darshan-util make check target - make -C darshan_build/darshan-util check - - name: Install pydarshan - run: | - cd darshan-util/pydarshan - python -m pip install .[test] - # only install the darshan_logs project in some CI - # entries so we test proper handling of skips - # in test suite - - if: ${{matrix.python-version != 3.8}} - name: Install darshan_logs package - run: | - python -m pip install git+https://github.com/darshan-hpc/darshan-logs.git@main - - if: ${{matrix.python-version == 3.8}} - name: Use minimal deps - run: | - # uninstall deps that are not absolute requirements - # to make sure that i.e., tests skip appropriately - python -m pip uninstall -y lxml - - name: Test with pytest - run: | - export LD_LIBRARY_PATH=$PWD/darshan_install/lib - export DYLD_FALLBACK_LIBRARY_PATH=$PWD/darshan_install/lib - # the test suite should be portable - # to any directory--it must be possible - # to check HPC 
spack installs for example - cd $RUNNER_TEMP - site_packages=$(pip show darshan | grep Location | cut -d ' ' -f 2) - pytest -W error::FutureWarning -W error:"The join function was deprecated in Matplotlib" --pyargs darshan --cov-report xml --cov=$site_packages/darshan - - name: mypy check - run: | - export LD_LIBRARY_PATH=$PWD/darshan_install/lib - export DYLD_FALLBACK_LIBRARY_PATH=$PWD/darshan_install/lib - cd darshan-util/pydarshan - mypy darshan - - name: pyflakes check - run: | - cd darshan-util/pydarshan - pyflakes darshan/backend | (grep -E -i 'assigned to but never used' || exit 0 && exit 123) - - name: asv check - run: | - export LD_LIBRARY_PATH=$PWD/darshan_install/lib - export DYLD_FALLBACK_LIBRARY_PATH=$PWD/darshan_install/lib - cd darshan-util/pydarshan/benchmarks - python -m pip install "seaborn==0.12" - python -m asv check -E existing - - name: codecov check - uses: codecov/codecov-action@v3 - with: - files: $RUNNER_TEMP/coverage.xml,/home/runner/work/_temp/coverage.xml,/Users/runner/work/_temp/coverage.xml - fail_ci_if_error: False diff --git a/.github/workflows/openmpi.yml b/.github/workflows/openmpi.yml new file mode 100644 index 000000000..28055f3ca --- /dev/null +++ b/.github/workflows/openmpi.yml @@ -0,0 +1,184 @@ +name: OpenMPI 5.0.5 and 5.0.6 + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + build: + strategy: + matrix: + platform: [ubuntu-latest] + runs-on: ${{ matrix.platform }} + steps: + - uses: actions/checkout@v4 + - name: Install dependencies + run: | + sudo apt-get update -y + - name: Initialize Darshan + run: | + git submodule update --init + autoreconf -i + - name: Build OPENMPI 5.0.6 + run: | + cd ${GITHUB_WORKSPACE} + rm -rf OPENMPI ; mkdir OPENMPI ; cd OPENMPI + wget -q https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.6.tar.gz + gzip -dc openmpi-5.0.6.tar.gz | tar -xf - + cd openmpi-5.0.6 + ./configure --prefix=${GITHUB_WORKSPACE}/OPENMPI \ + CC=gcc \ + --disable-mpi-cxx --disable-mpi-fortran + make -s LIBTOOLFLAGS=--silent V=1 -j 8 install + - name: Install Darshan using OPENMPI 5.0.6 + if: ${{ always() }} + run: | + export PATH="${GITHUB_WORKSPACE}/OPENMPI/bin:$PATH" + DARSHAN_ROOT=${GITHUB_WORKSPACE} + DARSHAN_INSTALL=${GITHUB_WORKSPACE}/darshan_install + DARSHAN_BUILD=${GITHUB_WORKSPACE}/darshan_build + DARSHAN_LOG_PATH=${GITHUB_WORKSPACE}/darshan_logs + rm -rf ${DARSHAN_LOG_PATH} ${DARSHAN_BUILD} ${DARSHAN_INSTALL} + mkdir -p $DARSHAN_LOG_PATH $DARSHAN_BUILD + cd $DARSHAN_BUILD + $DARSHAN_ROOT/configure --prefix=${DARSHAN_INSTALL} \ + --with-log-path=${DARSHAN_LOG_PATH} \ + --with-jobid-env=NONE \ + CC=mpicc RUNTIME_CC=mpicc UTIL_CC=gcc + make -s LIBTOOLFLAGS=--silent V=1 -j8 + make -s install + - name: test a small MPI-IO program using OPENMPI 5.0.6 + if: ${{ always() }} + run: | + cd ${GITHUB_WORKSPACE} + export PATH="${GITHUB_WORKSPACE}/OPENMPI/bin:$PATH" + DARSHAN_INSTALL=${GITHUB_WORKSPACE}/darshan_install + TODAY_DATE_PATH=`date "+%Y/%-m/%-d"` + DARSHAN_LOG_PATH=${GITHUB_WORKSPACE}/darshan_logs/${TODAY_DATE_PATH} + DARSHAN_LOG_FILE="${DARSHAN_LOG_PATH}/${USER}_mpi_file_write*" + DARSHAN_PARSER=${DARSHAN_INSTALL}/bin/darshan-parser + + mkdir -p $DARSHAN_LOG_PATH + mpicc mpi_file_write.c -o mpi_file_write + + export LD_PRELOAD=${DARSHAN_INSTALL}/lib/libdarshan.so + + # test Darshan log parser + for iter in 1 2 3 4 5 + do + rm -f testfile ${DARSHAN_LOG_FILE} + mpiexec --oversubscribe -n 4 ./mpi_file_write + echo "mpiexec --oversubscribe -n 4 ./mpi_file_write" + EXPECT_NBYTE=`stat -c %s ./testfile` +
+ nbytes=`$DARSHAN_PARSER ${DARSHAN_LOG_FILE} | grep MPIIO_BYTES_WRITTEN | cut -f5` + echo "iter=$iter nbytes=$nbytes" + if test "x$nbytes" != "x$EXPECT_NBYTE" ; then + echo "Error: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE but nbytes=$nbytes" + exit 1 + else + echo "Success: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE and nbytes=$nbytes" + fi + done + + - name: Build OPENMPI 5.0.5 + if: ${{ always() }} + run: | + cd ${GITHUB_WORKSPACE} + rm -rf OPENMPI ; mkdir OPENMPI ; cd OPENMPI + wget -q https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.5.tar.gz + gzip -dc openmpi-5.0.5.tar.gz | tar -xf - + cd openmpi-5.0.5 + ./configure --prefix=${GITHUB_WORKSPACE}/OPENMPI \ + CC=gcc \ + --disable-mpi-cxx --disable-mpi-fortran + make -s LIBTOOLFLAGS=--silent V=1 -j 8 install + - name: Install Darshan using OPENMPI 5.0.5 + if: ${{ always() }} + run: | + export PATH="${GITHUB_WORKSPACE}/OPENMPI/bin:$PATH" + DARSHAN_ROOT=${GITHUB_WORKSPACE} + DARSHAN_INSTALL=${GITHUB_WORKSPACE}/darshan_install + DARSHAN_BUILD=${GITHUB_WORKSPACE}/darshan_build + DARSHAN_LOG_PATH=${GITHUB_WORKSPACE}/darshan_logs + rm -rf ${DARSHAN_LOG_PATH} ${DARSHAN_BUILD} ${DARSHAN_INSTALL} + mkdir -p $DARSHAN_LOG_PATH $DARSHAN_BUILD + cd $DARSHAN_BUILD + $DARSHAN_ROOT/configure --prefix=${DARSHAN_INSTALL} \ + --with-log-path=${DARSHAN_LOG_PATH} \ + --with-jobid-env=NONE \ + CC=mpicc RUNTIME_CC=mpicc UTIL_CC=gcc + make -s LIBTOOLFLAGS=--silent V=1 -j8 + make -s install + - name: test a small MPI-IO program using OPENMPI 5.0.5 + if: ${{ always() }} + run: | + cd ${GITHUB_WORKSPACE} + export PATH="${GITHUB_WORKSPACE}/OPENMPI/bin:$PATH" + DARSHAN_INSTALL=${GITHUB_WORKSPACE}/darshan_install + TODAY_DATE_PATH=`date "+%Y/%-m/%-d"` + DARSHAN_LOG_PATH=${GITHUB_WORKSPACE}/darshan_logs/${TODAY_DATE_PATH} + DARSHAN_LOG_FILE="${DARSHAN_LOG_PATH}/${USER}_mpi_file_write*" + DARSHAN_PARSER=${DARSHAN_INSTALL}/bin/darshan-parser + + mkdir -p $DARSHAN_LOG_PATH + mpicc mpi_file_write.c -o mpi_file_write + + export LD_PRELOAD=${DARSHAN_INSTALL}/lib/libdarshan.so + + # test Darshan log parser + for iter in 1 2 3 4 5 + do + rm -f testfile ${DARSHAN_LOG_FILE} + mpiexec --oversubscribe -n 4 ./mpi_file_write + echo "mpiexec --oversubscribe -n 4 ./mpi_file_write" + EXPECT_NBYTE=`stat -c %s ./testfile` + + nbytes=`$DARSHAN_PARSER ${DARSHAN_LOG_FILE} | grep MPIIO_BYTES_WRITTEN | cut -f5` + echo "iter=$iter nbytes=$nbytes" + if test "x$nbytes" != "x$EXPECT_NBYTE" ; then + echo "Error: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE but nbytes=$nbytes" + exit 1 + else + echo "Success: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE and nbytes=$nbytes" + fi + done + + - name: test OPENMPI 5.0.5 with fbtl_posix_write_datasieving set to 0 + if: ${{ always() }} + run: | + cd ${GITHUB_WORKSPACE} + export PATH="${GITHUB_WORKSPACE}/OPENMPI/bin:$PATH" + DARSHAN_INSTALL=${GITHUB_WORKSPACE}/darshan_install + TODAY_DATE_PATH=`date "+%Y/%-m/%-d"` + DARSHAN_LOG_PATH=${GITHUB_WORKSPACE}/darshan_logs/${TODAY_DATE_PATH} + DARSHAN_LOG_FILE="${DARSHAN_LOG_PATH}/${USER}_mpi_file_write*" + DARSHAN_PARSER=${DARSHAN_INSTALL}/bin/darshan-parser + + mkdir -p $DARSHAN_LOG_PATH + mpicc mpi_file_write.c -o mpi_file_write + + export LD_PRELOAD=${DARSHAN_INSTALL}/lib/libdarshan.so + + # test Darshan log parser + for iter in 1 2 3 4 5 + do + rm -f testfile ${DARSHAN_LOG_FILE} + mpiexec --mca fbtl_posix_write_datasieving 0 --oversubscribe -n 4 ./mpi_file_write + echo "mpiexec --mca fbtl_posix_write_datasieving 0 --oversubscribe -n 4 ./mpi_file_write" + EXPECT_NBYTE=`stat -c %s
./testfile` + + nbytes=`$DARSHAN_PARSER ${DARSHAN_LOG_FILE} | grep MPIIO_BYTES_WRITTEN | cut -f5` + echo "iter=$iter nbytes=$nbytes" + if test "x$nbytes" != "x$EXPECT_NBYTE" ; then + echo "Error: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE but nbytes=$nbytes" + exit 1 + else + echo "Success: Darshan log parser EXPECT_NBYTE=$EXPECT_NBYTE and nbytes=$nbytes" + fi + done + diff --git a/mpi_file_write.c b/mpi_file_write.c new file mode 100644 index 000000000..f828d57c4 --- /dev/null +++ b/mpi_file_write.c @@ -0,0 +1,54 @@ +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <unistd.h> /* unlink() */ +#include <mpi.h> + +#define CHECK_ERROR(fnc) { \ + if (err != MPI_SUCCESS) { \ + int errorStringLen; \ + char errorString[MPI_MAX_ERROR_STRING]; \ + MPI_Error_string(err, errorString, &errorStringLen); \ + printf("Error at line %d when calling %s: %s\n",__LINE__,fnc,errorString); \ + } \ +} + +#define NELEMS 8 + +/*----< main() >------------------------------------------------------------*/ +int main(int argc, char **argv) +{ + char buf[NELEMS]; + int i, err, rank, omode; + MPI_Offset offset; + MPI_Count nbytes; + MPI_File fh; + MPI_Status status; + + MPI_Init(&argc, &argv); + MPI_Comm_rank(MPI_COMM_WORLD, &rank); + + offset = rank * NELEMS; + nbytes = NELEMS; + for (i=0; i
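The diff of mpi_file_write.c is truncated above. Purely for orientation, here is a minimal, self-contained sketch (hypothetical, not the actual contents or remainder of that file) of the kind of MPI-IO write test the workflow drives: each rank writes NELEMS bytes at offset rank * NELEMS into "testfile", so with mpiexec -n 4 the resulting file size, and the MPIIO_BYTES_WRITTEN total reported by darshan-parser, should both come out to 4 * NELEMS = 32 bytes. The use of MPI_File_write_at_all and the error handling below are illustrative assumptions only.

#include <stdio.h>
#include <mpi.h>

#define NELEMS 8

/* Hypothetical sketch: each rank writes NELEMS bytes at a rank-specific
 * offset, mirroring what the CI workflow measures via stat(1) on ./testfile
 * and via MPIIO_BYTES_WRITTEN in the Darshan log. */
int main(int argc, char **argv)
{
    char buf[NELEMS];
    int i, err, rank;
    MPI_Offset offset;
    MPI_File fh;
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* fill the buffer with a rank-dependent byte */
    for (i = 0; i < NELEMS; i++)
        buf[i] = (char)('0' + rank % 10);
    offset = (MPI_Offset)rank * NELEMS;

    err = MPI_File_open(MPI_COMM_WORLD, "testfile",
                        MPI_MODE_CREATE | MPI_MODE_WRONLY,
                        MPI_INFO_NULL, &fh);
    if (err != MPI_SUCCESS) {
        char msg[MPI_MAX_ERROR_STRING];
        int len;
        MPI_Error_string(err, msg, &len);
        fprintf(stderr, "MPI_File_open failed: %s\n", msg);
        MPI_Abort(MPI_COMM_WORLD, 1);
    }

    /* collective write: NELEMS bytes per rank at offset rank*NELEMS */
    err = MPI_File_write_at_all(fh, offset, buf, NELEMS, MPI_BYTE, &status);
    if (err != MPI_SUCCESS) {
        char msg[MPI_MAX_ERROR_STRING];
        int len;
        MPI_Error_string(err, msg, &len);
        fprintf(stderr, "MPI_File_write_at_all failed: %s\n", msg);
    }

    MPI_File_close(&fh);
    MPI_Finalize();
    return 0;
}

Built and launched the same way the workflow does (mpicc, then mpiexec --oversubscribe -n 4 with LD_PRELOAD pointing at libdarshan.so), such a program produces a Darshan log whose MPIIO_BYTES_WRITTEN counters sum to the size that stat reports for ./testfile, which is exactly the equality the workflow's loop asserts.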