diff --git a/.github/workflows/main-cmake.yml b/.github/workflows/main-cmake.yml new file mode 100644 index 0000000000..70432e5c4a --- /dev/null +++ b/.github/workflows/main-cmake.yml @@ -0,0 +1,163 @@ +name: NetCDF-C CMake CI - Windows + +on: [pull_request, workflow_dispatch] + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + + cmake_build_and_test: + strategy: + + matrix: + name: + - "Windows MSVC" + hdf5: + - "1.14.3" + + # Visual Studio + CMake + include: + - name: "Windows MSVC" + os: windows-latest + generator: "-G \"Visual Studio 17 2022\"" + + name: "${{ matrix.name }}" + + runs-on: ${{ matrix.os }} + + # Each step in the job. + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + steps: + - uses: msys2/setup-msys2@v2 + with: + update: true + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v3 + with: + miniconda-version: "latest" + activate-environment: "" + auto-activate-base: true + + - name: Set up Paths and env + shell: bash -el {0} + run: | + echo "" >> ~/.bash_profile + cat ~/.bash_profile + + + - name: Dump Matrix Context + run: echo '${{ toJSON(matrix) }}' + + #- run: echo "CMAKE_PREFIX_PATH=${env.CONDA_PREFIX}/Library" >> $GITHUB_ENV + #- run: echo "/c/Users/runneradmin/miniconda3/Library/lib:${GITHUB_PATH}" >> $GITHUB_ENV + #- run: echo "" + #- run: echo "CTEST_OUTPUT_ON_FAILURE=1" >> $GITHUB_ENV + + # Grab miniconda and use it to install HDF5 + - name: Install Dependencies using Miniconda + run: | + conda config --set always_yes yes --set changeps1 no --set show_channel_urls true + conda config --add channels conda-forge + conda update conda + conda install hdf5=${{ matrix.hdf5 }} m2-m4 libxml2 + shell: bash -el {0} + + # Double-check something + - name: Check Miniconda + run: | + which h5dump + which m4 + shell: bash -el {0} + + # Check current directory + - name: Query Current Environment + run: | + ls + echo "" + echo "PATH: $PATH" + 
echo "" + env + echo "" + ls $CONDA_PREFIX/Library + echo "" + ls $CONDA_PREFIX/Library/include/ + shell: bash -el {0} + + - name: Perform out-of-directory configuration + shell: bash -el {0} + run: | + mkdir build + cd build + cmake .. -DCMAKE_PREFIX_PATH="${CONDA_PREFIX}/Library" -DCMAKE_C_FLAGS="-I${CONDA_PREFIX}/Library/include" -DCMAKE_INSTALL_PREFIX=~/tmp -DNETCDF_ENABLE_FILTER_TESTING=OFF + if: ${{ success() }} + + - name: View cache - configuration + shell: bash -el {0} + run: | + cd build + cmake -L . + if: ${{ success() }} + + - name: Print Summary + shell: bash -el {0} + run: | + cd build + cat libnetcdf.settings + + - name: Perform out-of-directory build - libnetcdf + shell: bash -el {0} + run: | + cd build + cmake --build . --config Release --target netcdf -j 4 + + - name: Perform out-of-directory install - libnetcdf + shell: bash -el {0} + run: | + cd build + cmake --build . --config Release --target install -j 4 + if: ${{ success() }} + + - name: View config.h - libnetcdf failure + shell: bash -el {0} + run: | + cd build + cat config.h + if: ${{ failure() }} + + - name: Perform out-of-directory build - test suite + shell: bash -el {0} + run: | + cd build + cmake --build . --config Release -j 4 + if: ${{ success() }} + + - name: View config.h - tests failure + shell: bash -el {0} + run: | + cd build + cat config.h + if: ${{ failure() }} + + - name: Prepare ctest Paths and env + shell: bash -el {0} + run: | + cat ~/.bash_profile + echo "" >> ~/.bash_profile + + - name: Run ctest + shell: bash -el {0} + run: | + echo "Run ctest PATH: $PATH" + echo "Run ctest combined PATH: $PATH" + echo "Run ctest combined GITHUB_PATH: $GITHUB_PATH" + cd build + PATH=~/tmp/bin:$PATH ctest . -j 4 -E 'bom' --output-on-failure + + - name: Verbose Output if CTest Failure + shell: bash -el {0} + run: | + cd build + PATH=~/tmp/bin:$PATH ctest . 
--rerun-failed --output-on-failure -VV + if: ${{ failure() }} \ No newline at end of file diff --git a/.github/workflows/run_tests_cdash.yml b/.github/workflows/run_tests_cdash.yml index 5dd5c85cbc..1d135a1b04 100644 --- a/.github/workflows/run_tests_cdash.yml +++ b/.github/workflows/run_tests_cdash.yml @@ -21,7 +21,7 @@ jobs: hdf5: [ 1.10.8, 1.12.2, 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -32,7 +32,7 @@ jobs: ### - name: Cache libhdf5-${{ matrix.hdf5 }} id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -70,7 +70,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -81,7 +81,7 @@ jobs: ### - name: Cache libhdf5-parallel-${{ matrix.hdf5 }} id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-parallel-${{ runner.os }}-${{ matrix.hdf5 }} @@ -129,7 +129,7 @@ jobs: hdf5: [ 1.10.8, 1.12.2, 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: CDASH_TOKEN: ${{ secrets.CDASH_TOKEN }} env: @@ -153,7 +153,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} diff --git a/.github/workflows/run_tests_osx.yml b/.github/workflows/run_tests_osx.yml index 04ea6740c7..ba64c777c2 100644 --- a/.github/workflows/run_tests_osx.yml +++ b/.github/workflows/run_tests_osx.yml @@ -24,14 +24,14 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # libhdf5 ### - name: Cache libhdf5-${{ runner.os }}-${{ matrix.hdf5 }} id: cache-hdf5-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os 
}}-${{ matrix.hdf5 }} @@ -62,7 +62,7 @@ jobs: use_nczarr: [ nczarr_off, nczarr_on ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # Set Environmental Variables @@ -94,7 +94,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -168,7 +168,7 @@ jobs: use_nczarr: [ nczarr_off, nczarr_on ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # Set Environmental Variables @@ -200,7 +200,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -260,7 +260,7 @@ jobs: hdf5: [ 1.12.2, 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # Set Environmental Variables @@ -277,7 +277,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -342,7 +342,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # Set Environmental Variables @@ -357,7 +357,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -416,7 +416,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 ### # Set Environmental Variables @@ -432,7 +432,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5-osx - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} diff --git a/.github/workflows/run_tests_s3.yml b/.github/workflows/run_tests_s3.yml index 61af997dc0..b7f9d3fe6f 100644 --- a/.github/workflows/run_tests_s3.yml +++ 
b/.github/workflows/run_tests_s3.yml @@ -26,7 +26,7 @@ jobs: hdf5: [ 1.10.8, 1.12.2, 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -37,7 +37,7 @@ jobs: ### - name: Cache libhdf5-${{ matrix.hdf5 }} id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -73,7 +73,7 @@ jobs: hdf5: [ 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -94,7 +94,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} diff --git a/.github/workflows/run_tests_ubuntu.yml b/.github/workflows/run_tests_ubuntu.yml index 186f8da180..a755db876e 100644 --- a/.github/workflows/run_tests_ubuntu.yml +++ b/.github/workflows/run_tests_ubuntu.yml @@ -21,7 +21,7 @@ jobs: hdf5: [ 1.10.8, 1.12.2, 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -32,7 +32,7 @@ jobs: ### - name: Cache libhdf5-${{ matrix.hdf5 }} id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -70,7 +70,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -81,7 +81,7 @@ jobs: ### - name: Cache libhdf5-parallel-${{ matrix.hdf5 }} id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-parallel-${{ runner.os }}-${{ matrix.hdf5 }} @@ -131,7 +131,7 @@ jobs: hdf5: [ 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -152,7 
+152,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -226,7 +226,7 @@ jobs: hdf5: [ 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -247,7 +247,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -322,7 +322,7 @@ jobs: hdf5: [ 1.14.3 ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -338,7 +338,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-parallel-${{ runner.os }}-${{ matrix.hdf5 }} @@ -403,7 +403,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -422,7 +422,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -483,7 +483,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -503,7 +503,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -565,7 +565,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -584,7 +584,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-parallel-${{ runner.os 
}}-${{ matrix.hdf5 }} @@ -699,7 +699,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} @@ -808,7 +808,7 @@ jobs: use_nczarr: [ nczarr_off, nczarr_on ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install System dependencies shell: bash -l {0} @@ -842,7 +842,7 @@ jobs: - name: Fetch HDF Cache id: cache-hdf5 - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/environments/${{ matrix.hdf5 }} key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }} diff --git a/.github/workflows/run_tests_win_cygwin.yml b/.github/workflows/run_tests_win_cygwin.yml index 6e124448ed..e7a4d8002a 100644 --- a/.github/workflows/run_tests_win_cygwin.yml +++ b/.github/workflows/run_tests_win_cygwin.yml @@ -28,7 +28,7 @@ jobs: - name: Fix line endings run: git config --global core.autocrlf input - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: cygwin/cygwin-install-action@v2 with: diff --git a/.github/workflows/run_tests_win_mingw.yml b/.github/workflows/run_tests_win_mingw.yml index 978275cf6c..db0be3748e 100644 --- a/.github/workflows/run_tests_win_mingw.yml +++ b/.github/workflows/run_tests_win_mingw.yml @@ -26,7 +26,7 @@ jobs: steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: msys2/setup-msys2@v2 with: msystem: MINGW64 diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 6e4b476744..0000000000 --- a/appveyor.yml +++ /dev/null @@ -1,44 +0,0 @@ -image: Visual Studio 2019 - -environment: - matrix: - - TARGET_ARCH: x64 - CONDA_INSTALL_LOCN: C:\\Miniconda-x64 - MSYS2_INSTALL_LOCN: C:\msys64 - MSYS2_BIN_LOCN: C:\msys64\usr\bin - CMAKE_GENERATOR: "Visual Studio 16" - -platform: - - x64 - -branches: - except: -# - /.*[.]dmh/ - - /.*[.]wif/ - -# Do not build feature branch with open Pull Requests -skip_branch_with_pr: true - -install: - - cmd: set SRC_DIR=%cd% - - 
cmd: set INSTALL_LOC=%SRC_DIR%\install - - cmd: set PATH=%PATH%;%MSYS2_BIN_LOCN%;%INSTALL_LOC%\bin;%INSTALL_LOC%\lib - - cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat - - cmd: conda config --set always_yes yes --set changeps1 no --set show_channel_urls true - - cmd: conda update conda - - cmd: conda install hdf5=1.8.18 curl hdf4 zlib - -configuration: Release - -build: off - -# Run a custom script. -build_script: - - cmd: mkdir build - - cmd: cd build - - cmd: cmake .. -G "%CMAKE_GENERATOR%" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=%INSTALL_LOC% -DNETCDF_ENABLE_BASH_SCRIPT_TESTING=OFF -DNETCDF_ENABLE_FILTER_TESTING=OFF -DNETCDF_ENABLE_BYTERANGE=ON -DCMAKE_PREFIX_PATH=%CONDA_INSTALL_LOCN% - - cmd: if errorlevel 1 exit 1 - - cmd: cmake --build . --config Release -- /maxcpucount:4 - -test_script: - - cmd: cmake --build . --config Release --target install -- /maxcpucount:4 diff --git a/libdispatch/dmissing.c b/libdispatch/dmissing.c index 42c2d773ec..0c9be82745 100644 --- a/libdispatch/dmissing.c +++ b/libdispatch/dmissing.c @@ -44,7 +44,7 @@ strdup(const char* s) #endif -#ifndef WIN32 +#if !defined(_MSC_VER) && !defined(WIN32) #ifndef HAVE_STRLCPY /* diff --git a/libdispatch/ncrandom.c b/libdispatch/ncrandom.c index bfb1fbe3ca..7575aa72ad 100644 --- a/libdispatch/ncrandom.c +++ b/libdispatch/ncrandom.c @@ -19,7 +19,7 @@ int main() { unsigned int urnd = 0; /* range 0..2147483647 */ -#ifdef WIN32 +#if defined(WIN32) || defined(_MSC_VER) (void)rand_s(&urnd); #else long rnd; diff --git a/ncdump/tst_bom.sh b/ncdump/tst_bom.sh index 63eb38d88d..9b11b9ab70 100755 --- a/ncdump/tst_bom.sh +++ b/ncdump/tst_bom.sh @@ -6,7 +6,7 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi # This shell script tests BOM support in ncgen set -e - +set -x # add hack for sunos export srcdir; @@ -28,7 +28,13 @@ echo "*** Generate a cdl file with leading UTF-8 BOM." 
${execdir}/bom 8 >tst_bom8.cdl cat tst_bom.cdl >> tst_bom8.cdl +echo "" +echo "Viewing tst_bom8.cdl:" +cat tst_bom8.cdl +echo "" + echo "*** Verify .nc file" + ${NCGEN} -k nc3 -o tst_bom8.nc tst_bom8.cdl ${NCDUMP} -n tst_bom tst_bom8.nc > tmp_bom.cdl diff -w tst_bom.cdl tmp_bom.cdl @@ -40,6 +46,11 @@ rm -f tmp_bom.cdl tst_bom8.* tst_bom16.* echo "*** Generate a cdl file with leading UTF-16 BOM." ${execdir}/bom 16 >tst_bom16.cdl cat tst_bom.cdl >> tst_bom16.cdl +echo "" +echo "Viewing tst_bom16.cdl:" +cat tst_bom16.cdl +echo "" + echo "*** Verify UTF-16 file fails" if ${NCGEN} -k nc3 -o tst_bom16.nc tst_bom16.cdl ; then diff --git a/nczarr_test/run_nccopyz.sh b/nczarr_test/run_nccopyz.sh index 66b4286cd7..113f86085b 100755 --- a/nczarr_test/run_nccopyz.sh +++ b/nczarr_test/run_nccopyz.sh @@ -22,7 +22,7 @@ verifychunking() { f=$1 shift for t in "$@" ; do - x=`cat $f | tr -d "\t \r" | sed -e "/$t/p" -ed` + x=`cat $f | tr -d "[:space:]" | sed -e "/$t/p" -ed` if test "x$x" = x ; then echo "$f: $t not found"; exit 1; fi done } @@ -71,9 +71,12 @@ fileargs tmp_pds ${NCCOPY} -M0 -4 -c "time/10,lat/15,lon/20" "$SRC" "$fileurl" ${NCDUMP} -n tmp_pds -hs "$fileurl" > tmp_pds.cdl -STORAGE=`cat tmp_pds.cdl | sed -e "/tas:_Storage/p" -ed | tr '"' "'" | tr -d "\t \r"` -test "x$STORAGE" = "xtas:_Storage='chunked';" -CHUNKSIZES=`cat tmp_pds.cdl | sed -e "/tas:_ChunkSizes/p" -ed | tr -d "\t \r"` + +STORAGE=`cat tmp_pds.cdl | sed -e "/tas:_Storage/p" -ed | tr -d "[:space:]"` +echo "STORAGE: $STORAGE" + +test "x$STORAGE" = "xtas:_Storage='chunked';" || test "x$STORAGE" = "xtas:_Storage=\"chunked\";" +CHUNKSIZES=`cat tmp_pds.cdl | sed -e "/tas:_ChunkSizes/p" -ed | tr -d "[:space:]"` test "x$CHUNKSIZES" = "xtas:_ChunkSizes=10,15,20;" }