diff --git a/.github/workflows/main-cmake.yml b/.github/workflows/main-cmake.yml index 85dee08450..7172fe03e7 100644 --- a/.github/workflows/main-cmake.yml +++ b/.github/workflows/main-cmake.yml @@ -1,6 +1,6 @@ name: NetCDF-C CMake CI - Windows -on: [pull_request,workflow_dispatch] +on: [push,pull_request, workflow_dispatch] env: REMOTETESTDOWN: no diff --git a/.github/workflows/run_tests_osx.yml b/.github/workflows/run_tests_osx.yml index 5589bb0266..51e85a37e8 100644 --- a/.github/workflows/run_tests_osx.yml +++ b/.github/workflows/run_tests_osx.yml @@ -6,7 +6,7 @@ name: Run macOS-based netCDF Tests -on: [pull_request,workflow_dispatch] +on: [push,pull_request,workflow_dispatch] env: REMOTETESTDOWN: no diff --git a/.github/workflows/run_tests_ubuntu.yml b/.github/workflows/run_tests_ubuntu.yml index 86c8c540a1..d67697dfe3 100644 --- a/.github/workflows/run_tests_ubuntu.yml +++ b/.github/workflows/run_tests_ubuntu.yml @@ -4,7 +4,7 @@ name: Run Ubuntu/Linux netCDF Tests -on: [pull_request,workflow_dispatch] +on: [push,pull_request,workflow_dispatch] env: REMOTETESTDOWN: no diff --git a/.github/workflows/run_tests_win_cygwin.yml b/.github/workflows/run_tests_win_cygwin.yml index c4fed134a3..41b6d4a550 100644 --- a/.github/workflows/run_tests_win_cygwin.yml +++ b/.github/workflows/run_tests_win_cygwin.yml @@ -1,6 +1,6 @@ name: Run Cygwin-based tests -on: [pull_request,workflow_dispatch] +on: [push,pull_request,workflow_dispatch] concurrency: group: ${{ github.workflow}}-${{ github.head_ref }} @@ -17,7 +17,7 @@ jobs: runs-on: windows-latest defaults: run: - shell: bash -eo pipefail -o igncr "{0}" + shell: C:/cygwin/bin/bash.exe -eo pipefail -o igncr "{0}" name: Cygwin-based Autotools tests @@ -27,19 +27,21 @@ jobs: steps: - name: Fix line endings + shell: pwsh run: git config --global core.autocrlf input - uses: actions/checkout@v4 - - uses: cygwin/cygwin-install-action@v2 + - uses: cygwin/cygwin-install-action@v4 with: platform: x86_64 + install-dir: 'C:\cygwin' 
packages: >- git automake libtool autoconf2.5 make libhdf5-devel libhdf4-devel zipinfo libxml2-devel perl zlib-devel libzstd-devel libbz2-devel libaec-devel libzip-devel - libdeflate-devel gcc-core libcurl-devel libiconv-devel - libssl-devel libcrypt-devel + libdeflate-devel gcc-core gcc-g++ libcurl-devel libiconv-devel + libssl-devel libcrypt-devel attr libattr-devel - name: (Autotools) Run autoconf and friends run: | @@ -76,7 +78,81 @@ jobs: if [ $(find /tmp/pretend-root/$(pwd) -type f | wc -l) -gt 0 ]; then exit 1; fi fi - - name: (Autotools) Build and run tests + - name: (Autotools) Build tests timeout-minutes: 30 run: | - make check -j8 SHELL=/bin/dash + make check -j$(nproc) TESTS="" SHELL=/bin/dash + + - name: (Autotools) Run tests + timeout-minutes: 30 + run: | + make check -j$(nproc) SHELL=/bin/dash + + build-and-test-cmake: + name: Cygwin-based CMake tests + runs-on: windows-latest + defaults: + run: + shell: C:/cygwin/bin/bash.exe -eo pipefail -o igncr "{0}" + + steps: + + - run: git config --global core.autocrlf input + shell: pwsh + - uses: actions/checkout@v4 + - uses: cygwin/cygwin-install-action@v4 + with: + platform: x86_64 + install-dir: 'C:\cygwin' + packages: >- + git automake libtool autoconf2.5 make libhdf5-devel + libhdf4-devel zipinfo libxml2-devel perl zlib-devel + libzstd-devel libbz2-devel libaec-devel libzip-devel + libdeflate-devel gcc-core gcc-g++ libcurl-devel libiconv-devel + libssl-devel libcrypt-devel cmake ninja make m4 diffutils unzip + +### +# Configure and build +### + + - name: (CMake) Configure Build + env: + MAKE: "/usr/bin/make" + CXX: "/usr/bin/g++" + run: | + /usr/bin/cmake \ + -G"Unix Makefiles" \ + -B build \ + -S . 
\ + -DCMAKE_INSTALL_PREFIX=/tmp \ + -DBUILD_SHARED_LIBS=ON \ + -DNETCDF_ENABLE_NETCDF_4=ON \ + -DNETCDF_BUILD_UTILITIES=ON \ + -DNETCDF_ENABLE_TESTS=ON \ + -DNETCDF_ENABLE_HDF5=ON \ + -DNETCDF_ENABLE_NCZARR=TRUE \ + -DNETCDF_ENABLE_PLUGINS=ON + if: ${{ success() }} + + - name: (CMake) Look at CMakeCache.txt if error + run: cat build/CMakeCache.txt + if: ${{ failure() }} + + - name: (CMake) Print Summary + run: cat build/libnetcdf.settings + + - name: (CMake) Build All + env: + MAKE: "/usr/bin/make" + CXX: "/usr/bin/g++" + run: cmake --build build -j$(nproc) + if: ${{ success() }} + + - name: (CMake) Run Tests + run: PATH=$PWD/build:$PATH ctest --test-dir build + if: ${{ success() }} + + - name: (CMake) Verbose output of CTest failures + run: >- + PATH=$PWD/build:$PATH ctest --test-dir build --output-on-failure -j$(nproc) --rerun-failed -VV + if: ${{ failure() }} diff --git a/.github/workflows/run_tests_win_mingw.yml b/.github/workflows/run_tests_win_mingw.yml index 072f92f94d..069befc366 100644 --- a/.github/workflows/run_tests_win_mingw.yml +++ b/.github/workflows/run_tests_win_mingw.yml @@ -10,7 +10,7 @@ env: #CPPFLAGS: "-D_BSD_SOURCE" REMOTETESTDOWN: no -on: [pull_request,workflow_dispatch] +on: [push,pull_request,workflow_dispatch] concurrency: group: ${{ github.workflow}}-${{ github.head_ref }} diff --git a/CMakeLists.txt b/CMakeLists.txt index 7d72e5b963..da497de089 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -16,10 +16,29 @@ project(netCDF VERSION 4.9.3 ) -#Add custom CMake Module +##### +# Version Info: +# +# Release Version +# Library Version +# SO Version +# +# SO Version is computed from library version. 
See: +# http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning +##### -set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules/;${PROJECT_SOURCE_DIR}/cmake" - CACHE INTERNAL "Location of our custom CMake modules.") +set(NC_VERSION_NOTE "-rc1") +set(netCDF_VERSION ${PROJECT_VERSION}${NC_VERSION_NOTE}) +set(VERSION ${netCDF_VERSION}) +set(NC_VERSION ${netCDF_VERSION}) +set(PACKAGE_VERSION ${VERSION}) + +# These values should match those in configure.ac +set(netCDF_LIB_VERSION 22) +set(netCDF_SO_VERSION 22) + +#Add custom CMake Module +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules" "${PROJECT_SOURCE_DIR}/cmake") set(PACKAGE "netCDF" CACHE STRING "") @@ -53,26 +72,7 @@ endif() add_library(netcdf) add_library(netCDF::netcdf ALIAS netcdf) -##### -# Version Info: -# -# Release Version -# Library Version -# SO Version -# -# SO Version is computed from library version. See: -# http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning -##### -set(NC_VERSION_NOTE "-development") -set(netCDF_VERSION ${PROJECT_VERSION}${NC_VERSION_NOTE}) -set(VERSION ${netCDF_VERSION}) -set(NC_VERSION ${netCDF_VERSION}) -set(PACKAGE_VERSION ${VERSION}) - -# These values should match those in configure.ac -set(netCDF_LIB_VERSION 19) -set(netCDF_SO_VERSION 19) # Version of the dispatch table. This must match the value in # configure.ac. 
@@ -89,6 +89,11 @@ if(UNAME) set(TMP_BUILDNAME "${osname}-${osrel}-${cpu}") endif() +find_program(GETFATTR NAMES getfattr) +if(GETFATTR) + set(HAVE_GETFATTR TRUE) +endif() + # Define some Platforms if(osname MATCHES "CYGWIN.*") set(ISCYGWIN yes) @@ -534,7 +539,7 @@ endif() # enable|disable all forms of network access option(NETCDF_ENABLE_REMOTE_FUNCTIONALITY "Enable|disable all forms remote data access (DAP, S3, etc)" ON) -if(NOT NETCDF_ENABLE_REMOTE_FUNCTIONALITY) +if(NOT NETCDF_ENABLE_REMOTE_FUNCTIONALITY AND NETCDF_ENABLE_DAP) message(WARNING "NETCDF_ENABLE_REMOTE_FUNCTIONALITY=NO => NETCDF_ENABLE_DAP[4]=NO") set(NETCDF_ENABLE_DAP OFF CACHE BOOL "NETCDF_ENABLE_REMOTE_FUNCTIONALITY=NO => NETCDF_ENABLE_DAP=NO" FORCE) set(NETCDF_ENABLE_DAP2 OFF CACHE BOOL "NETCDF_ENABLE_REMOTE_FUNCTIONALITY=NO => NETCDF_ENABLE_DAP2=NO" FORCE) @@ -605,7 +610,7 @@ endif() # Option to support byte-range reading of remote datasets option(NETCDF_ENABLE_BYTERANGE "Enable byte-range access to remote datasets.." ${NETCDF_ENABLE_DAP}) -if(NOT NETCDF_ENABLE_REMOTE_FUNCTIONALITY) +if(NOT NETCDF_ENABLE_REMOTE_FUNCTIONALITY AND NETCDF_ENABLE_BYTERANGE) message(WARNING "NETCDF_ENABLE_REMOTE_FUNCTIONALITY=NO => NETCDF_ENABLE_BYTERANGE=NO") set(NETCDF_ENABLE_BYTERANGE OFF CACHE BOOL "NETCDF_ENABLE_REMOTE_FUNCTIONALITY=NO => NETCDF_ENABLE_BYTERANGE=NO" FORCE) endif() @@ -622,7 +627,7 @@ set(NETCDF_ENABLE_DAP_LONG_TESTS OFF CACHE BOOL "" FORCE) endif() # Provide a global control for remotetest. -if ("$ENV{REMOTETESTDOWN}" STREQUAL "yes") +if ("$ENV{REMOTETESTDOWN}" STREQUAL "yes" AND NETCDF_ENABLE_DAP_REMOTE_TESTS) message(WARNING "ENV(REMOTETESTDOWN) => NETCDF_ENABLE_DAP_REMOTE_TESTS == OFF") set(NETCDF_ENABLE_DAP_REMOTE_TESTS OFF CACHE BOOL "" FORCE) endif() @@ -699,7 +704,7 @@ else() # No option specified endif() # Try to enable NCZarr zip support -option(NETCDF_ENABLE_NCZARR_ZIP "Enable NCZarr ZIP support." OFF) +option(NETCDF_ENABLE_NCZARR_ZIP "Enable NCZarr ZIP support." 
${NETCDF_ENABLE_NCZARR}) include(CMakeDependentOption) @@ -751,13 +756,18 @@ if(NOT WIN32) endif() # Options for S3 Support -option(NETCDF_ENABLE_S3 "Enable S3 support." OFF) +#option(NETCDF_ENABLE_S3 "Enable S3 support." OFF) +option(NETCDF_ENABLE_S3_AWS "Enable S3 support via AWS-CPP-SDK" OFF) option(NETCDF_ENABLE_S3_INTERNAL "Enable S3 Internal support." OFF) -option(NETCDF_ENABLE_NCZARR_S3 "Enable NCZarr S3 support; Deprecated in favor of NETCDF_ENABLE_S3" OFF) + +cmake_dependent_option(NETCDF_ENABLE_S3 "Enable S3 Support" ON "NETCDF_ENABLE_S3_AWS OR NETCDF_ENABLE_S3_INTERNAL" OFF) + +option(NETCDF_ENABLE_NCZARR_S3 "Enable NCZarr S3 support; Deprecated in favor of NETCDF_ENABLE_S3" ${NETCDF_ENABLE_S3}) if(NOT NETCDF_ENABLE_REMOTE_FUNCTIONALITY) set(NETCDF_ENABLE_S3 OFF CACHE BOOL "" FORCE) set(NETCDF_ENABLE_S3_INTERNAL OFF CACHE BOOL "" FORCE) + set(NETCDF_ENABLE_S3_AWS OFF CACHE BOOL "" FORCE) set(NETCDF_ENABLE_NCZARR_S3 OFF CACHE BOOL "" FORCE) endif() @@ -1053,8 +1063,8 @@ if (NOT NETCDF_ENABLE_PLUGINS AND NETCDF_ENABLE_NCZARR_FILTERS) set(NETCDF_ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Enable NCZarr Filters." 
 FORCE) endif() -IF (NOT NETCDF_ENABLE_NCZARR) - message(WARNING "NETCDF_ENABLE_NCZARR==NO => NETCDF_ENABLE_NCZARR_FILTERS==NO") +IF (NOT NETCDF_ENABLE_NCZARR AND NETCDF_ENABLE_NCZARR_FILTERS) + message(WARNING "NETCDF_ENABLE_NCZARR==NO => NETCDF_ENABLE_NCZARR_FILTERS==NO") set(NETCDF_ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Disable NCZARR_FILTERS" FORCE) endif() @@ -1171,6 +1181,7 @@ CHECK_INCLUDE_file("io.h" HAVE_IO_H) endif(MSVC) CHECK_INCLUDE_file("stdlib.h" HAVE_STDLIB_H) CHECK_INCLUDE_file("ctype.h" HAVE_CTYPE_H) +CHECK_INCLUDE_file("sys/xattr.h" HAVE_SYS_XATTR_H) CHECK_INCLUDE_file("stdarg.h" HAVE_STDARG_H) CHECK_INCLUDE_file("strings.h" HAVE_STRINGS_H) CHECK_INCLUDE_file("signal.h" HAVE_SIGNAL_H) @@ -1322,6 +1333,7 @@ CHECK_FUNCTION_EXISTS(_filelengthi64 HAVE_FILE_LENGTH_I64) CHECK_FUNCTION_EXISTS(mmap HAVE_MMAP) CHECK_FUNCTION_EXISTS(mremap HAVE_MREMAP) CHECK_FUNCTION_EXISTS(fileno HAVE_FILENO) +CHECK_FUNCTION_EXISTS(H5Literate2 HAVE_H5LITERATE2) CHECK_FUNCTION_EXISTS(clock_gettime HAVE_CLOCK_GETTIME) CHECK_SYMBOL_EXISTS("struct timespec" "time.h" HAVE_STRUCT_TIMESPEC) @@ -1551,10 +1563,7 @@ endif() # STATIC_DEFINE netcdf_BUILT_AS_STATIC #) -##### -# Build doxygen documentation, if need be. 
-##### -add_subdirectory(docs) + ## # Brute force, grab all of the dlls from the dependency directory, @@ -1641,23 +1650,12 @@ if(NOT IS_DIRECTORY ${netCDF_BINARY_DIR}/tmp) file(MAKE_DIRECTORY ${netCDF_BINARY_DIR}/tmp) endif() -configure_file("${netCDF_SOURCE_DIR}/nc-config.cmake.in" - "${netCDF_BINARY_DIR}/tmp/nc-config" @ONLY - NEWLINE_STYLE LF) -file(COPY "${netCDF_BINARY_DIR}/tmp/nc-config" - DESTINATION ${netCDF_BINARY_DIR}/ - FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) - install(FILES ${netCDF_BINARY_DIR}/netcdf.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig COMPONENT utilities) -install(PROGRAMS ${netCDF_BINARY_DIR}/nc-config - DESTINATION ${CMAKE_INSTALL_BINDIR} - COMPONENT utilities) - ### -# End pkgconfig, nc-config file creation. +# End pkgconfig file creation. ### ## @@ -1765,6 +1763,8 @@ endif() # Copy the CTest customization file into binary directory, as required. configure_file("${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.cmake") +message(STATUS "STD_FILTERS: ${STD_FILTERS}") + # Generate file from template. configure_file("${CMAKE_CURRENT_SOURCE_DIR}/libnetcdf.settings.in" "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings" @@ -1796,6 +1796,20 @@ install(FILES "${netCDF_BINARY_DIR}/libnetcdf.settings" # End libnetcdf.settings section. ##### +##### +# Create 'nc-config' file. +##### +configure_file("${netCDF_SOURCE_DIR}/nc-config.cmake.in" + "${netCDF_BINARY_DIR}/tmp/nc-config" @ONLY + NEWLINE_STYLE LF) +file(COPY "${netCDF_BINARY_DIR}/tmp/nc-config" + DESTINATION ${netCDF_BINARY_DIR}/ + FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) + +install(PROGRAMS ${netCDF_BINARY_DIR}/nc-config + DESTINATION ${CMAKE_INSTALL_BINDIR} + COMPONENT utilities) + ##### # Create 'netcdf_meta.h' include file. 
##### @@ -1820,6 +1834,12 @@ set(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}") set(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}") configure_file(${CMAKE_CURRENT_SOURCE_DIR}/test_common.in ${CMAKE_CURRENT_BINARY_DIR}/test_common.sh @ONLY NEWLINE_STYLE LF) +##### +# Build doxygen documentation, if need be. +# This must come after setting top_builddir, etc. +##### +add_subdirectory(docs) + #### # Build s3cleanup.sh and s3gc.sh ##### diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 3e50b75cc1..bd35d2e33b 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -5,13 +5,22 @@ Release Notes {#RELEASE_NOTES} This file contains a high-level description of this package's evolution. Releases are in reverse chronological order (most recent first). Note that, as of netcdf 4.2, the `netcdf-c++` and `netcdf-fortran` libraries have been separated into their own libraries. +## 4.9.4 - TBD +* Add experimental support for the Zarr Version 3 storage format. This code will change as the Zarr Version 3 Specification evolves. See [Github #????](https://github.com/Unidata/netcdf-c/pull/????). + ## 4.9.3 - TBD -* Add experimental support for the Zarr Version 3 storage format. This code willl change as the Zarr Version 3 Specification evolves. See [Github #????](https://github.com/Unidata/netcdf-c/pull/????). +* Provide better documentation for the .rc file mechanism and API. See [Github #2956](https://github.com/Unidata/netcdf-c/pull/2956) for more information. +* Convert NCZarr V2 to store all netcdf-4 specific info as attributes. This improves interoperability with other Zarr implementations by no longer using non-standard keys. The price to be paid is that lazy attribute reading cannot be supported. See [Github #2936](https://github.com/Unidata/netcdf-c/pull/2936) for more information. +* Cleanup the option code for NETCDF_ENABLE_SET_LOG_LEVEL\[_FUNC\] See [Github #2931](https://github.com/Unidata/netcdf-c/pull/2931) for more information. 
+* +### Release Candidate 1 - July 26, 2024 + +* Convert NCZarr V2 to store all netcdf-4 specific info as attributes. This improves interoperability with other Zarr implementations by no longer using non-standard keys. The price to be paid is that lazy attribute reading cannot be supported. See [Github #2836](https://github.com/Unidata/netcdf-c/issues/2936) for more information. * Cleanup the option code for NETCDF_ENABLE_SET_LOG_LEVEL\[_FUNC\] See [Github #2931](https://github.com/Unidata/netcdf-c/issues/2931) for more information. * Fix duplicate definition when using aws-sdk-cpp. See [Github #2928](https://github.com/Unidata/netcdf-c/issues/2928) for more information. -* Cleanup various obsolete options and do some code refactoring. See [Github #2926](https://github.com/Unidata/netcdf-c/issues/2926) for more information. -* Convert the Zarr-related ENABLE_XXX options to NETCDF_ENABLE_XXX options (part of the cmake overhaul). See [Github #2923](https://github.com/Unidata/netcdf-c/issues/2923) for more information. +* Cleanup various obsolete options and do some code refactoring. See [Github #2926](https://github.com/Unidata/netcdf-c/pull/2926) for more information. +* Convert the Zarr-related ENABLE_XXX options to NETCDF_ENABLE_XXX options (part of the cmake overhaul). See [Github #2923](https://github.com/Unidata/netcdf-c/pull/2923) for more information. * Refactor macro `_FillValue` to `NC_FillValue` to avoid conflict with libc++ headers. See [Github #2858](https://github.com/Unidata/netcdf-c/issues/2858) for more information. * Changed `cmake` build options to be prefaced with `NETCDF`, to bring things in to line with best practices. This will permit a number of overall quality of life improvements to netCDF, in terms of allowing it to be more easily integrated with upstream projects via `FetchContent()`, `subdirectory()`, etc. 
Currently, the naming convention in use thus far will still work, but will result in warning messages about deprecation, and instructions on how to update your workflow. See [Github #2895](https://github.com/Unidata/netcdf-c/pull/2895) for more information. * Incorporate a more modern look and feel to user documentation generated by Doxygen. See [Doxygen Awesome CSS](https://github.com/jothepro/doxygen-awesome-css) and [Github #2864](https://github.com/Unidata/netcdf-c/pull/2864) for more information. diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 89d1555289..6aaf3d8623 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -134,8 +134,13 @@ if(USE_HDF5) ##### # First, find the C and HL libraries. ##### - find_package(HDF5 ${HDF5_VERSION_REQUIRED} COMPONENTS C HL REQUIRED) + find_package(HDF5 COMPONENTS C HL REQUIRED) + message(STATUS "Found HDF5 version: ${HDF5_VERSION}") + if(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) + message(FATAL_ERROR "NetCDF requires HDF5 version ${HDF5_VERSION_REQUIRED} or later; found version ${HDF5_VERSION}.") + endif() + message(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIRS}") target_link_libraries(netcdf PRIVATE @@ -364,6 +369,7 @@ endif() ################################ # Zips ################################ +MESSAGE(STATUS "Checking for filter libraries") IF (NETCDF_ENABLE_FILTER_SZIP) find_package(Szip) elseif(NETCDF_ENABLE_NCZARR) @@ -380,31 +386,40 @@ IF (NETCDF_ENABLE_FILTER_ZSTD) endif() # Accumulate standard filters -set(STD_FILTERS "deflate") # Always have deflate*/ +#set(STD_FILTERS "bz2") +set(FOUND_STD_FILTERS "") +if(ENABLE_ZLIB) + set(STD_FILTERS "deflate") +endif() set_std_filter(Szip) set(HAVE_SZ ${Szip_FOUND}) set(USE_SZIP ${HAVE_SZ}) set_std_filter(Blosc) if(Zstd_FOUND) set_std_filter(Zstd) - set(HAVE_ZSTD ON) endif() if(Bz2_FOUND) set_std_filter(Bz2) else() # The reason we use a local version is to support a more comples test case - message("libbz2 not 
found using built-in version") + message("libbz2 not found using built-in version") set(HAVE_LOCAL_BZ2 ON) set(HAVE_BZ2 ON CACHE BOOL "") set(STD_FILTERS "${STD_FILTERS} bz2") endif() +set(STD_FILTERS "${STD_FILTERS}${FOUND_STD_FILTERS}") IF (NETCDF_ENABLE_NCZARR_ZIP) - find_package(Zip REQUIRED) - target_include_directories(netcdf - PRIVATE + find_package(Zip) + if(Zip_FOUND) + target_include_directories(netcdf + PRIVATE ${Zip_INCLUDE_DIRS} - ) + ) + else() + message(STATUS "libzip development package not found, disabling NETCDF_ENABLE_NCZARR_ZIP") + set(NETCDF_ENABLE_NCZARR_ZIP OFF CACHE BOOL "Enable NCZARR_ZIP functionality." FORCE) + endif() endif () ################################ @@ -414,7 +429,7 @@ endif () # because for some reason this screws up if we unconditionally test for sdk # and it is not available. Fix someday if(NETCDF_ENABLE_S3) - if(NOT NETCDF_ENABLE_S3_INTERNAL) + if(NETCDF_ENABLE_S3_AWS) # See if aws-s3-sdk is available find_package(AWSSDK REQUIRED COMPONENTS s3;transfer) if(AWSSDK_FOUND) @@ -426,7 +441,7 @@ if(NETCDF_ENABLE_S3) else(AWSSDK_FOUND) set(NETCDF_ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) endif(AWSSDK_FOUND) - else(NOT NETCDF_ENABLE_S3_INTERNAL) + else(NETCDF_ENABLE_S3_INTERNAL) # Find crypto libraries required with testing with the internal s3 api. 
#find_library(SSL_LIB NAMES ssl openssl) find_package(OpenSSL REQUIRED) @@ -439,7 +454,7 @@ if(NETCDF_ENABLE_S3) # message(FATAL_ERROR "Can't find a crypto library, required by S3_INTERNAL") #endif(NOT CRYPTO_LIB) - endif(NOT NETCDF_ENABLE_S3_INTERNAL) + endif(NETCDF_ENABLE_S3_AWS) else() set(NETCDF_ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) endif() diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake index 89c9388a5f..5c8566bd1f 100644 --- a/cmake/netcdf_functions_macros.cmake +++ b/cmake/netcdf_functions_macros.cmake @@ -2,26 +2,25 @@ # Macros ################################ -macro(set_std_filter filter) +function(set_std_filter filter) # Upper case the filter name string(TOUPPER "${filter}" upfilter) string(TOLOWER "${filter}" downfilter) - if(ENABLE_FILTER_${upfilter}) + if(NETCDF_ENABLE_FILTER_${upfilter}) # Define a test flag for filter if(${filter}_FOUND) include_directories(${${filter}_INCLUDE_DIRS}) - set(ENABLE_${upfilter} TRUE) - set(HAVE_${upfilter} ON) - set(STD_FILTERS "${STD_FILTERS} ${downfilter}") - message(">>> Standard Filter: ${downfilter}") + set(NETCDF_ENABLE_${upfilter} TRUE CACHE BOOL "Enable ${upfilter}") + set(HAVE_${upfilter} ON CACHE BOOL "Have ${upfilter}") + set(FOUND_STD_FILTERS "${FOUND_STD_FILTERS} ${downfilter}" PARENT_SCOPE) else() - set(ENABLE_${upfilter} FALSE) - set(HAVE_${upfilter} OFF) + set(NETCDF_ENABLE_${upfilter} FALSE CACHE BOOL "Enable ${upfilter}" FORCE) + set(HAVE_${upfilter} OFF CACHE BOOL "Have ${upfilter}" FORCE) endif() else() - set(HAVE_${upfilter} OFF) + set(HAVE_${upfilter} OFF CACHE BOOL "Have ${upfilter}" FORCE) endif() -endmacro(set_std_filter) +endfunction(set_std_filter) macro(getuname name flag) execute_process(COMMAND "${UNAME}" "${flag}" OUTPUT_VARIABLE "${name}" OUTPUT_STRIP_TRAILING_WHITESPACE) diff --git a/config.h.cmake.in b/config.h.cmake.in index 65bc7cd811..1f04a3c79e 100644 --- a/config.h.cmake.in +++ b/config.h.cmake.in @@ -379,6 +379,12 @@ are set when 
opening a binary file on Windows. */ /* Define to 1 if you have the header file. */ #cmakedefine HAVE_CTYPE_H 1 +/* Define to 1 if you have the getfattr command line utility. */ +#cmakedefine HAVE_GETFATTR 1 + +/* Define to 1 if you have the header file. */ +#cmakedefine HAVE_SYS_XATTR_H + /* Define to 1 if you have the header file. */ #cmakedefine HAVE_STRINGS_H 1 diff --git a/configure.ac b/configure.ac index 9b77c2769e..3807828e62 100644 --- a/configure.ac +++ b/configure.ac @@ -10,7 +10,7 @@ AC_PREREQ([2.59]) # Initialize with name, version, and support email address. -AC_INIT([netCDF],[4.9.3-development],[support-netcdf@unidata.ucar.edu],[netcdf-c]) +AC_INIT([netCDF],[4.9.4-development],[support-netcdf@unidata.ucar.edu],[netcdf-c]) ## # Prefer an empty CFLAGS variable instead of the default -g -O2. @@ -21,7 +21,7 @@ AC_INIT([netCDF],[4.9.3-development],[support-netcdf@unidata.ucar.edu],[netcdf-c AC_SUBST([netCDF_VERSION_MAJOR]) netCDF_VERSION_MAJOR=4 AC_SUBST([netCDF_VERSION_MINOR]) netCDF_VERSION_MINOR=9 -AC_SUBST([netCDF_VERSION_PATCH]) netCDF_VERSION_PATCH=3 +AC_SUBST([netCDF_VERSION_PATCH]) netCDF_VERSION_PATCH=4 AC_SUBST([NC_VERSION_NOTE]) NC_VERSION_NOTE="-development" ## @@ -29,7 +29,7 @@ AC_SUBST([NC_VERSION_NOTE]) NC_VERSION_NOTE="-development" # See http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning # for information regarding incrementing `-version-info`. 
# These values should match those in CMakeLists.txt -AC_SUBST([netCDF_SO_VERSION]) netCDF_SO_VERSION=21:2:2 +AC_SUBST([netCDF_SO_VERSION]) netCDF_SO_VERSION=22:0:0 ##### # Set some variables used to generate a libnetcdf.settings file, @@ -82,6 +82,14 @@ AC_LANG_POP([C]) if test $have_no_strict_aliasing = no; then CFLAGS=$SAVE_CFLAGS fi + +## +# Check to see if we have getfattr +## +AC_CHECK_PROGS([HAVE_GETFATTR], [getfattr]) +if test -n "$HAVE_GETFATTR"; then + AC_DEFINE_UNQUOTED([HAVE_GETFATTR],[1],[getfattr is available]) +fi ## # Some files need to exist in build directories # that do not correspond to their source directory, or @@ -1330,7 +1338,7 @@ AC_CHECK_HEADERS([sys/param.h]) AC_CHECK_HEADERS([libgen.h]) #AC_CHECK_HEADERS([locale.h]) AC_HEADER_STDC -AC_CHECK_HEADERS([locale.h stdio.h stdarg.h fcntl.h malloc.h stdlib.h string.h strings.h unistd.h sys/stat.h getopt.h sys/time.h sys/types.h time.h dirent.h stdint.h ctype.h]) +AC_CHECK_HEADERS([locale.h stdio.h stdarg.h fcntl.h malloc.h stdlib.h string.h strings.h unistd.h sys/stat.h getopt.h sys/time.h sys/types.h time.h dirent.h stdint.h ctype.h sys/xattr.h]) # Do sys/resource.h separately #AC_CHECK_HEADERS([sys/resource.h],[havesysresource=1],[havesysresource=0]) @@ -1648,13 +1656,13 @@ if test "x$enable_hdf5" = xyes; then # H5Pset_fapl_mpiposix and H5Pget_fapl_mpiposix have been removed since HDF5 1.8.12. # Use H5Pset_fapl_mpio and H5Pget_fapl_mpio, instead. - AC_CHECK_FUNCS([H5Pget_fapl_mpio H5Pset_deflate H5Z_SZIP H5Pset_all_coll_metadata_ops H5Literate]) + AC_CHECK_FUNCS([H5Pget_fapl_mpio H5Pset_deflate H5Z_SZIP H5Pset_all_coll_metadata_ops H5Literate H5Literate2]) # Check to see if HDF5 library has collective metadata APIs, (HDF5 >= 1.10.0) if test "x$ac_cv_func_H5Pset_all_coll_metadata_ops" = xyes; then AC_DEFINE([HDF5_HAS_COLL_METADATA_OPS], [1], [if true, use collective metadata ops in parallel netCDF-4]) fi - + # If parallel is available in hdf5, enable it in the C code. 
Also add some stuff to netcdf.h. if test "x$ac_cv_func_H5Pget_fapl_mpio" = xyes -o "x$ac_cv_func_H5Pget_fapl_mpiposix" = xyes; then hdf5_parallel=yes diff --git a/docs/.gitignore b/docs/.gitignore index e69de29bb2..afe7bb1c16 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -0,0 +1,6 @@ +Doxyfile +custom_style.css +latex_main +man +Doxyfile.bak + diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt index 75b5820d7a..694a00c1b4 100644 --- a/docs/CMakeLists.txt +++ b/docs/CMakeLists.txt @@ -34,7 +34,9 @@ IF(NETCDF_ENABLE_DOXYGEN) # Generate User Documentation ADD_CUSTOM_TARGET(doc_all ALL - ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile + COMMAND ${DOXYGEN_EXECUTABLE} -u ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile + COMMAND ${DOXYGEN_EXECUTABLE} -w html ${CMAKE_CURRENT_BINARY_DIR}/header.html ${CMAKE_CURRENT_BINARY_DIR}/footer.html ${CMAKE_CURRENT_BINARY_DIR}/custom_stylesheet.html ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile + COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/auth.html ${CMAKE_CURRENT_BINARY_DIR}/html COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/obsolete/fan_utils.html ${CMAKE_CURRENT_BINARY_DIR}/html WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} @@ -96,6 +98,6 @@ inmemory.md DAP2.dox FAQ.md known_problems.md COPYRIGHT.dox user_defined_formats.md DAP4.md DAP4.dox testserver.dox byterange.md filters.md nczarr.md auth.md quantize.md -quickstart_paths.md cloud.md) +quickstart_paths.md quickstart_filters.md quickstart_env.md cloud.md) ADD_EXTRA_DIST("${CUR_EXTRA_DIST}") diff --git a/docs/Doxyfile.developer b/docs/Doxyfile.developer index e384408a1f..4d8761780c 100644 --- a/docs/Doxyfile.developer +++ b/docs/Doxyfile.developer @@ -38,7 +38,7 @@ PROJECT_NAME = netCDF-C # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 4.9.3-development +PROJECT_NUMBER = 4.9.4-development # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/docs/Doxyfile.in b/docs/Doxyfile.in index c93fe1a9cd..e6e6c0e5ff 100644 --- a/docs/Doxyfile.in +++ b/docs/Doxyfile.in @@ -945,6 +945,7 @@ INPUT = @abs_top_srcdir@/docs/mainpage.dox \ @abs_top_srcdir@/docs/windows-binaries.md \ @abs_top_srcdir@/docs/inmemory.md \ @abs_top_srcdir@/docs/byterange.md \ + @abs_top_srcdir@/docs/auth.md \ @abs_top_srcdir@/docs/nczarr.md \ @abs_top_srcdir@/docs/cloud.md \ @abs_top_srcdir@/docs/notes.md \ @@ -965,6 +966,7 @@ INPUT = @abs_top_srcdir@/docs/mainpage.dox \ @abs_top_srcdir@/docs/filters.md \ @abs_top_srcdir@/docs/quickstart_filters.md \ @abs_top_srcdir@/docs/quickstart_paths.md \ + @abs_top_srcdir@/docs/quickstart_env.md \ @abs_top_srcdir@/include/netcdf.h \ @abs_top_srcdir@/include/netcdf_mem.h \ @abs_top_srcdir@/include/netcdf_par.h \ @@ -1352,7 +1354,7 @@ HTML_FILE_EXTENSION = .html # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_HEADER = @abs_top_srcdir@/docs/header.html +HTML_HEADER = @abs_top_builddir@/docs/header.html # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard @@ -1362,7 +1364,7 @@ HTML_HEADER = @abs_top_srcdir@/docs/header.html # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_FOOTER = @abs_top_srcdir@/docs/footer.html +HTML_FOOTER = @abs_top_builddir@/docs/footer.html # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. 
It can be used to fine-tune the look of diff --git a/docs/Makefile.am b/docs/Makefile.am index ba3e41cbb7..3bacd2d08d 100644 --- a/docs/Makefile.am +++ b/docs/Makefile.am @@ -15,7 +15,7 @@ obsolete/fan_utils.html indexing.dox inmemory.md FAQ.md \ known_problems.md COPYRIGHT.md inmeminternal.dox testserver.dox \ byterange.md nczarr.md quantize.md all-error-codes.md \ quickstart_paths.md cloud.md header.html attribute_conventions.md \ -file_format_specifications.md quickstart_filters.md \ +file_format_specifications.md quickstart_filters.md quickstart_env.md \ doxygen-awesome-css netcdf-50x50.png # Turn off parallel builds in this directory. @@ -56,7 +56,11 @@ endif # includes them in the documentation. doxyfile.stamp: - $(DOXYGEN) Doxyfile && cp $(top_srcdir)/docs/auth.md $(top_srcdir)/docs/obsolete/fan_utils.html html + $(DOXYGEN) -u $(top_builddir)/docs/Doxyfile && \ + $(DOXYGEN) -w html $(top_builddir)/docs/header.html \ + $(top_builddir)/docs/footer.html $(top_builddir)/docs/custom_style.css $(top_builddir)/docs/Doxyfile && \ + $(DOXYGEN) $(top_builddir)/docs/Doxyfile && \ + cp $(top_srcdir)/docs/auth.md $(top_srcdir)/docs/obsolete/fan_utils.html html CLEANFILES = doxyfile.stamp diff --git a/docs/auth.md b/docs/auth.md index dfb49b3265..392add7452 100644 --- a/docs/auth.md +++ b/docs/auth.md @@ -1,21 +1,18 @@ NetCDF Authorization Support -====================================== +==================================== -NetCDF Authorization Support {#auth} -==================================== - [TOC] -## Introduction {#auth_intro} +## Introduction {#auth} netCDF can support user authorization using the facilities provided by the curl library. This includes basic password authentication as well as certificate-based authorization. At the moment, this document only applies to DAP2 and DAP4 access. -With some exceptions (e.g. see the section on redirection) +With some exceptions (e.g. 
see the section on redirection) The libcurl authorization mechanisms can be accessed in two ways 1. Inserting the username and password into the url, or @@ -33,7 +30,7 @@ This username and password will be used if the server asks for authentication. Note that only simple password authentication is supported in this format. -Specifically note that [redirection-based](#REDIR) +Specifically note that [redirection-based](#auth_redir) authorization may work with this but it is a security risk. This is because the username and password may be sent to each server in the redirection chain. @@ -44,67 +41,12 @@ how to properly escape the user and password. ## RC File Authentication {#auth_dodsrc} The netcdf library supports an _rc_ file mechanism to allow the passing -of a number of parameters to libnetcdf and libcurl. -Locating the _rc_ file is a multi-step process. - -### Search Order - -The netcdf-c library searches for, and loads from, the following files, -in this order: -1. $HOME/.ncrc -2. $HOME/.dodsrc -3. $CWD/.ncrc -4. $CWD/.dodsrc - -*$HOME* is the user's home directory and *$CWD* is the current working directory. Entries in later files override any of the earlier files - -It is strongly suggested that you pick a uniform location and a uniform name -and use them always. Otherwise you may observe unexpected results -when the netcdf-c library loads an rc file you did not expect. - -### RC File Format - -The rc file format is a series of lines of the general form: - - []= - -where the bracket-enclosed host:port is optional. - -### URL Constrained RC File Entries - -Each line of the rc file can begin with a host+port enclosed in -square brackets. The form is "host:port". If the port is not -specified then the form is just "host". The reason that more of -the url is not used is that libcurl's authorization grain is not -any finer than host level. - -Here are some examples. 
-```` - [remotetest.unidata.ucar.edu]HTTP.VERBOSE=1 -or - [fake.ucar.edu:9090]HTTP.VERBOSE=0 -```` -If the url request from, say, the _netcdf_open_ method -has a host+port matching one of the prefixes in the rc file, then -the corresponding entry will be used, otherwise ignored. -This means that an entry with a matching host+port will take -precedence over an entry without a host+port. - -For example, the URL -```` - http://remotetest.unidata.ucar.edu/thredds/dodsC/testdata/testData.nc -```` -will have HTTP.VERBOSE set to 1 because its host matches the example above. - -Similarly, -```` - http://fake.ucar.edu:9090/dts/test.01 -```` -will have HTTP.VERBOSE set to 0 because its host+port matches the example above. +of a number of run-time parameters to libnetcdf and libcurl. +This is described in the file "quickstart_env.md". ## Authorization-Related Keys {#auth_keys} -The currently defined set of authorization-related keys are as follows. +The currently defined set of authorization-related keys recognized in .netrc are as follows. The second column is the affected curl_easy_setopt option(s), if any (see reference #1). @@ -137,7 +79,7 @@ This is an alternative to setting it in the url. The value must be of the form "username:password". See the password escaping section to see how this value must escape certain characters. -Also see redirection authorization +Also see redirection authorization for important additional information. The pair of keys @@ -146,7 +88,7 @@ can be used as an alternative to HTTP.CREDENTIALS.USERPASSWORD to set the simple password authentication. If present, they take precedence over HTTP.CREDENTIALS.USERPASSWORD. The values do not need to be escaped. -See redirection authorization +See redirection authorization for important additional information. ### Cookie Jar @@ -157,7 +99,7 @@ to read cookies (CURLOPT_COOKIEJAR) and also the file into which to store cookies (CURLOPT_COOKIEFILE). The same value is used for both CURLOPT values. 
It defaults to in-memory storage. -See [redirection authorization](#REDIR) +See [redirection authorization](#auth_redir) for important additional information. ### Certificate Authentication @@ -191,7 +133,7 @@ deprecated; use HTTP.PROXY.SERVER HTTP.NETRC specifies the absolute path of the .netrc file, and causes it to be used instead of username and password. -See [redirection authorization](#REDIR) +See [redirection authorization](#auth_redir) for information about using *.netrc*. ## Password Escaping {#auth_userpwdescape} @@ -289,7 +231,7 @@ machine urs.earthdata.nasa.gov login password Some systems, notably ESG (Earth System Grid), requires the use of client-side certificates, as well as being -[re-direction based](#REDIR). +[re-direction based](#auth_redir). This requires setting the following entries: - HTTP.COOKIEJAR — a file path for storing cookies across re-direction. @@ -306,7 +248,7 @@ Note that the first two are there to support re-direction based authentication. 1. https://curl.haxx.se/libcurl/c/curl_easy_setopt.html 2. https://curl.haxx.se/docs/ssl-compared.html -## Appendix A. All RC-File Keys {#auth_allkeys} +## Authorization Appendix A. All RC-File Keys {#auth_allkeys} For completeness, this is the list of all rc-file keys. If this documentation is out of date with respect to the actual code, @@ -333,21 +275,21 @@ the code is definitive.
HTTP.NETRCCURLOPT_NETRC,CURLOPT_NETRC_FILE
-## Appendix B. URS Access in Detail {#auth_ursdetail} +## Authorization Appendix B. URS Access in Detail {#auth_ursdetail} It is possible to use the NASA Earthdata Login System (URS) with netcdf by using using the process specified in the -[redirection based authorization section](#REDIR). +[redirection based authorization section](#auth_redir). In order to access URS controlled datasets, however, it is necessary to register as a user with NASA at this website (subject to change): https://uat.urs.earthdata.nasa.gov/ -## Appendix C. ESG Access in Detail {#auth_esgdetail} +## Authorization Appendix C. ESG Access in Detail {#auth_esgdetail} It is possible to access Earth Systems Grid (ESG) datasets from ESG servers through the netCDF API using the techniques -described in the section on [Client-Side Certificates](#CLIENTCERTS). +described in the section on [Client-Side Certificates](#auth_clientcerts). In order to access ESG datasets, however, it is necessary to register as a user with ESG and to setup your environment @@ -430,7 +372,7 @@ named "truststore" ### Running the C Client -Refer to the section on [Client-Side Certificates](#CLIENTCERTS). +Refer to the section on [Client-Side Certificates](#auth_clientcerts). The keys specified there must be set in the rc file to support ESG access. - HTTP.COOKIEJAR=~/.dods_cookies diff --git a/docs/filters.md b/docs/filters.md index 3d69a9348e..fec847534b 100644 --- a/docs/filters.md +++ b/docs/filters.md @@ -368,30 +368,30 @@ The details for writing such a filter are defined in the HDF5 documentation[1,2] The HDF5 loader searches for plugins in a number of directories. This search is contingent on the presence or absence of the environment -variable named ***HDF5_PLUGIN_PATH***. +variable named ***HDF5\_PLUGIN\_PATH***. As with all other "...PATH" variables, it is a sequence of absolute directories separated by a separator character. For *nix* operating systems, this separator is the colon (':') character. 
For Windows and Mingw, the separator is the semi-colon (';') character. So for example: -* Linux: export HDF5_PLUGIN_PATH=/usr/lib:/usr/local/lib -* Windows: export HDF5_PLUGIN_PATH=c:\\ProgramData\\hdf5\\plugin;c:\\tools\\lib +* Linux: `export HDF5_PLUGIN_PATH=/usr/lib:/usr/local/lib` +* Windows: `export HDF5_PLUGIN_PATH=c:\\ProgramData\\hdf5\\plugin;c:\\tools\\lib` -If HDF5_PLUGIN_PATH is defined, then the loader will search each directory +If HDF5\_PLUGIN\_PATH is defined, then the loader will search each directory in the path from left to right looking for shared libraries with specific exported symbols representing the entry points into the library. -If HDF5_PLUGIN_PATH is not defined, the loader defaults to using +If HDF5\_PLUGIN\_PATH is not defined, the loader defaults to using these default directories: -* Linux: /usr/local/hdf5/lib/plugin -* Windows: %ALLUSERSPROFILE%\\hdf5\\lib\\plugin +* Linux: `/usr/local/hdf5/lib/plugin` +* Windows: `%ALLUSERSPROFILE%\\hdf5\\lib\\plugin` It should be noted that there is a difference between the search order for HDF5 versus NCZarr. The HDF5 loader will search only the directories -specificed in HDF5_PLUGIN_PATH. In NCZarr, the loader -searches HDF5_PLUGIN_PATH and as a last resort, +specificed in HDF5\_PLUGIN\_PATH. In NCZarr, the loader +searches HDF5\_PLUGIN\_PATH and as a last resort, it also searches the default directory. ### Plugin Library Naming {#filters_Pluginlib} @@ -637,17 +637,17 @@ to point to that directory or you may be able to copy the shared libraries out o As of NetCDF version 4.8.2, the netcdf-c library supports bit-grooming filters. -```` -Bit-grooming is a lossy compression algorithm that removes the -bloat due to false-precision, those bits and bytes beyond the -meaningful precision of the data. Bit Grooming is statistically -unbiased, applies to all floating point numbers, and is easy to -use. Bit-Grooming reduces data storage requirements by -25-80%. 
Unlike its best-known competitor Linear Packing, Bit -Grooming imposes no software overhead on users, and guarantees -its precision throughout the whole floating point range -[https://doi.org/10.5194/gmd-9-3199-2016]. -```` + + Bit-grooming is a lossy compression algorithm that removes the + bloat due to false-precision, those bits and bytes beyond the + meaningful precision of the data. Bit Grooming is statistically + unbiased, applies to all floating point numbers, and is easy to + use. Bit-Grooming reduces data storage requirements by + 25-80%. Unlike its best-known competitor Linear Packing, Bit + Grooming imposes no software overhead on users, and guarantees + its precision throughout the whole floating point range + [https://doi.org/10.5194/gmd-9-3199-2016]. + The generic term "quantize" is used to refer collectively to the various precision-trimming algorithms. The key thing to note about quantization is that it occurs at the point of writing of data only. Since its output is @@ -656,18 +656,20 @@ Because of this, quantization is not part of the standard filter mechanism and has a separate API. The API for bit-groom is currently as follows. -```` + +``` int nc_def_var_quantize(int ncid, int varid, int quantize_mode, int nsd); int nc_inq_var_quantize(int ncid, int varid, int *quantize_modep, int *nsdp); -```` +``` The *quantize_mode* argument specifies the particular algorithm. -Currently, three are supported: NC_QUANTIZE_BITGROOM, NC_QUANTIZE_GRANULARBR, -and NC_QUANTIZE_BITROUND. In addition quantization can be disabled using -the value NC_NOQUANTIZE. +Currently, three are supported: NC\_QUANTIZE\_BITGROOM, NC\_QUANTIZE\_GRANULARBR, +and NC\_QUANTIZE\_BITROUND. In addition quantization can be disabled using +the value NC\_NOQUANTIZE. The input to ncgen or the output from ncdump supports special attributes to indicate if quantization was applied to a given variable. These attributes have the following form. 
+ ```` _QuantizeBitGroomNumberOfSignificantDigits = or @@ -830,15 +832,16 @@ If you do not want to use Automake or Cmake, the following has been known to wor ## References {#filters_References} -1. https://support.hdfgroup.org/HDF5/doc/Advanced/DynamicallyLoadedFilters/HDF5DynamicallyLoadedFilters.pdf -2. https://support.hdfgroup.org/HDF5/doc/TechNotes/TechNote-HDF5-CompressionTroubleshooting.pdf -3. https://portal.hdfgroup.org/display/support/Registered+Filter+Plugins -4. https://support.hdfgroup.org/services/contributions.html#filters -5. https://support.hdfgroup.org/HDF5/doc/RM/RM\_H5.html -6. https://confluence.hdfgroup.org/display/HDF5/Filters -7. https://numcodecs.readthedocs.io/en/stable/ -8. https://github.com/ccr/ccr -9. https://escholarship.org/uc/item/7xd1739k +1. [https://support.hdfgroup.org/HDF5/doc/Advanced/DynamicallyLoadedFilters/HDF5DynamicallyLoadedFilters.pdf]() +2. [https://support.hdfgroup.org/HDF5/doc/TechNotes/TechNote-HDF5-CompressionTroubleshooting.pdf]() +3.[ https://portal.hdfgroup.org/display/support/Registered+Filter+Plugins]() +4. [https://support.hdfgroup.org/services/contributions.html#filters]() +5. [https://support.hdfgroup.org/HDF5/doc/RM/RM\_H5.html]() +6. [https://confluence.hdfgroup.org/display/HDF5/Filters +]() +7. [https://numcodecs.readthedocs.io/en/stable/]() +8. [https://github.com/ccr/ccr]() +9. [https://escholarship.org/uc/item/7xd1739k]() ## Appendix A. HDF5 Parameter Encode/Decode {#filters_appendixa} @@ -945,6 +948,7 @@ Examples of the use of these functions can be seen in the test program *nc\_test Some of the above functions use a C struct defined in *netcdf\_filter.h\_. The definition of that struct is as follows. + ```` typedef struct NC_H5_Filterspec { unsigned int filterid; /* ID for arbitrary filter. 
*/ @@ -1219,12 +1223,14 @@ WARNING: the installer still needs to make sure that the actual filter/compressi The target location into which libraries in the "plugins" directory are installed is specified using a special *./configure* option + ```` --with-plugin-dir= or --with-plugin-dir ```` or its corresponding *cmake* option. + ```` -DPLUGIN_INSTALL_DIR= or @@ -1232,6 +1238,7 @@ or ```` This option defaults to the value "yes", which means that filters are installed by default. This can be disabled by one of the following options. + ```` --without-plugin-dir (automake) or @@ -1242,13 +1249,15 @@ or If the option is specified with no argument (automake) or with the value "YES" (CMake), then it defaults (in order) to the following directories: -1. If the HDF5_PLUGIN_PATH environment variable is defined, then last directory in the list of directories in the path is used. -2. (a) "/usr/local/hdf5/lib/plugin” for linux/unix operating systems (including Cygwin)
- (b) “%ALLUSERSPROFILE%\\hdf5\\lib\\plugin” for Windows and MinGW + +1. If the HDF5\_PLUGIN\_PATH environment variable is defined, then last directory in the list of directories in the path is used. +2. (a) `/usr/local/hdf5/lib/plugin` for linux/unix operating systems (including Cygwin)
+ (b) `%ALLUSERSPROFILE%\\hdf5\\lib\\plugin` for Windows and MinGW If NCZarr is enabled, then in addition to wrappers for the standard filters, additional libraries will be installed to support NCZarr access to filters. Currently, this list includes the following: + * shuffle — shuffle filter * fletcher32 — fletcher32 checksum * deflate — deflate compression @@ -1264,7 +1273,7 @@ provided by the *lib__nczh5filters.so* shared library. Note also that if you disable HDF5 support, but leave NCZarr support enabled, then all of the above filters should continue to work. -### HDF5_PLUGIN_PATH +### HDF5\_PLUGIN\_PATH At the moment, NetCDF uses the existing HDF5 environment variable *HDF5\_PLUGIN\_PATH* to locate the directories in which filter wrapper @@ -1277,17 +1286,17 @@ separated by a specific separator character. For Windows, the separator character is a semicolon (';') and for Unix, it is a a colon (':'). -So, if HDF5_PLUGIN_PATH is defined at build time, and +So, if HDF5\_PLUGIN\_PATH is defined at build time, and *--with-plugin-dir* is specified with no argument then the last directory in the path will be the one into which filter wrappers are installed. Otherwise the default directories are used. The important thing to note is that at run-time, there are several cases to consider: -1. HDF5_PLUGIN_PATH is defined and is the same value as it was at build time -- no action needed -2. HDF5_PLUGIN_PATH is defined and is has a different value from build time -- the user is responsible for ensuring that the run-time path includes the same directory used at build time, otherwise this case will fail. -3. HDF5_PLUGIN_DIR is not defined at either run-time or build-time -- no action needed -4. HDF5_PLUGIN_DIR is not defined at run-time but was defined at build-time -- this will probably fail +1. HDF5\_PLUGIN\_PATH is defined and is the same value as it was at build time -- no action needed +2. 
HDF5\_PLUGIN\_PATH is defined and is has a different value from build time -- the user is responsible for ensuring that the run-time path includes the same directory used at build time, otherwise this case will fail. +3. HDF5\_PLUGIN\_PATH is not defined at either run-time or build-time -- no action needed +4. HDF5\_PLUGIN\_PATH is not defined at run-time but was defined at build-time -- this will probably fail ## Appendix J. A Warning on Backward Compatibility {#filters_appendixj} @@ -1303,7 +1312,7 @@ inconvenience. A user may encounter an incompatibility if any of the following appears in user code. -* The function *\_nc\_inq\_var\_filter* was returning the error value NC\_ENOFILTER if a variable had no associated filters. +* The function *nc\_inq\_var\_filter* was returning the error value NC\_ENOFILTER if a variable had no associated filters. It has been reverted to the previous case where it returns NC\_NOERR and the returned filter id was set to zero if the variable had no filters. * The function *nc\_inq\_var\_filterids* was renamed to *nc\_inq\_var\_filter\_ids*. * Some auxilliary functions for parsing textual filter specifications have been moved to the file *netcdf\_aux.h*. See [Appendix A](#filters_appendixa). diff --git a/docs/footer.html b/docs/footer.html index 5b48940605..efa3357e59 100644 --- a/docs/footer.html +++ b/docs/footer.html @@ -1,21 +1,17 @@ - - - -