From 60021a0d0a7b2a23ed86b76d3eab0acaf34ef857 Mon Sep 17 00:00:00 2001 From: sameeul Date: Tue, 1 Aug 2023 10:29:53 -0400 Subject: [PATCH 1/9] Preparing 0.7.0 release --- CMakeLists.txt | 4 +- Dockerfile | 2 - plugin.json | 18 +- src/nyx/main_nyxushie.cpp | 366 -------------------------------------- src/nyx/version.h | 2 +- tests/CMakeLists.txt | 4 +- 6 files changed, 17 insertions(+), 379 deletions(-) delete mode 100644 src/nyx/main_nyxushie.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 9f8d5909..0d604b9d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,7 +25,9 @@ if(CUDA_FOUND AND USEGPU) else () # for now, we assume CUDA 11.2+ supports all these archs. set(CUDA_ARCH_LIST "35;37;50;72;75;80;86") endif() - else() + elseif (CUDA_VERSION_MAJOR STREQUAL "12") + set(CUDA_ARCH_LIST "52;60;72;75;80;86;89;90") + else() # some old CUDA version (<10) set(CUDA_ARCH_LIST "50") endif() elseif((NOT CUDA_FOUND) AND USEGPU) diff --git a/Dockerfile b/Dockerfile index 33308067..821c3150 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,10 +17,8 @@ COPY ${LIB_LOCATION}/*.so* ${LIB_DIR}/ #Copy executable COPY nyxus ${EXEC_DIR}/ -COPY nyxushie ${EXEC_DIR}/ RUN chmod +x ${EXEC_DIR}/nyxus -RUN chmod +x ${EXEC_DIR}/nyxushie WORKDIR ${EXEC_DIR} diff --git a/plugin.json b/plugin.json index a17f0e79..d3fa0f99 100644 --- a/plugin.json +++ b/plugin.json @@ -1,7 +1,7 @@ { "name": "Nyxus", - "version": "0.4.1", - "containerId": "polusai/nyxus:0.4.2", + "version": "0.7.0", + "containerId": "polusai/nyxus:0.7.0", "title": "Nyxus feature extraction", "description": "Nyxus feature extractor", "author": "friskluft + hsidky", @@ -1123,13 +1123,15 @@ "required": true }, { - "name": "csvFile", - "description": "csvfile : singlecsv for saving values in one csv file and separate csv to save values for each image in separate csv file", + "name": "outputType", + "description": "outputType : singlecsv for saving values in one csv file and separate csv to save values for each image in separate 
csv file, arrow and parquet for saving in those file formats respectively", "type": "enum", "options": { "values": [ "singlecsv", - "separatecsv" + "separatecsv", + "arrow", + "parquet" ] }, "required": true @@ -1187,9 +1189,9 @@ "description": "Select features" }, { - "key": "inputs.csvFile", - "title": "Output csv file", - "description": "Save csv file separately for each image or one csv file for all images" + "key": "inputs.outputType", + "title": "Output file type", + "description": "Save csv file separately for each image or one csv file for all images. Alternatively, output can be written as arrow or parquet file" }, { "key": "inputs.embeddedpixelsize", diff --git a/src/nyx/main_nyxushie.cpp b/src/nyx/main_nyxushie.cpp deleted file mode 100644 index 86b51a0b..00000000 --- a/src/nyx/main_nyxushie.cpp +++ /dev/null @@ -1,366 +0,0 @@ -#include -#include -#if __has_include() - #include - namespace fs = std::filesystem; -#elif __has_include() - #include - namespace fs = std::experimental::filesystem; -#else - error "Missing the header." -#endif -#include "version.h" -#include "dirs_and_files.h" -#include "environment_basic.h" -#include "globals.h" -#include "image_loader1x.h" -#include "nested_roi.h" -#include "nested_feature_aggregation.h" - -bool mine_segment_relations(bool output2python, const std::string& label_dir, const std::string& file_pattern, const std::string& channel_signature, const int parent_channel, const int child_channel, const std::string& outdir, const ChildFeatureAggregation& aggr, int verbosity_level); - -class NyxusHieEnvironment : public BasicEnvironment -{ -public: - NyxusHieEnvironment() {} -} theEnvironment; - - -namespace Nyxus -{ - -bool output_relational_table (const std::vector& P, const std::string& outdir) -{ - // Anything to do at all? 
- if (P.size() == 0) - return false; - - // Make the relational table file name - auto & fullSegImgPath = Nyxus::roiData1[P[0]].segFname; - fs::path pSeg(fullSegImgPath); - auto segImgFname = pSeg.stem().string(); - std::string fPath = outdir + "/" + segImgFname + "_nested_relations.csv"; // output file path - - // Debug - std::cout << "\nWriting relational structure to file " << fPath << "\n"; - - // Output <-- parent header - std::ofstream ofile; - ofile.open(fPath); - ofile << "Image,Parent_Label,Child_Label\n"; - - // Process parents - for (auto l_par : P) - { - HieLR& r = Nyxus::roiData1[l_par]; - for (auto l_chi : r.child_segs) - { - ofile << r.segFname << "," << l_par << "," << l_chi << "\n"; - } - } - - ofile.close(); - std::cout << "\nCreated file " << fPath << "\n"; - - return true; -} - -bool shape_all_parents (const std::vector & P, const std::string & outdir, const ChildFeatureAggregation & aggr) -{ - // Anything to do at all? - if (P.size() == 0) - return false; - - // Find the max # of child segments to know how many children columns we have across the image - size_t max_n_children = 0; - for (auto l_par : Nyxus::uniqueLabels1) - { - HieLR& r_par = Nyxus::roiData1[l_par]; - max_n_children = std::max(max_n_children, r_par.child_segs.size()); - } - - // Header - // -- Read any CSV file and extract its header, we'll need it multiple times - int lab_temp = P[0]; - HieLR& r_temp = Nyxus::roiData1 [lab_temp]; - std::string csvFP = outdir + "/" + r_temp.get_output_csv_fname(); - - if (!existsOnFilesystem(csvFP)) - { - std::cout << "Error: cannot access file " << csvFP << std::endl; - return false; - } - - std::string csvWholeline; - std::vector csvHeader, csvFields; - bool ok = find_csv_record (csvWholeline, csvHeader, csvFields, csvFP, lab_temp); - if (ok == false) - { - std::cout << "Cannot find record for parent " << lab_temp << " in " << csvFP << ". 
Quitting\n"; - return false; // pointless to continue if the very 1st parent is unavailable - } - - // Make the output table file name - auto& fullSegImgPath = Nyxus::roiData1[P[0]].segFname; - fs::path pSeg(fullSegImgPath); - auto segImgFname = pSeg.stem().string(); - std::string fPath = outdir + "/" + segImgFname + "_nested_features.csv"; // output file path - - // --diagnostic-- - std::cout << "\nWriting aligned nested features to file " << fPath << "\n"; - - // Output <-- parent header - std::string csvNFP = fPath; //---outdir + "/nested_features.csv"; // output file path - std::ofstream ofile; - ofile.open(csvNFP); - for (auto& field : csvHeader) - ofile << field << ","; - //--no line break now-- ofile << "\n"; - - // Iterate children - if (aggr.get_method() == aNONE) - { - // We are in the no-aggregation scenario - for (int iCh = 1; iCh <= max_n_children; iCh++) - { - // Output <-- child's header - for (auto& field : csvHeader) - ofile << "child_" << iCh << "_" << field << ","; - } - } - else - { - // We are in the AGGREGATION scenario - // Output <-- child's header - for (auto& field : csvHeader) - ofile << "aggr_" << field << ","; - } - ofile << "\n"; - - // Process parents - for (auto l_par : P) - { - HieLR& r = Nyxus::roiData1[l_par]; - std::string csvFP = outdir + "/" + r.get_output_csv_fname(); - //std::string csvWholeline; - //std::vector csvHeader, csvFields; - bool ok = find_csv_record(csvWholeline, csvHeader, csvFields, csvFP, l_par); - if (ok == false) - { - std::cout << "Cannot find record for parent " << l_par << " in " << csvFP << "\n"; - continue; - } - - // Output <-- parent features - for (auto& field : csvFields) - ofile << field << ","; - //-- don't break the line! 
children features will follow-- ofile << "\n"; - - if (aggr.get_method() == aNONE) - { - // write features of all the children without aggregation - int iCh = 1; - for (auto l_chi : r.child_segs) - { - HieLR& r_chi = Nyxus::roiData2[l_chi]; - std::string csvFN_chi = r_chi.get_output_csv_fname(); - std::string csvFP_chi = outdir + "/" + csvFN_chi; - std::string csvWholeline_chi; - bool ok = find_csv_record(csvWholeline_chi, csvHeader, csvFields, csvFP_chi, l_chi); - if (ok == false) - { - std::cout << "Cannot find record for child " << l_par << " in " << csvFP << "\n"; - continue; - } - - // Output <-- child features - - for (auto& field : csvFields) - ofile << field << ","; - //-- don't break the line either! more children features will follow-- ofile << "\n"; - - // childrens' count - iCh++; - } - // write empty cells if needed - if (iCh < max_n_children) - { - for (int iCh2 = iCh; iCh2 <= max_n_children; iCh2++) - { - for (auto& field : csvFields) - ofile << "0" << ","; // blank cell - } - } - } // no aggregation - else - { - // read and aggregate - std::vector> aggrBuf; - - int iCh = 1; - for (auto l_chi : r.child_segs) - { - HieLR& r_chi = Nyxus::roiData2[l_chi]; - std::string csvFN_chi = r_chi.get_output_csv_fname(); - std::string csvFP_chi = outdir + "/" + csvFN_chi; - std::string csvWholeline_chi; - bool ok = find_csv_record(csvWholeline_chi, csvHeader, csvFields, csvFP_chi, l_chi); - if (ok == false) - { - std::cout << "Cannot find record for child " << l_par << " in " << csvFP << "\n"; - continue; - } - - // Output <-- child features - std::vector childRow; - for (auto& field : csvFields) - { - // Parse a table cell value. (Difficulty - nans, infs, etc.) 
- float val = 0.0f; - parse_as_float (field, val); - childRow.push_back(val); //--- ofile << field << "," - } - aggrBuf.push_back(childRow); - - // childrens' count - iCh++; - } - - int n_chi = aggrBuf.size(); - - // write aggregated - //--first, aggregate - std::vector feaAggregates; - for (int fea = 0; fea < csvFields.size(); fea++) - { - double aggResult = 0.0; - switch (aggr.get_method()) - { - case aSUM: - for (int child = 0; child < n_chi; child++) - aggResult += aggrBuf[child][fea]; - break; - case aMEAN: - for (int child = 0; child < n_chi; child++) - aggResult += aggrBuf[child][fea]; - aggResult /= n_chi; - break; - case aMIN: - aggResult = aggrBuf[0][fea]; - for (int child = 0; child < n_chi; child++) - aggResult = std::min (aggrBuf[child][fea], aggResult); - break; - case aMAX: - aggResult = aggrBuf[0][fea]; - for (int child = 0; child < n_chi; child++) - aggResult = std::max(aggrBuf[child][fea], aggResult); - break; - default: // aWMA - for (int child = 0; child < n_chi; child++) - aggResult += aggrBuf[child][fea]; - aggResult /= n_chi; - break; - } - feaAggregates.push_back(aggResult); - } - //--second, write - for (int fea = 0; fea < csvFields.size(); fea++) - ofile << feaAggregates[fea] << ","; - } - - // Output <-- line break - ofile << "\n"; - } - - ofile.close(); - std::cout << "\nCreated file " << csvNFP << "\n"; - - return true; -} - -} // namespace Nyxus - - - -#define OPTION_AGGREGATE "-aggregate" - -int main (int argc, char** argv) -{ - std::cout << PROJECT_NAME << " /// " << PROJECT_VER << " /// (c) 2021-2022 Axle Informatics\t" << "Build of " << __TIMESTAMP__ << "\n"; - - // Process the command line: check the command line (straightforward way - strictly positional) - if (argc < 7) - { - std::cout << "nyxushie [" << OPTION_AGGREGATE << "=]\n" - << "\t is " << ChildFeatureAggregation::get_valid_options() << "\n"; - std::cout << "Example: nyxushie ~/data/image-collection1/seg train_.*\\.tif _ch 1 0 ~/results/result1 \n"; - return 1; - } - - 
// Process the command line: consume the mandatory arguments - std::string segCollectionDir = argv[1], - filePattern = argv[2], - channelSign = argv[3], - parentChannel = argv[4], - childChannel = argv[5], - resultFeaturesDir = argv[6]; - - // -- file pattern - if (!theEnvironment.check_file_pattern(filePattern)) - { - std::cerr << "Filepattern provided is not valid\n"; - return 1; - } - - // -- parent & child channel numbers - int n_parent_channel; - if (sscanf(parentChannel.c_str(), "%d", &n_parent_channel) != 1) - { - std::cerr << "Error parsing the parent channel number\n"; - return 1; - } - - int n_child_channel; - if (sscanf(childChannel.c_str(), "%d", &n_child_channel) != 1) - { - std::cerr << "Error parsing the child channel number\n"; - return 1; - } - - // Process the command line: check the the aggregation option - ChildFeatureAggregation aggr (OPTION_AGGREGATE); - if (argc == 8) - { - auto rawAggrArg = argv[7]; - if (!aggr.parse(rawAggrArg)) - { - std::cerr << "Error parsing the aggregation method argument " << rawAggrArg << " . Valid options are : " << OPTION_AGGREGATE << "=" << ChildFeatureAggregation::get_valid_options() << "\n"; - return 1; - } - } - - // Mine relations and leave the result in object 'theResultsCache' - bool mineOK = mine_segment_relations( - false, - segCollectionDir, - filePattern, - channelSign, - n_parent_channel, - n_child_channel, - resultFeaturesDir, - aggr, - theEnvironment.get_verbosity_level()); - if (! 
mineOK) - { - std::cerr << "Error minimg hierarchical relations\n"; - return 1; - } - - return 0; -} - -namespace Nyxus -{ - // Results cache serving Nyxus' CLI & Python API, NyxusHie's CLI & Python API - ResultsCache theResultsCache; -} \ No newline at end of file diff --git a/src/nyx/version.h b/src/nyx/version.h index dc180c90..021cf05c 100644 --- a/src/nyx/version.h +++ b/src/nyx/version.h @@ -2,6 +2,6 @@ #define VERSIONH_INCLUDED #define PROJECT_NAME "Nyxus" -#define PROJECT_VER "0.4.2" +#define PROJECT_VER "0.7.0" #endif // VERSIONH_INCLUDED diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 34564cac..8fa31636 100755 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -25,7 +25,9 @@ if(CUDA_FOUND AND USEGPU) else () # for now, we assume CUDA 11.2+ supports all these archs. set(CUDA_ARCH_LIST "35;37;50;72;75;80;86") endif() - else() + elseif (CUDA_VERSION_MAJOR STREQUAL "12") + set(CUDA_ARCH_LIST "52;60;72;75;80;86;89;90") + else() # some old CUDA version (<10) set(CUDA_ARCH_LIST "50") endif() elseif((NOT CUDA_FOUND) AND USEGPU) From 483f497b0ab8f1adaf6f0577f27ef2351df0de68 Mon Sep 17 00:00:00 2001 From: sameeul Date: Tue, 1 Aug 2023 12:00:11 -0400 Subject: [PATCH 2/9] add python 3.11 wheels --- .github/workflows/build_wheels.yml | 2 +- .github/workflows/publish_pypi.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index d46652db..e5dc1354 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -14,7 +14,7 @@ jobs: matrix: os: [ubuntu-20.04, macos-11, windows-latest] cibw_archs: ["auto64"] - cibw_build: ["cp37-*", "cp38-*", "cp39-*", "cp310-*"] + cibw_build: ["cp37-*", "cp38-*", "cp39-*", "cp310-*", "cp311-*"] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/publish_pypi.yml b/.github/workflows/publish_pypi.yml index 5fac8b1f..17b69ce2 100644 --- a/.github/workflows/publish_pypi.yml +++ 
b/.github/workflows/publish_pypi.yml @@ -16,7 +16,7 @@ jobs: matrix: os: [ubuntu-20.04, macos-11, windows-latest] cibw_archs: ["auto64"] - cibw_build: ["cp37-*", "cp38-*", "cp39-*", "cp310-*"] + cibw_build: ["cp37-*", "cp38-*", "cp39-*", "cp310-*", "cp311-*"] steps: - uses: actions/checkout@v3 From 67fee74e37463c5c16765848816187f79c5a99c7 Mon Sep 17 00:00:00 2001 From: sameeul Date: Tue, 1 Aug 2023 12:00:34 -0400 Subject: [PATCH 3/9] build with arrow support --- ci-utils/build_conda.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/ci-utils/build_conda.sh b/ci-utils/build_conda.sh index c6ac1894..33e9ff9f 100755 --- a/ci-utils/build_conda.sh +++ b/ci-utils/build_conda.sh @@ -70,6 +70,7 @@ cmake -DCMAKE_PREFIX_PATH=$CONDA_PREFIX \ -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX \ -DBUILD_CLI=ON \ -DUSEGPU=ON \ + -DUSEARROW=ON \ $NYXUS_ROOT cmake --build . --parallel 4 From dc5400a360f3da4f04ab06ba1e1ffb053dcf3f5e Mon Sep 17 00:00:00 2001 From: sameeul Date: Wed, 2 Aug 2023 12:59:14 -0400 Subject: [PATCH 4/9] update docker build --- .github/workflows/publish_docker.yml | 2 +- ci-utils/build_conda.sh | 4 +- ci-utils/docker_copy_3rd_party_libs.sh | 84 +++++++++++++++++++++++++- 3 files changed, 83 insertions(+), 7 deletions(-) diff --git a/.github/workflows/publish_docker.yml b/.github/workflows/publish_docker.yml index b23a74b1..71d777a8 100644 --- a/.github/workflows/publish_docker.yml +++ b/.github/workflows/publish_docker.yml @@ -28,7 +28,7 @@ jobs: - name: Copy 3rd-party libraries to staging directory working-directory: ${{github.workspace}} run: | - bash ci-utils/docker_copy_3rd_party_libs.sh ${{github.workspace}}/miniconda-for-nyxus/envs/nyxus-3.7 + bash ci-utils/docker_copy_3rd_party_libs.sh ${{github.workspace}}/miniconda-for-nyxus/envs/nyxus-3.8 - name: Create Version File run: python setup.py --version| grep -v init > VERSION diff --git a/ci-utils/build_conda.sh b/ci-utils/build_conda.sh index 33e9ff9f..56ba36ee 100755 --- a/ci-utils/build_conda.sh +++ 
b/ci-utils/build_conda.sh @@ -17,7 +17,7 @@ MINICONDA=$PWD/miniconda-for-nyxus # Modify this to your preferred location for CPP_BUILD_DIR=$PWD SRC_ROOT=$1 #source dir location NYXUS_ROOT=$SRC_ROOT -PYTHON=3.7 +PYTHON=3.8 git config --global --add safe.directory $NYXUS_ROOT @@ -62,8 +62,6 @@ setup_miniconda conda activate nyxus-$PYTHON export NYXUS_HOME=$CONDA_PREFIX #Build CLI -export CMAKE_ARGS="-DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -DBUILD_CLI=ON -DUSEGPU=ON" -echo $CMAKE_ARGS mkdir -p $CPP_BUILD_DIR pushd $CPP_BUILD_DIR cmake -DCMAKE_PREFIX_PATH=$CONDA_PREFIX \ diff --git a/ci-utils/docker_copy_3rd_party_libs.sh b/ci-utils/docker_copy_3rd_party_libs.sh index a855736b..6fad4904 100644 --- a/ci-utils/docker_copy_3rd_party_libs.sh +++ b/ci-utils/docker_copy_3rd_party_libs.sh @@ -16,15 +16,93 @@ fi mkdir -p 3rd_party_libs cp $1/lib/libblosc*.so* ./3rd_party_libs/ +cp $1/lib/libcufft*.so* ./3rd_party_libs/ +cp $1/lib/libtiff*.so* ./3rd_party_libs/ +cp $1/lib/libofstd*.so* ./3rd_party_libs/ +cp $1/lib/libdcmdata*.so* ./3rd_party_libs/ +cp $1/lib/libdcmjpeg*.so* ./3rd_party_libs/ +cp $1/lib/libdcmjpls*.so* ./3rd_party_libs/ +cp $1/lib/libdcmseg*.so* ./3rd_party_libs/ +cp $1/lib/libfmjpeg2k*.so* ./3rd_party_libs/ +cp $1/lib/libparquet*.so* ./3rd_party_libs/ +cp $1/lib/libarrow*.so* ./3rd_party_libs/ cp $1/lib/./liblz4*.so* ./3rd_party_libs/ cp $1/lib/./libsnappy*.so* ./3rd_party_libs/ cp $1/lib/./libz*.so* ./3rd_party_libs/ cp $1/lib/./libzstd*.so* ./3rd_party_libs/ -cp $1/lib/libtiff*.so* ./3rd_party_libs/ + cp $1/lib/./libwebp*.so* ./3rd_party_libs/ cp $1/lib/./liblzma*.so* ./3rd_party_libs/ cp $1/lib/./libLerc*.so* ./3rd_party_libs/ cp $1/lib/./libjpeg*.so* ./3rd_party_libs/ cp $1/lib/./libdeflate*.so* ./3rd_party_libs/ -cp $1/lib/./libdcm*.so* ./3rd_party_libs/ -cp $1/lib/./libfmjpeg2k*.so* ./3rd_party_libs/ +cp $1/lib/./liboflog*.so* ./3rd_party_libs/ +cp $1/lib/./libxml2*.so* ./3rd_party_libs/ +cp $1/lib/./libijg8*.so* 
./3rd_party_libs/ +cp $1/lib/./libijg12*.so* ./3rd_party_libs/ +cp $1/lib/./libijg16*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmimgle*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmtkcharls*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmfg*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmiod*.so* ./3rd_party_libs/ +cp $1/lib/./libopenjp2*.so* ./3rd_party_libs/ +cp $1/lib/./libthrift*.so* ./3rd_party_libs/ +cp $1/lib/./libcrypto*.so* ./3rd_party_libs/ +cp $1/lib/./libbrotlienc*.so* ./3rd_party_libs/ +cp $1/lib/./libbrotlidec*.so* ./3rd_party_libs/ +cp $1/lib/./liborc*.so* ./3rd_party_libs/ +cp $1/lib/./libglog*.so* ./3rd_party_libs/ +cp $1/lib/./libutf8proc*.so* ./3rd_party_libs/ +cp $1/lib/./libbz2*.so* ./3rd_party_libs/ +cp $1/lib/./libgoogle_cloud_cpp_storage*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-identity-management*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-s3*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-core*.so* ./3rd_party_libs/ +cp $1/lib/./libre2*.so* ./3rd_party_libs/ +cp $1/lib/./libgoogle_cloud_cpp_common*.so* ./3rd_party_libs/ +cp $1/lib/./libabsl_time*.so* ./3rd_party_libs/ +cp $1/lib/./libabsl_time_zone*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-crt-cpp*.so* ./3rd_party_libs/ +cp $1/lib/././libsharpyuv*.so* ./3rd_party_libs/ +cp $1/lib/././libiconv*.so* ./3rd_party_libs/ +cp $1/lib/././libicui18n*.so* ./3rd_party_libs/ +cp $1/lib/././libicuuc*.so* ./3rd_party_libs/ +cp $1/lib/././libicudata*.so* ./3rd_party_libs/ +cp $1/lib/././libssl*.so* ./3rd_party_libs/ +cp $1/lib/././libbrotlicommon*.so* ./3rd_party_libs/ +cp $1/lib/././libprotobuf*.so* ./3rd_party_libs/ +cp $1/lib/././libgflags*.so* ./3rd_party_libs/ +cp $1/lib/././libgoogle_cloud_cpp_rest_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libcrc32c*.so* ./3rd_party_libs/ +cp $1/lib/././libcurl*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_crc32c*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_str_format_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_strings*.so* ./3rd_party_libs/ +cp 
$1/lib/././libabsl_strings_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-cpp-sdk-cognito-identity*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-cpp-sdk-sts*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-event-stream*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-checksums*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-common*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_int128*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_base*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_raw_logging_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-mqtt*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-s3*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-auth*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-http*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-io*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-cal*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-sdkutils*.so* ./3rd_party_libs/ +cp $1/lib/./././libnghttp2*.so* ./3rd_party_libs/ +cp $1/lib/./././libssh2*.so* ./3rd_party_libs/ +cp $1/lib/./././libgssapi_krb5*.so* ./3rd_party_libs/ +cp $1/lib/./././libabsl_crc_internal*.so* ./3rd_party_libs/ +cp $1/lib/./././libabsl_spinlock_wait*.so* ./3rd_party_libs/ +cp $1/lib/./././libaws-c-compression*.so* ./3rd_party_libs/ +cp $1/lib/./././libs2n*.so* ./3rd_party_libs/ +cp $1/lib/././././libkrb5*.so* ./3rd_party_libs/ +cp $1/lib/././././libk5crypto*.so* ./3rd_party_libs/ +cp $1/lib/././././libcom_err*.so* ./3rd_party_libs/ +cp $1/lib/././././libkrb5support*.so* ./3rd_party_libs/ +cp $1/lib/././././libkeyutils*.so* ./3rd_party_libs/ From 32ea05cde86c736e2cd632a112f2c09aebf9f89a Mon Sep 17 00:00:00 2001 From: sameeul Date: Wed, 2 Aug 2023 13:19:26 -0400 Subject: [PATCH 5/9] Update docs --- README.md | 20 ++++++++++---------- ci-utils/docker_copy_3rd_party_libs.sh | 1 - docs/source/cmdline_and_examples.rst | 14 +++++++------- docs/source/devguide.rst | 4 ++-- 4 files changed, 19 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index fdf72cb5..243c166a 100644 --- 
a/README.md +++ b/README.md @@ -363,7 +363,7 @@ Assuming you [built the Nyxus binary](#building-from-source) as outlined below, |
Parameter
| Description | Type | |------|-------------|------| ---csvFile | Save csv file as one csv file for all the images or separate csv file for each image. Acceptable values: 'separatecsv' and 'singlecsv'. Default value: '--csvFile=separatecsv' | string constant +--outputType | Output type for feature values (separatecsv, singlecsv, arrow, parquet). Default value: '--outputType=separatecsv' | string constant --features | String constant or comma-seperated list of constants requesting a group of features or particular feature. Default value: '--features=\*ALL\*' | string --filePattern | Regular expression to match image files in directories specified by parameters '--intDir' and '--segDir'. To match all the files, use '--filePattern=.\*' | string --intDir | Directory of intensity image collection | path @@ -393,26 +393,26 @@ Assuming you [built the Nyxus binary](#building-from-source) as outlined below, Suppose we need to process intensity/mask images of channel 1 : ``` -./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.*_c1\.ome\.tif --csvFile=singlecsv +./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.*_c1\.ome\.tif --outputType=singlecsv ``` Example 2: __Running Nyxus to process specific image__ Suppose we need to process intensity/mask file p1_y2_r68_c1.ome.tif : ``` -./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=p1_y2_r68_c1\.ome\.tif --csvFile=singlecsv +./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=p1_y2_r68_c1\.ome\.tif --outputType=singlecsv ``` Example 3: __Running Nyxus to extract only intensity and basic morphology 
features__ ``` -./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --csvFile=singlecsv +./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --outputType=singlecsv ``` Example 4: __Skipping specified ROIs while extracting features__ Suppose we need to blacklist ROI labels 2 and 3 from the kurtosis feature extraction globally, in each image. The command line way to do that is using option __--skiproi__ : ```shell -./nyxus --skiproi=2,3 --features=KURTOSIS --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --csvFile=singlecsv +./nyxus --skiproi=2,3 --features=KURTOSIS --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --outputType=singlecsv ``` As a result, the default feature extraction result produced without option --skiproi looking like @@ -444,7 +444,7 @@ Note the comma character separator &nbs If we need to blacklist ROI labels 15 and 16 only in image image421.tif ROI label 17 in image image422.tif, we can do it via a per-file blacklist : ``` -./nyxus --skiproi=image421.tif:15,16;image421.tif:17 --features=KURTOSIS --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --csvFile=singlecsv +./nyxus --skiproi=image421.tif:15,16;image421.tif:17 --features=KURTOSIS --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output --filePattern=.* --outputType=singlecsv ``` Note the colon character   :   between the file name and backlisted labels within this file and semicolon character separator   ;   of file blacklists. 
@@ -477,7 +477,7 @@ Valid aggregation options are SUM, MEAN, MIN, MAX, WMA (weighted mean average), Example 6: __Processing an image set with nested ROI postprocessing__ ``` -nyxus --features=*ALL_intensity* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output/directory --filePattern=.* --csvFile=separatecsv --reduceThreads=4 --hsig=_c --hpar=1 --hchi=0 --hag=WMA +nyxus --features=*ALL_intensity* --intDir=/path/to/intensity/images --segDir=/path/to/mask/images --outDir=/path/to/output/directory --filePattern=.* --outputType=separatecsv --reduceThreads=4 --hsig=_c --hpar=1 --hchi=0 --hag=WMA ``` As a result, 2 additional CSV files will be produced for each mask image whose channel number matches the value of option '--hpar': file @@ -653,7 +653,7 @@ docker pull polusai/nyxus The following command line is an example of running the dockerized feature extractor (image hash 87f3b560bbf2) with only intensity features selected: ``` -docker run -it [--gpus all] --mount type=bind,source=/images/collections,target=/data 87f3b560bbf2 --intDir=/data/c1/int --segDir=/data/c1/seg --outDir=/data/output --filePattern=.* --csvFile=separatecsv --features=entropy,kurtosis,skewness,max_intensity,mean_intensity,min_intensity,median,mode,standard_deviation +docker run -it [--gpus all] --mount type=bind,source=/images/collections,target=/data 87f3b560bbf2 --intDir=/data/c1/int --segDir=/data/c1/seg --outDir=/data/output --filePattern=.* --outputType=separatecsv --features=entropy,kurtosis,skewness,max_intensity,mean_intensity,min_intensity,median,mode,standard_deviation ``` ### Install from sources and package into a Docker image @@ -693,8 +693,8 @@ Enter value for this parameter if neighbors touching cells needs to be calculate __Features:__ Comma separated list of features to be extracted. If all the features are required, then choose option __*all*__. -__Csvfile:__ -There are 2 options available under this category. 
__*Separatecsv*__ - to save all the features extracted for each image in separate csv file. __*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. +__outputType:__ +There are 4 options available under this category. __*Separatecsv*__ - to save all the features extracted for each image in separate csv file. __*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. __*arrow*__ - to save all the features extracted from all the images in arrow format. __*parquet*__ - to save all the features extracted from all the images in parquet format __Embedded pixel size:__ This is an optional parameter. Use this parameter only if units are present in the metadata and want to use those embedded units for the features extraction. If this option is selected, value for the length of unit and pixels per unit parameters are not required. diff --git a/ci-utils/docker_copy_3rd_party_libs.sh b/ci-utils/docker_copy_3rd_party_libs.sh index 6fad4904..969b3b27 100644 --- a/ci-utils/docker_copy_3rd_party_libs.sh +++ b/ci-utils/docker_copy_3rd_party_libs.sh @@ -30,7 +30,6 @@ cp $1/lib/./liblz4*.so* ./3rd_party_libs/ cp $1/lib/./libsnappy*.so* ./3rd_party_libs/ cp $1/lib/./libz*.so* ./3rd_party_libs/ cp $1/lib/./libzstd*.so* ./3rd_party_libs/ - cp $1/lib/./libwebp*.so* ./3rd_party_libs/ cp $1/lib/./liblzma*.so* ./3rd_party_libs/ cp $1/lib/./libLerc*.so* ./3rd_party_libs/ diff --git a/docs/source/cmdline_and_examples.rst b/docs/source/cmdline_and_examples.rst index 0ffc03ce..bb49b729 100644 --- a/docs/source/cmdline_and_examples.rst +++ b/docs/source/cmdline_and_examples.rst @@ -18,8 +18,8 @@ should adhere to columns "WIPP I/O role" and "WIPP type". - Type - WIPP I/O role - WIPP type - * - --csvFile - - Save csv file as one csv file for all the images or separate csv file for each image. Acceptable values: 'separatecsv' and 'singlecsv'. 
Default value: '--csvFile=separatecsv' + * - --outputType + - Output type for feature values. Acceptable value: speratecsv, singlecsv, arrow, parquet. Default value: '--outputType=separatecsv' - string constant - input - enum @@ -151,7 +151,7 @@ Suppose we need to extract only Zernike features and first 3 Hu's moments: .. code-block:: bash - ./nyxus --features=ZERNIKE2D,HU_M1,HU_M2,HU_M3 --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --csvFile=singlecsv + ./nyxus --features=ZERNIKE2D,HU_M1,HU_M2,HU_M3 --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --outputType=singlecsv 2. Requesting specific feature groups ------------------------------------- @@ -160,7 +160,7 @@ Suppose we need to extract only intensity features basic morphology features: .. code-block:: bash - ./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --csvFile=singlecsv + ./nyxus --features=*all_intensity*,*basic_morphology* --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --outputType=singlecsv 3. Mixing specific feature groups and individual features --------------------------------------------------------- @@ -169,7 +169,7 @@ Suppose we need to extract intensity features, basic morphology features, and Ze .. 
code-block:: bash - ./nyxus --features=*all_intensity*,*basic_morphology*,zernike2d --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --csvFile=singlecsv + ./nyxus --features=*all_intensity*,*basic_morphology*,zernike2d --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --outputType=singlecsv 4. Specifying a feature list from with a file instead of command line --------------------------------------------------------------------- @@ -185,7 +185,7 @@ Then the command line will be: .. code-block:: bash - ./nyxus --features=feature_list.txt --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --csvFile=singlecsv + ./nyxus --features=feature_list.txt --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/seg --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --outputType=singlecsv 5. Whole-image feature extraction --------------------------------- @@ -194,7 +194,7 @@ The regular operation mode of Nyxus is processing pairs of intensity and mask im .. code-block:: bash - ./nyxus --features=*basic_morphology* --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/int --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --csvFile=singlecsv + ./nyxus --features=*basic_morphology* --intDir=/home/ec2-user/data-ratbrain/int --segDir=/home/ec2-user/data-ratbrain/int --outDir=/home/ec2-user/work/OUTPUT-ratbrain --filePattern=.* --outputType=singlecsv 6. 
Regular and ad-hoc mapping between intensity and mask image files -------------------------------------------------------------------- diff --git a/docs/source/devguide.rst b/docs/source/devguide.rst index 7766d85e..f8dc225e 100644 --- a/docs/source/devguide.rst +++ b/docs/source/devguide.rst @@ -222,14 +222,14 @@ Often multiple features need to be calculated together and the user faces the ne .. code-block:: bash - nyxus --features=AREA_PIXELS_COUNT,AREA_UM2,CENTROID_X,CENTROID_Y,BBOX_YMIN,BBOX_XMIN,BBOX_HEIGHT,BBOX_WIDTH --intDir=/home/ec2-user/work/datasetXYZ/int --segDir=/home/ec2-user/work/dataXYZ/seg --outDir=/home/ec2-user/work/datasetXYZ --filePattern=.* --csvFile=separatecsv + nyxus --features=AREA_PIXELS_COUNT,AREA_UM2,CENTROID_X,CENTROID_Y,BBOX_YMIN,BBOX_XMIN,BBOX_HEIGHT,BBOX_WIDTH --intDir=/home/ec2-user/work/datasetXYZ/int --segDir=/home/ec2-user/work/dataXYZ/seg --outDir=/home/ec2-user/work/datasetXYZ --filePattern=.* --outputType=separatecsv Features can be grouped toegther and gived convenient aliases, for example the above features AREA_PIXELS_COUNT, AREA_UM2, CENTROID_X, CENTROID_Y, BBOX_YMIN, BBOX_XMIN, BBOX_HEIGHT, and BBOX_WIDTH can be refered to as \*BASIC_MORPHOLOGY\* . (Asterisks are a part of the alias and aren't special symbols.) The command line then becomes simpler .. 
code-block:: bash - nyxus --features=\ *BASIC_MORPHOLOGY* AREA_PIXELS_COUNT,AREA_UM2,CENTROID_X,CENTROID_Y,BBOX_YMIN,BBOX_XMIN,BBOX_HEIGHT,BBOX_WIDTH*\ * --intDir=/home/ec2-user/work/datasetXYZ/int --segDir=/home/ec2-user/work/dataXYZ/seg --outDir=/home/ec2-user/work/datasetXYZ --filePattern=.* --csvFile=separatecsv + nyxus --features=\ *BASIC_MORPHOLOGY* AREA_PIXELS_COUNT,AREA_UM2,CENTROID_X,CENTROID_Y,BBOX_YMIN,BBOX_XMIN,BBOX_HEIGHT,BBOX_WIDTH*\ * --intDir=/home/ec2-user/work/datasetXYZ/int --segDir=/home/ec2-user/work/dataXYZ/seg --outDir=/home/ec2-user/work/datasetXYZ --filePattern=.* --outputType=separatecsv Step 1 - giving an alias to a multiple features ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 79f50a1b338a23781ff739f2c8427127f5416a1c Mon Sep 17 00:00:00 2001 From: Sameeul B Samee Date: Thu, 3 Aug 2023 09:56:52 -0400 Subject: [PATCH 6/9] update docker build prep and some docs --- README.md | 4 +-- ci-utils/docker_copy_3rd_party_libs.sh | 44 +++----------------------- 2 files changed, 6 insertions(+), 42 deletions(-) diff --git a/README.md b/README.md index 243c166a..45095cf4 100644 --- a/README.md +++ b/README.md @@ -693,8 +693,8 @@ Enter value for this parameter if neighbors touching cells needs to be calculate __Features:__ Comma separated list of features to be extracted. If all the features are required, then choose option __*all*__. -__outputType:__ -There are 4 options available under this category. __*Separatecsv*__ - to save all the features extracted for each image in separate csv file. __*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. __*arrow*__ - to save all the features extracted from all the images in arrow format. __*parquet*__ - to save all the features extracted from all the images in parquet format +__Outputtype:__ +There are 4 options available under this category. __*Separatecsv*__ - to save all the features extracted for each image in separate csv file. 
__*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. __*Arrow*__ - to save all the features extracted from all the images in arrow format. __*Parquet*__ - to save all the features extracted from all the images in parquet format __Embedded pixel size:__ This is an optional parameter. Use this parameter only if units are present in the metadata and want to use those embedded units for the features extraction. If this option is selected, value for the length of unit and pixels per unit parameters are not required. diff --git a/ci-utils/docker_copy_3rd_party_libs.sh b/ci-utils/docker_copy_3rd_party_libs.sh index 969b3b27..f31eb10c 100644 --- a/ci-utils/docker_copy_3rd_party_libs.sh +++ b/ci-utils/docker_copy_3rd_party_libs.sh @@ -16,13 +16,9 @@ fi mkdir -p 3rd_party_libs cp $1/lib/libblosc*.so* ./3rd_party_libs/ -cp $1/lib/libcufft*.so* ./3rd_party_libs/ cp $1/lib/libtiff*.so* ./3rd_party_libs/ cp $1/lib/libofstd*.so* ./3rd_party_libs/ -cp $1/lib/libdcmdata*.so* ./3rd_party_libs/ -cp $1/lib/libdcmjpeg*.so* ./3rd_party_libs/ -cp $1/lib/libdcmjpls*.so* ./3rd_party_libs/ -cp $1/lib/libdcmseg*.so* ./3rd_party_libs/ +cp $1/lib/libdcm*.so* ./3rd_party_libs/ cp $1/lib/libfmjpeg2k*.so* ./3rd_party_libs/ cp $1/lib/libparquet*.so* ./3rd_party_libs/ cp $1/lib/libarrow*.so* ./3rd_party_libs/ @@ -40,10 +36,6 @@ cp $1/lib/./libxml2*.so* ./3rd_party_libs/ cp $1/lib/./libijg8*.so* ./3rd_party_libs/ cp $1/lib/./libijg12*.so* ./3rd_party_libs/ cp $1/lib/./libijg16*.so* ./3rd_party_libs/ -cp $1/lib/./libdcmimgle*.so* ./3rd_party_libs/ -cp $1/lib/./libdcmtkcharls*.so* ./3rd_party_libs/ -cp $1/lib/./libdcmfg*.so* ./3rd_party_libs/ -cp $1/lib/./libdcmiod*.so* ./3rd_party_libs/ cp $1/lib/./libopenjp2*.so* ./3rd_party_libs/ cp $1/lib/./libthrift*.so* ./3rd_party_libs/ cp $1/lib/./libcrypto*.so* ./3rd_party_libs/ @@ -53,15 +45,10 @@ cp $1/lib/./liborc*.so* ./3rd_party_libs/ cp $1/lib/./libglog*.so* ./3rd_party_libs/ cp $1/lib/./libutf8proc*.so* 
./3rd_party_libs/ cp $1/lib/./libbz2*.so* ./3rd_party_libs/ -cp $1/lib/./libgoogle_cloud_cpp_storage*.so* ./3rd_party_libs/ -cp $1/lib/./libaws-cpp-sdk-identity-management*.so* ./3rd_party_libs/ -cp $1/lib/./libaws-cpp-sdk-s3*.so* ./3rd_party_libs/ -cp $1/lib/./libaws-cpp-sdk-core*.so* ./3rd_party_libs/ +cp $1/lib/./libgoogle*.so* ./3rd_party_libs/ +cp $1/lib/./libaws*.so* ./3rd_party_libs/ cp $1/lib/./libre2*.so* ./3rd_party_libs/ -cp $1/lib/./libgoogle_cloud_cpp_common*.so* ./3rd_party_libs/ -cp $1/lib/./libabsl_time*.so* ./3rd_party_libs/ -cp $1/lib/./libabsl_time_zone*.so* ./3rd_party_libs/ -cp $1/lib/./libaws-crt-cpp*.so* ./3rd_party_libs/ +cp $1/lib/./libabsl*.so* ./3rd_party_libs/ cp $1/lib/././libsharpyuv*.so* ./3rd_party_libs/ cp $1/lib/././libiconv*.so* ./3rd_party_libs/ cp $1/lib/././libicui18n*.so* ./3rd_party_libs/ @@ -71,34 +58,11 @@ cp $1/lib/././libssl*.so* ./3rd_party_libs/ cp $1/lib/././libbrotlicommon*.so* ./3rd_party_libs/ cp $1/lib/././libprotobuf*.so* ./3rd_party_libs/ cp $1/lib/././libgflags*.so* ./3rd_party_libs/ -cp $1/lib/././libgoogle_cloud_cpp_rest_internal*.so* ./3rd_party_libs/ cp $1/lib/././libcrc32c*.so* ./3rd_party_libs/ cp $1/lib/././libcurl*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_crc32c*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_str_format_internal*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_strings*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_strings_internal*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-cpp-sdk-cognito-identity*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-cpp-sdk-sts*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-event-stream*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-checksums*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-common*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_int128*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_base*.so* ./3rd_party_libs/ -cp $1/lib/././libabsl_raw_logging_internal*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-mqtt*.so* ./3rd_party_libs/ -cp 
$1/lib/././libaws-c-s3*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-auth*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-http*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-io*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-cal*.so* ./3rd_party_libs/ -cp $1/lib/././libaws-c-sdkutils*.so* ./3rd_party_libs/ cp $1/lib/./././libnghttp2*.so* ./3rd_party_libs/ cp $1/lib/./././libssh2*.so* ./3rd_party_libs/ cp $1/lib/./././libgssapi_krb5*.so* ./3rd_party_libs/ -cp $1/lib/./././libabsl_crc_internal*.so* ./3rd_party_libs/ -cp $1/lib/./././libabsl_spinlock_wait*.so* ./3rd_party_libs/ -cp $1/lib/./././libaws-c-compression*.so* ./3rd_party_libs/ cp $1/lib/./././libs2n*.so* ./3rd_party_libs/ cp $1/lib/././././libkrb5*.so* ./3rd_party_libs/ cp $1/lib/././././libk5crypto*.so* ./3rd_party_libs/ From 3f36ce5d2d0574379308125d4b0c346525bdf551 Mon Sep 17 00:00:00 2001 From: Sameeul B Samee Date: Thu, 3 Aug 2023 10:58:24 -0400 Subject: [PATCH 7/9] Docker dependency update --- ci-utils/docker_copy_3rd_party_libs.sh | 46 +++++++++++++++++++++++--- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/ci-utils/docker_copy_3rd_party_libs.sh b/ci-utils/docker_copy_3rd_party_libs.sh index f31eb10c..746a3498 100644 --- a/ci-utils/docker_copy_3rd_party_libs.sh +++ b/ci-utils/docker_copy_3rd_party_libs.sh @@ -16,9 +16,13 @@ fi mkdir -p 3rd_party_libs cp $1/lib/libblosc*.so* ./3rd_party_libs/ +cp $1/lib/libstdc++*.so* 3rd_party_libs/ cp $1/lib/libtiff*.so* ./3rd_party_libs/ cp $1/lib/libofstd*.so* ./3rd_party_libs/ -cp $1/lib/libdcm*.so* ./3rd_party_libs/ +cp $1/lib/libdcmdata*.so* ./3rd_party_libs/ +cp $1/lib/libdcmjpeg*.so* ./3rd_party_libs/ +cp $1/lib/libdcmjpls*.so* ./3rd_party_libs/ +cp $1/lib/libdcmseg*.so* ./3rd_party_libs/ cp $1/lib/libfmjpeg2k*.so* ./3rd_party_libs/ cp $1/lib/libparquet*.so* ./3rd_party_libs/ cp $1/lib/libarrow*.so* ./3rd_party_libs/ @@ -36,6 +40,10 @@ cp $1/lib/./libxml2*.so* ./3rd_party_libs/ cp $1/lib/./libijg8*.so* ./3rd_party_libs/ cp $1/lib/./libijg12*.so* 
./3rd_party_libs/ cp $1/lib/./libijg16*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmimgle*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmtkcharls*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmfg*.so* ./3rd_party_libs/ +cp $1/lib/./libdcmiod*.so* ./3rd_party_libs/ cp $1/lib/./libopenjp2*.so* ./3rd_party_libs/ cp $1/lib/./libthrift*.so* ./3rd_party_libs/ cp $1/lib/./libcrypto*.so* ./3rd_party_libs/ @@ -45,10 +53,15 @@ cp $1/lib/./liborc*.so* ./3rd_party_libs/ cp $1/lib/./libglog*.so* ./3rd_party_libs/ cp $1/lib/./libutf8proc*.so* ./3rd_party_libs/ cp $1/lib/./libbz2*.so* ./3rd_party_libs/ -cp $1/lib/./libgoogle*.so* ./3rd_party_libs/ -cp $1/lib/./libaws*.so* ./3rd_party_libs/ +cp $1/lib/./libgoogle_cloud_cpp_storage*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-identity-management*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-s3*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-cpp-sdk-core*.so* ./3rd_party_libs/ cp $1/lib/./libre2*.so* ./3rd_party_libs/ -cp $1/lib/./libabsl*.so* ./3rd_party_libs/ +cp $1/lib/./libgoogle_cloud_cpp_common*.so* ./3rd_party_libs/ +cp $1/lib/./libabsl_time*.so* ./3rd_party_libs/ +cp $1/lib/./libabsl_time_zone*.so* ./3rd_party_libs/ +cp $1/lib/./libaws-crt-cpp*.so* ./3rd_party_libs/ cp $1/lib/././libsharpyuv*.so* ./3rd_party_libs/ cp $1/lib/././libiconv*.so* ./3rd_party_libs/ cp $1/lib/././libicui18n*.so* ./3rd_party_libs/ @@ -58,14 +71,37 @@ cp $1/lib/././libssl*.so* ./3rd_party_libs/ cp $1/lib/././libbrotlicommon*.so* ./3rd_party_libs/ cp $1/lib/././libprotobuf*.so* ./3rd_party_libs/ cp $1/lib/././libgflags*.so* ./3rd_party_libs/ +cp $1/lib/././libgoogle_cloud_cpp_rest_internal*.so* ./3rd_party_libs/ cp $1/lib/././libcrc32c*.so* ./3rd_party_libs/ cp $1/lib/././libcurl*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_crc32c*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_str_format_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_strings*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_strings_internal*.so* ./3rd_party_libs/ +cp 
$1/lib/././libaws-cpp-sdk-cognito-identity*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-cpp-sdk-sts*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-event-stream*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-checksums*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-common*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_int128*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_base*.so* ./3rd_party_libs/ +cp $1/lib/././libabsl_raw_logging_internal*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-mqtt*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-s3*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-auth*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-http*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-io*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-cal*.so* ./3rd_party_libs/ +cp $1/lib/././libaws-c-sdkutils*.so* ./3rd_party_libs/ cp $1/lib/./././libnghttp2*.so* ./3rd_party_libs/ cp $1/lib/./././libssh2*.so* ./3rd_party_libs/ cp $1/lib/./././libgssapi_krb5*.so* ./3rd_party_libs/ +cp $1/lib/./././libabsl_crc_internal*.so* ./3rd_party_libs/ +cp $1/lib/./././libabsl_spinlock_wait*.so* ./3rd_party_libs/ +cp $1/lib/./././libaws-c-compression*.so* ./3rd_party_libs/ cp $1/lib/./././libs2n*.so* ./3rd_party_libs/ cp $1/lib/././././libkrb5*.so* ./3rd_party_libs/ cp $1/lib/././././libk5crypto*.so* ./3rd_party_libs/ cp $1/lib/././././libcom_err*.so* ./3rd_party_libs/ cp $1/lib/././././libkrb5support*.so* ./3rd_party_libs/ -cp $1/lib/././././libkeyutils*.so* ./3rd_party_libs/ +cp $1/lib/././././libkeyutils*.so* ./3rd_party_libs/ \ No newline at end of file From 2440457376008f84cbc5d4708ee509d91231ed7a Mon Sep 17 00:00:00 2001 From: Sameeul B Samee Date: Fri, 4 Aug 2023 12:25:56 -0400 Subject: [PATCH 8/9] update docs as per review comments --- README.md | 2 +- plugin.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 45095cf4..398db35f 100644 --- a/README.md +++ b/README.md @@ -694,7 +694,7 @@ __Features:__ Comma separated list of features to be 
extracted. If all the features are required, then choose option __*all*__. __Outputtype:__ -There are 4 options available under this category. __*Separatecsv*__ - to save all the features extracted for each image in separate csv file. __*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. __*Arrow*__ - to save all the features extracted from all the images in arrow format. __*Parquet*__ - to save all the features extracted from all the images in parquet format +There are 4 options available under this category. __*Separatecsv*__ - to save all the features extracted for each image in separate csv file. __*Singlecsv*__ - to save all the features extracted from all the images in the same csv file. __*Arrow*__ - to save all the features extracted from all the images in Apache Arrow format. __*Parquet*__ - to save all the features extracted from all the images in Apache Parquet format __Embedded pixel size:__ This is an optional parameter. Use this parameter only if units are present in the metadata and want to use those embedded units for the features extraction. If this option is selected, value for the length of unit and pixels per unit parameters are not required. 
diff --git a/plugin.json b/plugin.json index d3fa0f99..035d208c 100644 --- a/plugin.json +++ b/plugin.json @@ -1124,7 +1124,7 @@ }, { "name": "outputType", - "description": "outputType : singlecsv for saving values in one csv file and separate csv to save values for each image in separate csv file, arrow and parquet for saving in those file formats respectively", + "description": "outputType : singlecsv for saving values in one csv file and separate csv to save values for each image in separate csv file, arrow and parquet for saving in Apache Arrow and Parquet file formats respectively", "type": "enum", "options": { "values": [ From 42f060bd2e1ec282302ba0fa710691f26b075736 Mon Sep 17 00:00:00 2001 From: sameeul Date: Tue, 8 Aug 2023 08:28:31 -0400 Subject: [PATCH 9/9] remove old stale file --- src/nyx/featureset.h.new | 462 --------------------------------------- 1 file changed, 462 deletions(-) delete mode 100644 src/nyx/featureset.h.new diff --git a/src/nyx/featureset.h.new b/src/nyx/featureset.h.new deleted file mode 100644 index c99348e5..00000000 --- a/src/nyx/featureset.h.new +++ /dev/null @@ -1,462 +0,0 @@ -#pragma once - -#include -#include - -namespace Nyxus -{ - /// @brief Feature codes - enum AvailableFeatures - { - //==== 2D features - - // Pixel intensity stats - INTEGRATED_INTENSITY = 0, - MEAN, - MEDIAN, - MIN, - MAX, - RANGE, - STANDARD_DEVIATION, - STANDARD_ERROR, - SKEWNESS, - KURTOSIS, - HYPERSKEWNESS, - HYPERFLATNESS, - MEAN_ABSOLUTE_DEVIATION, - ENERGY, - ROOT_MEAN_SQUARED, - ENTROPY, - MODE, - UNIFORMITY, - UNIFORMITY_PIU, - P01, P10, P25, P75, P90, P99, - INTERQUARTILE_RANGE, - ROBUST_MEAN_ABSOLUTE_DEVIATION, - - // Morphology: - AREA_PIXELS_COUNT, - AREA_UM2, - CENTROID_X, - CENTROID_Y, - WEIGHTED_CENTROID_Y, - WEIGHTED_CENTROID_X, - MASS_DISPLACEMENT, - COMPACTNESS, - BBOX_YMIN, - BBOX_XMIN, - BBOX_HEIGHT, - BBOX_WIDTH, - DIAMETER_EQUAL_AREA, - EXTENT, - ASPECT_RATIO, - // -- Legendre inertia ellipse - MAJOR_AXIS_LENGTH, - 
MINOR_AXIS_LENGTH, - // -- ellipticity related - ECCENTRICITY, - ELONGATION, - ORIENTATION, - ROUNDNESS, - - // --contour related - PERIMETER, - DIAMETER_EQUAL_PERIMETER, - EDGE_MEAN_INTENSITY, - EDGE_STDDEV_INTENSITY, - EDGE_MAX_INTENSITY, - EDGE_MIN_INTENSITY, - EDGE_INTEGRATED_INTENSITY, - CIRCULARITY, - - // -- convex hull related - CONVEX_HULL_AREA, - SOLIDITY, - - // -- erosions - EROSIONS_2_VANISH, - EROSIONS_2_VANISH_COMPLEMENT, - - // -- fractal dimension - FRACT_DIM_BOXCOUNT, - FRACT_DIM_PERIMETER, - - // Caliper: - MIN_FERET_DIAMETER, - MAX_FERET_DIAMETER, - MIN_FERET_ANGLE, - MAX_FERET_ANGLE, - - STAT_FERET_DIAM_MIN, - STAT_FERET_DIAM_MAX, - STAT_FERET_DIAM_MEAN, - STAT_FERET_DIAM_MEDIAN, - STAT_FERET_DIAM_STDDEV, - STAT_FERET_DIAM_MODE, - - STAT_MARTIN_DIAM_MIN, - STAT_MARTIN_DIAM_MAX, - STAT_MARTIN_DIAM_MEAN, - STAT_MARTIN_DIAM_MEDIAN, - STAT_MARTIN_DIAM_STDDEV, - STAT_MARTIN_DIAM_MODE, - - STAT_NASSENSTEIN_DIAM_MIN, - STAT_NASSENSTEIN_DIAM_MAX, - STAT_NASSENSTEIN_DIAM_MEAN, - STAT_NASSENSTEIN_DIAM_MEDIAN, - STAT_NASSENSTEIN_DIAM_STDDEV, - STAT_NASSENSTEIN_DIAM_MODE, - - // -- Chords - MAXCHORDS_MAX, - MAXCHORDS_MAX_ANG, - MAXCHORDS_MIN, - MAXCHORDS_MIN_ANG, - MAXCHORDS_MEDIAN, - MAXCHORDS_MEAN, - MAXCHORDS_MODE, - MAXCHORDS_STDDEV, - ALLCHORDS_MAX, - ALLCHORDS_MAX_ANG, - ALLCHORDS_MIN, - ALLCHORDS_MIN_ANG, - ALLCHORDS_MEDIAN, - ALLCHORDS_MEAN, - ALLCHORDS_MODE, - ALLCHORDS_STDDEV, - - EULER_NUMBER, - - EXTREMA_P1_X, EXTREMA_P1_Y, - EXTREMA_P2_X, EXTREMA_P2_Y, - EXTREMA_P3_X, EXTREMA_P3_Y, - EXTREMA_P4_X, EXTREMA_P4_Y, - EXTREMA_P5_X, EXTREMA_P5_Y, - EXTREMA_P6_X, EXTREMA_P6_Y, - EXTREMA_P7_X, EXTREMA_P7_Y, - EXTREMA_P8_X, EXTREMA_P8_Y, - - // -- polygonal representation - POLYGONALITY_AVE, - HEXAGONALITY_AVE, - HEXAGONALITY_STDDEV, - - DIAMETER_MIN_ENCLOSING_CIRCLE, - DIAMETER_CIRCUMSCRIBING_CIRCLE, - DIAMETER_INSCRIBING_CIRCLE, - - GEODETIC_LENGTH, - THICKNESS, - - // -- ROI radius features - ROI_RADIUS_MEAN, - ROI_RADIUS_MAX, - ROI_RADIUS_MEDIAN, - 
- // Neighbor features - NUM_NEIGHBORS, - PERCENT_TOUCHING, - CLOSEST_NEIGHBOR1_DIST, - CLOSEST_NEIGHBOR1_ANG, - CLOSEST_NEIGHBOR2_DIST, - CLOSEST_NEIGHBOR2_ANG, - ANG_BW_NEIGHBORS_MEAN, - ANG_BW_NEIGHBORS_STDDEV, - ANG_BW_NEIGHBORS_MODE, - - // GLCM: - GLCM_ANGULAR2NDMOMENT, - GLCM_ACOR, // Autocorrelation, IBSI # QWB0 - GLCM_CLUPROM, // Cluster prominence, IBSI # AE86 - GLCM_CLUSHADE, // Cluster shade, IBSI # 7NFM - GLCM_CLUTEND, // Cluster tendency, IBSI # DG8W - GLCM_CONTRAST, - GLCM_CORRELATION, - GLCM_DIFFERENCEAVERAGE, - GLCM_DIFFERENCEENTROPY, - GLCM_DIFFERENCEVARIANCE, - GLCM_DIS, // Dissimilarity, IBSI # 8S9J - GLCM_ENERGY, - GLCM_ENTROPY, - GLCM_HOMOGENEITY, // Homogeneity-1 (PyR) - GLCM_HOM2, // Homogeneity-2 (PyR) - GLCM_IDMN, // Inv diff mom normalized, IBSI # 1QCO - GLCM_ID, // Inv diff, IBSI # IB1Z - GLCM_IDN, // Inv diff normalized, IBSI # NDRX - GLCM_IV, // Inv variance, IBSI # E8JP - GLCM_JAVE, // Joint average, IBSI # 60VM - GLCM_JE, // Joint entropy, IBSI # TU9B - GLCM_JMAX, // Joint max (aka PyR max probability), IBSI # GYBY - GLCM_JVAR, // Joint var (aka PyR Sum of Squares), IBSI # UR99 - GLCM_INFOMEAS1, - GLCM_INFOMEAS2, - GLCM_INVERSEDIFFERENCEMOMENT, // Inv diff mom, IBSI # WF0Z - GLCM_SUMAVERAGE, - GLCM_SUMENTROPY, - GLCM_SUMVARIANCE, // Sum variance, IBSI # OEEB - GLCM_VARIANCE, - - // GLRLM: - GLRLM_SRE, // Short Run Emphasis - GLRLM_LRE, // Long Run Emphasis - GLRLM_GLN, // Gray Level Non-Uniformity - GLRLM_GLNN, // Gray Level Non-Uniformity Normalized - GLRLM_RLN, // Run Length Non-Uniformity - GLRLM_RLNN, // Run Length Non-Uniformity Normalized - GLRLM_RP, // Run Percentage - GLRLM_GLV, // Gray Level Variance - GLRLM_RV, // Run Variance - GLRLM_RE, // Run Entropy - GLRLM_LGLRE, // Low Gray Level Run Emphasis - GLRLM_HGLRE, // High Gray Level Run Emphasis - GLRLM_SRLGLE, // Short Run Low Gray Level Emphasis - GLRLM_SRHGLE, // Short Run High Gray Level Emphasis - GLRLM_LRLGLE, // Long Run Low Gray Level Emphasis - GLRLM_LRHGLE, // Long 
Run High Gray Level Emphasis - - // GLDZM: - GLDZM_SDE, // Small Distance Emphasis - GLDZM_LDE, // Large Distance Emphasis - GLDZM_LGLE, // Low Grey Level Emphasis - GLDZM_HGLE, // High GreyLevel Emphasis - GLDZM_SDLGLE, // Small Distance Low Grey Level Emphasis - GLDZM_SDHGLE, // Small Distance High GreyLevel Emphasis - GLDZM_LDLGLE, // Large Distance Low Grey Level Emphasis - GLDZM_LDHGLE, // Large Distance High Grey Level Emphasis - GLDZM_GLNU, // Grey Level Non Uniformity - GLDZM_GLNUN, // Grey Level Non Uniformity Normalized - GLDZM_ZDNU, // Zone Distance Non Uniformity - GLDZM_ZDNUN, // Zone Distance Non Uniformity Normalized - GLDZM_ZP, // Zone Percentage - GLDZM_GLM, // Grey Level Mean - GLDZM_GLV, // Grey Level Variance - GLDZM_ZDM, // Zone Distance Mean - GLDZM_ZDV, // Zone Distance Variance - GLDZM_ZDE, // Zone Distance Entropy - - // GLSZM: - GLSZM_SAE, // Small Area Emphasis - GLSZM_LAE, // Large Area Emphasis - GLSZM_GLN, // Gray Level Non - Uniformity - GLSZM_GLNN, // Gray Level Non - Uniformity Normalized - GLSZM_SZN, // Size - Zone Non - Uniformity - GLSZM_SZNN, // Size - Zone Non - Uniformity Normalized - GLSZM_ZP, // Zone Percentage - GLSZM_GLV, // Gray Level Variance - GLSZM_ZV, // Zone Variance - GLSZM_ZE, // Zone Entropy - GLSZM_LGLZE, // Low Gray Level Zone Emphasis - GLSZM_HGLZE, // High Gray Level Zone Emphasis - GLSZM_SALGLE, // Small Area Low Gray Level Emphasis - GLSZM_SAHGLE, // Small Area High Gray Level Emphasis - GLSZM_LALGLE, // Large Area Low Gray Level Emphasis - GLSZM_LAHGLE, // Large Area High Gray Level Emphasis - - // GLDM: - GLDM_SDE, // Small Dependence Emphasis(SDE) - GLDM_LDE, // Large Dependence Emphasis (LDE) - GLDM_GLN, // Gray Level Non-Uniformity (GLN) - GLDM_DN, // Dependence Non-Uniformity (DN) - GLDM_DNN, // Dependence Non-Uniformity Normalized (DNN) - GLDM_GLV, // Gray Level Variance (GLV) - GLDM_DV, // Dependence Variance (DV) - GLDM_DE, // Dependence Entropy (DE) - GLDM_LGLE, // Low Gray Level Emphasis (LGLE) - 
GLDM_HGLE, // High Gray Level Emphasis (HGLE) - GLDM_SDLGLE, // Small Dependence Low Gray Level Emphasis (SDLGLE) - GLDM_SDHGLE, // Small Dependence High Gray Level Emphasis (SDHGLE) - GLDM_LDLGLE, // Large Dependence Low Gray Level Emphasis (LDLGLE) - GLDM_LDHGLE, // Large Dependence High Gray Level Emphasis (LDHGLE) - - // NGTDM: - NGTDM_COARSENESS, - NGTDM_CONTRAST, - NGTDM_BUSYNESS, - NGTDM_COMPLEXITY, - NGTDM_STRENGTH, - - // Radial intensity distribution: - ZERNIKE2D, - FRAC_AT_D, - MEAN_FRAC, - RADIAL_CV, - - // Spatial (raw) moments - SPAT_MOMENT_00, - SPAT_MOMENT_01, - SPAT_MOMENT_02, - SPAT_MOMENT_03, - SPAT_MOMENT_10, - SPAT_MOMENT_11, - SPAT_MOMENT_12, - SPAT_MOMENT_20, - SPAT_MOMENT_21, - SPAT_MOMENT_30, - - // Weighted spatial moments - WEIGHTED_SPAT_MOMENT_00, - WEIGHTED_SPAT_MOMENT_01, - WEIGHTED_SPAT_MOMENT_02, - WEIGHTED_SPAT_MOMENT_03, - WEIGHTED_SPAT_MOMENT_10, - WEIGHTED_SPAT_MOMENT_11, - WEIGHTED_SPAT_MOMENT_12, - WEIGHTED_SPAT_MOMENT_20, - WEIGHTED_SPAT_MOMENT_21, - WEIGHTED_SPAT_MOMENT_30, - - // Central moments - CENTRAL_MOMENT_02, - CENTRAL_MOMENT_03, - CENTRAL_MOMENT_11, - CENTRAL_MOMENT_12, - CENTRAL_MOMENT_20, - CENTRAL_MOMENT_21, - CENTRAL_MOMENT_30, - - // Weighted central moments - WEIGHTED_CENTRAL_MOMENT_02, - WEIGHTED_CENTRAL_MOMENT_03, - WEIGHTED_CENTRAL_MOMENT_11, - WEIGHTED_CENTRAL_MOMENT_12, - WEIGHTED_CENTRAL_MOMENT_20, - WEIGHTED_CENTRAL_MOMENT_21, - WEIGHTED_CENTRAL_MOMENT_30, - - // Normalized central moments - NORM_CENTRAL_MOMENT_02, - NORM_CENTRAL_MOMENT_03, - NORM_CENTRAL_MOMENT_11, - NORM_CENTRAL_MOMENT_12, - NORM_CENTRAL_MOMENT_20, - NORM_CENTRAL_MOMENT_21, - NORM_CENTRAL_MOMENT_30, - - // Normalized (standardized) spatial moments - NORM_SPAT_MOMENT_00, - NORM_SPAT_MOMENT_01, - NORM_SPAT_MOMENT_02, - NORM_SPAT_MOMENT_03, - NORM_SPAT_MOMENT_10, - NORM_SPAT_MOMENT_20, - NORM_SPAT_MOMENT_30, - - // Hu's moments 1-7 - HU_M1, - HU_M2, - HU_M3, - HU_M4, - HU_M5, - HU_M6, - HU_M7, - - // Weighted Hu's moments 1-7 - 
WEIGHTED_HU_M1, - WEIGHTED_HU_M2, - WEIGHTED_HU_M3, - WEIGHTED_HU_M4, - WEIGHTED_HU_M5, - WEIGHTED_HU_M6, - WEIGHTED_HU_M7, - - GABOR, - - _COUNT_ - }; -} - -using namespace Nyxus; - -/// @brief Helper class to set and access user feature selection made via the command line or Python interface. -class FeatureSet -{ -public: - FeatureSet(); - void enableAll (bool newStatus = true) { for (int i = 0; i < AvailableFeatures::_COUNT_; i++) m_enabledFeatures[i] = newStatus; } - void disableFeatures (std::initializer_list& desiredFeatures) - { - for (auto f : desiredFeatures) - m_enabledFeatures[f] = false; - } - void enableFeatures(std::initializer_list& desiredFeatures) { - for (auto f : desiredFeatures) - m_enabledFeatures[f] = true; - } - void enableFeature(AvailableFeatures f) { - m_enabledFeatures[f] = true; - } - void enablePixelIntenStats() { - enableAll(false); - m_enabledFeatures[MEAN] = - m_enabledFeatures[MEDIAN] = - m_enabledFeatures[MIN] = - m_enabledFeatures[MAX] = - m_enabledFeatures[RANGE] = - m_enabledFeatures[STANDARD_DEVIATION] = - m_enabledFeatures[SKEWNESS] = - m_enabledFeatures[KURTOSIS] = - m_enabledFeatures[MEAN_ABSOLUTE_DEVIATION] = - m_enabledFeatures[ENERGY] = - m_enabledFeatures[ROOT_MEAN_SQUARED] = - m_enabledFeatures[ENTROPY] = - m_enabledFeatures[MODE] = - m_enabledFeatures[UNIFORMITY] = - m_enabledFeatures[P10] = m_enabledFeatures[P25] = m_enabledFeatures[P75] = m_enabledFeatures[P90] = - m_enabledFeatures[INTERQUARTILE_RANGE] = - m_enabledFeatures[ROBUST_MEAN_ABSOLUTE_DEVIATION] = - m_enabledFeatures[WEIGHTED_CENTROID_Y] = - m_enabledFeatures[WEIGHTED_CENTROID_X] = - m_enabledFeatures[MASS_DISPLACEMENT] = true; - } - void enableBoundingBox() { - enableAll(false); - m_enabledFeatures[BBOX_YMIN] = - m_enabledFeatures[BBOX_XMIN] = - m_enabledFeatures[BBOX_HEIGHT] = - m_enabledFeatures[BBOX_WIDTH] = true; - } - void enableFeret() { - enableAll(false); - m_enabledFeatures[MIN_FERET_DIAMETER] = - m_enabledFeatures[MAX_FERET_DIAMETER] = - 
m_enabledFeatures[MIN_FERET_ANGLE] = - m_enabledFeatures[MAX_FERET_ANGLE] = - m_enabledFeatures[STAT_FERET_DIAM_MIN] = - m_enabledFeatures[STAT_FERET_DIAM_MAX] = - m_enabledFeatures[STAT_FERET_DIAM_MEAN] = - m_enabledFeatures[STAT_FERET_DIAM_MEDIAN] = - m_enabledFeatures[STAT_FERET_DIAM_STDDEV] = - m_enabledFeatures[STAT_FERET_DIAM_MODE] = true; - } - bool isEnabled(int fc) const { return fc < AvailableFeatures::_COUNT_ ? m_enabledFeatures[fc] : false; } - bool anyEnabled(std::initializer_list F) const - { - for (auto f : F) - if (m_enabledFeatures[f]) - return true; - return false; - } - int numOfEnabled() { - int cnt = 0; - for (int i = 0; i < AvailableFeatures::_COUNT_; i++) - if (m_enabledFeatures[i]) - cnt++; - return cnt; - } - bool findFeatureByString (const std::string& featureName, AvailableFeatures& fcode); - std::string findFeatureNameByCode (AvailableFeatures fcode); - void show_help(); - - // Relying on RVO rather than std::move - std::vector> getEnabledFeatures(); - -private: - bool m_enabledFeatures[AvailableFeatures::_COUNT_]; -}; - -namespace Nyxus -{ - extern FeatureSet theFeatureSet; - extern std::map UserFacingFeatureNames; -}