From 0f889f4b8f06a1310ac089384a8bc4072c727427 Mon Sep 17 00:00:00 2001
From: Leonard
Date: Tue, 18 Jun 2019 16:23:37 +0200
Subject: [PATCH 1/8] Updated script to merge HDF5 result files

---
 misc/mergeResultFiles.py | 109 +++++++++++++++++++++++++--------------
 1 file changed, 69 insertions(+), 40 deletions(-)

diff --git a/misc/mergeResultFiles.py b/misc/mergeResultFiles.py
index fa44b9077..b88d50f73 100755
--- a/misc/mergeResultFiles.py
+++ b/misc/mergeResultFiles.py
@@ -6,54 +6,83 @@
 """
 
-import numpy as np
 import h5py
-import sys
-import re
-import os
+import glob
+import argparse
 
-def printUsage():
-    print('Usage: %s INFILE ... OUTFILE' % __file__)
-
-def mergeFiles(infiles, outfile):
+
+def merge_files(in_files, out_file, save_iteration, same_input):
     """
-    Read final multistart optimization results from HDF5 infile and write to HDF5 outfile.
+    Read final multistart optimization results from HDF5 in_files and write to HDF5 out_file.
     """
-
-    fOut = h5py.File(outfile, "w")
-
-    for infile in infiles:
-        print(infile)
-        with h5py.File(infile, "r") as fIn:
-            copyRecursive(fIn["/"], fOut["/"])
-
-def copyRecursive(inObj, rootOut):
+    if isinstance(in_files, list):
+        in_files_list = in_files
+    else:
+        in_files_list = glob.glob(in_files)
+
+    f_out = h5py.File(out_file, "w")
+
+    start_idx = 0
+    if same_input:
+        f_out.create_group('multistarts')
+        for in_file in in_files_list:
+            print(in_file)
+            with h5py.File(in_file, "r") as fIn:
+                if '/inputData' not in f_out:
+                    fIn.copy('/inputData', f_out)
+                copy_multistarts(fIn, f_out, start_idx, save_iteration)
+                f_out['multistarts/' + str(start_idx)].attrs['soure_file'] = in_file.split(sep='/')[-1]
+                start_idx += 1
+    else:
+        for in_file in in_files_list:
+            print(in_file)
+            with h5py.File(in_file, "r") as fIn:
+                fIn.copy('/', f_out, in_file.split(sep='/')[-1])
+
+
+def copy_multistarts(f_in, f_out, start_idx, save_iteration):
     """
-    Recursively copy inObj to the same path under rootOut
-
-    NOTE: no attributes are copied for now
+    Copy all starts in f_in to f_out.
     """
-    print("\tCopy %s to %s" % (inObj.name, rootOut.name))
-    dest = rootOut.name + inObj.parent.name
-    if(not inObj.name in rootOut):
-        print("\tDestination %s does not exists: copy %s " %(dest, inObj.name))
-        inObj.copy(inObj.name, rootOut.file[dest])
-    else:
-        print("\tDestination %s exists: copy members of %s" %(dest, inObj.name))
-        for obj in inObj:
-            print("\t\t" + obj)
-            copyRecursive(inObj[obj], rootOut)
+    for start in f_in['/multistarts']:
+        if save_iteration:
+            f_in.copy('/multistarts/' + start, f_out, 'multistarts/' + str(start_idx))
+        else:
+            for key in f_in['/multistarts/' + start].keys():
+                if not (key == 'iteration'):
+                    f_in.copy('/multistarts/' + start + '/' + key, f_out, 'multistarts/' + str(start_idx) + '/' + key)
+
+
+def parse_cli_args():
+    """Parse command line arguments"""
+
+    parser = argparse.ArgumentParser(
+        description='Merge HDF5 result files.')
+
+    parser.add_argument('-f', '--in-files', dest='in_files',
+                        required=True,
+                        help='Path for files to be merged. E.g. results/*-rank00000.h5. 
Or list with file names.') + + parser.add_argument('-o', '--out-file', dest='out_file', + required=True, + help='Name of HDF5 file to be generated') + + parser.add_argument('-i', '--save-iteration', dest='save_iteration', default=False, + help='Save optimizer information for all iterations') + + parser.add_argument('-s', '--same-input', dest='same_input', default=True, + help='Merges multistarts with the same inputData.') + args = parser.parse_args() + + return args + def main(): - numRequiredArgs = 2 - if len(sys.argv) <= numRequiredArgs + 1: - printUsage() - sys.exit(1) - - infiles = sys.argv[1:(len(sys.argv) - 1)] - outfile = sys.argv[-1] - - mergeFiles(infiles, outfile) + + args = parse_cli_args() + merge_files(args.in_files, args.out_file, args.save_iteration, args.same_input) + + if __name__ == "__main__": main() From 07c3a04e50e9c01cef252e439f5504c08acd70c8 Mon Sep 17 00:00:00 2001 From: LeonardSchmiester Date: Wed, 19 Jun 2019 09:12:07 +0200 Subject: [PATCH 2/8] Update misc/mergeResultFiles.py fixed typo Co-Authored-By: Daniel Weindl --- misc/mergeResultFiles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/mergeResultFiles.py b/misc/mergeResultFiles.py index b88d50f73..4f352af14 100755 --- a/misc/mergeResultFiles.py +++ b/misc/mergeResultFiles.py @@ -31,7 +31,7 @@ def merge_files(in_files, out_file, save_iteration, same_input): if '/inputData' not in f_out: fIn.copy('/inputData', f_out) copy_multistarts(fIn, f_out, start_idx, save_iteration) - f_out['multistarts/' + str(start_idx)].attrs['soure_file'] = in_file.split(sep='/')[-1] + f_out['multistarts/' + str(start_idx)].attrs['source_file'] = in_file.split(sep='/')[-1] start_idx += 1 else: for in_file in in_files_list: From 7f569a9e856efd9d2fda6c1cd7906666b26b5652 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Mon, 28 Jun 2021 09:20:15 +0200 Subject: [PATCH 3/8] SuperMUC build script: skip boost --- buildSuperMUC.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/buildSuperMUC.sh b/buildSuperMUC.sh index 5e683726f..1b551d62f 100755 --- a/buildSuperMUC.sh +++ b/buildSuperMUC.sh @@ -66,7 +66,7 @@ build_boost() { build_3rd_party_deps() { # build dependencies - build_boost + # build_boost "${parpe_root}/ThirdParty/installIpopt.sh" #./installCeres.sh #./installCpputest.sh @@ -89,8 +89,8 @@ build_parpe() { mkdir -p build && cd build ipopt_root=${parpe_root}/ThirdParty/Ipopt-releases-3.13.3/ + #BOOST_ROOT=${boost_install_dir} \ HDF5_ROOT=${HDF5_BASE} \ - BOOST_ROOT=${boost_install_dir} \ MPI_HOME=${MPI_BASE} \ PKG_CONFIG_PATH=${PKG_CONFIG_PATH:-}:${ipopt_root}/install/lib/pkgconfig/:${ipopt_root}/ThirdParty-HSL/install/lib/pkgconfig/ \ cmake -S .. \ From eb33a10abde3faf2835ce846482817df06afcfe1 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Mon, 28 Jun 2021 22:14:58 +0200 Subject: [PATCH 4/8] Fix race condition with parallel HDF5 calls (#345) * Fix race condition with parallel HDF5 calls Add some locks, in particular for H5Location::nameExists. Also remove some unnecessary H5File::getId calls. 
* Safely close HDF5 files

* Check pthread_create retval

* cleanup
---
 ...lOptimizationAnalyticalParameterProvider.h |  6 +-
 .../parpeamici/multiConditionDataProvider.h   |  2 +-
 include/parpeamici/simulationResultWriter.h   |  2 +
 .../optimizationResultWriter.h                |  2 +-
 ...ptimizationAnalyticalParameterProvider.cpp | 12 ++-
 src/parpeamici/multiConditionDataProvider.cpp | 49 +++++++----
 src/parpeamici/multiConditionProblem.cpp      | 16 ++--
 src/parpeamici/optimizationApplication.cpp    |  1 +
 src/parpeamici/simulationResultWriter.cpp     |  6 ++
 src/parpeamici/standaloneSimulator.cpp        | 15 ++--
 src/parpecommon/hdf5Misc.cpp                  | 81 ++++++++++---------
 .../multiStartOptimization.cpp                | 21 +++--
 src/parpeoptimization/optimizationOptions.cpp | 10 ++-
 .../optimizationResultWriter.cpp              | 41 ++++++----
 14 files changed, 165 insertions(+), 99 deletions(-)

diff --git a/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h b/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
index 28c495698..40ff62f74 100644
--- a/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
+++ b/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
@@ -124,6 +124,8 @@ class AnalyticalParameterHdf5Reader : public AnalyticalParameterProvider
      */
     std::vector<int> getOptimizationParameterIndices() const override;
 
+    ~AnalyticalParameterHdf5Reader() override;
+
   private:
     /**
      * @brief Get number of analytically computed parameters
@@ -142,9 +144,9 @@ class AnalyticalParameterHdf5Reader : public AnalyticalParameterProvider
      * - observableIdx: index of model output
      */
     void readParameterConditionObservableMappingFromFile();
-    std::vector<int> readRawMap(H5::DataSet& dataset,
-                                hsize_t& nRows,
-                                hsize_t& nCols);
+    std::vector<int> readRawMap(const H5::DataSet &dataset,
+                                hsize_t& nRows,
+                                hsize_t& nCols) const;
 
     H5::H5File file;
     std::string rootPath;
diff --git a/include/parpeamici/multiConditionDataProvider.h b/include/parpeamici/multiConditionDataProvider.h
index cedca9259..f3ae58b29 100644
--- a/include/parpeamici/multiConditionDataProvider.h
+++ b/include/parpeamici/multiConditionDataProvider.h
@@ -249,7 +249,7 @@ class MultiConditionDataProviderHDF5 : public MultiConditionDataProvider
     MultiConditionDataProviderHDF5(MultiConditionDataProviderHDF5 const&) = delete;
 
-    virtual ~MultiConditionDataProviderHDF5() override = default;
+    virtual ~MultiConditionDataProviderHDF5() override;
 
     /**
      * @brief Get the number of simulations required for objective function
diff --git a/include/parpeamici/simulationResultWriter.h b/include/parpeamici/simulationResultWriter.h
index 53a18dac9..7fccdaef2 100644
--- a/include/parpeamici/simulationResultWriter.h
+++ b/include/parpeamici/simulationResultWriter.h
@@ -51,6 +51,8 @@ class SimulationResultWriter {
     // Implement me
     SimulationResultWriter(SimulationResultWriter const&) = delete;
 
+    ~SimulationResultWriter();
+
     /**
      * @brief Create results datasets. 
* diff --git a/include/parpeoptimization/optimizationResultWriter.h b/include/parpeoptimization/optimizationResultWriter.h index d91338663..abb5c26f6 100644 --- a/include/parpeoptimization/optimizationResultWriter.h +++ b/include/parpeoptimization/optimizationResultWriter.h @@ -39,7 +39,7 @@ class OptimizationResultWriter { OptimizationResultWriter(OptimizationResultWriter const& other); - virtual ~OptimizationResultWriter() = default; + virtual ~OptimizationResultWriter(); /** * @brief Function to be called after each objective function f(x) or diff --git a/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp b/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp index a817777f5..76fd3887b 100644 --- a/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp +++ b/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp @@ -83,6 +83,12 @@ AnalyticalParameterHdf5Reader::getOptimizationParameterIndices() const return analyticalParameterIndices; } +AnalyticalParameterHdf5Reader::~AnalyticalParameterHdf5Reader() +{ + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + file.close(); +} + int AnalyticalParameterHdf5Reader::getNumAnalyticalParameters() const { @@ -136,10 +142,12 @@ AnalyticalParameterHdf5Reader::readParameterConditionObservableMappingFromFile() } std::vector -AnalyticalParameterHdf5Reader::readRawMap(H5::DataSet& dataset, +AnalyticalParameterHdf5Reader::readRawMap(H5::DataSet const& dataset, hsize_t& nRows, - hsize_t& nCols) + hsize_t& nCols) const { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + auto dataspace = dataset.getSpace(); auto ndims = dataspace.getSimpleExtentNdims(); if (ndims != 2) diff --git a/src/parpeamici/multiConditionDataProvider.cpp b/src/parpeamici/multiConditionDataProvider.cpp index 7075a4a4d..3557c57de 100644 --- a/src/parpeamici/multiConditionDataProvider.cpp +++ b/src/parpeamici/multiConditionDataProvider.cpp @@ -21,8 +21,13 @@ MultiConditionDataProviderHDF5::MultiConditionDataProviderHDF5( : MultiConditionDataProviderHDF5(std::move(model), hdf5Filename, "") {} +MultiConditionDataProviderHDF5::~MultiConditionDataProviderHDF5() { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + file_.close(); +} + MultiConditionDataProviderHDF5::MultiConditionDataProviderHDF5( - std::unique_ptr model, + std::unique_ptr model, std::string const& hdf5Filename, std::string const& rootPath) : model_(std::move(model)) @@ -70,8 +75,8 @@ MultiConditionDataProviderHDF5::getNumberOfSimulationConditions() const [[maybe_unused]] auto lock = hdf5MutexGetLock(); int d1, d2; - hdf5GetDatasetDimensions( - file_.getId(), hdf5_reference_condition_path_.c_str(), 2, &d1, &d2); + hdf5GetDatasetDimensions(file_, hdf5_reference_condition_path_, + 2, &d1, &d2); return d1; } @@ -82,6 +87,8 @@ MultiConditionDataProviderHDF5::getSimulationToOptimizationParameterMapping( { std::string path = hdf5_simulation_to_optimization_parameter_mapping_path_; + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + if (file_.nameExists(path)) { return hdf5Read2DIntegerHyperslab( file_, path, model_->np(), 1, 0, conditionIdx); @@ -129,15 +136,20 @@ MultiConditionDataProviderHDF5::mapAndSetOptimizationToSimulationVariables( auto mapping = getSimulationToOptimizationParameterMapping(conditionIdx); std::vector overrides; - if (file_.nameExists(hdf5_parameter_overrides_path)) { - overrides.resize(model_->np()); - hdf5Read2DDoubleHyperslab(file_, - hdf5_parameter_overrides_path, - model_->np(), - 1, - 0, - conditionIdx, - overrides); + + { + 
[[maybe_unused]] auto lock = hdf5MutexGetLock(); + + if (file_.nameExists(hdf5_parameter_overrides_path)) { + overrides.resize(model_->np()); + hdf5Read2DDoubleHyperslab(file_, + hdf5_parameter_overrides_path, + model_->np(), + 1, + 0, + conditionIdx, + overrides); + } } for (int i = 0; i < model_->np(); ++i) { @@ -260,8 +272,8 @@ MultiConditionDataProviderHDF5::readFixedSimulationParameters( H5_SAVE_ERROR_HANDLER; - hdf5Read2DDoubleHyperslab(file_.getId(), - hdf5_condition_path_.c_str(), + hdf5Read2DDoubleHyperslab(file_, + hdf5_condition_path_, model_->nk(), 1, 0, @@ -392,7 +404,7 @@ MultiConditionDataProviderHDF5::getNumOptimizationParameters() const { std::string path = root_path_ + "/parameters/parameterNames"; int size = 0; - hdf5GetDatasetDimensions(file_.getId(), path.c_str(), 1, &size); + hdf5GetDatasetDimensions(file_, path, 1, &size); return size; } @@ -436,6 +448,7 @@ MultiConditionDataProviderHDF5::updateSimulationParametersAndScale( void MultiConditionDataProviderHDF5::copyInputData(H5::H5File const& target) { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); H5Ocopy(file_.getId(), "/", @@ -515,6 +528,7 @@ H5::H5File MultiConditionDataProviderHDF5::getHdf5File() const void MultiConditionDataProviderHDF5::checkDataIntegrity() const { + // check matching IDs std::string modelParameterIdsPath = root_path_ + "/model/parameterIds"; auto dataParameterIds = @@ -565,7 +579,7 @@ MultiConditionDataProviderHDF5::checkDataIntegrity() const if (model_->nk()) { parpe::hdf5GetDatasetDimensions( - file_.getId(), hdf5_condition_path_.c_str(), 2, &d1, &d2); + file_, hdf5_condition_path_, 2, &d1, &d2); Expects(d1 == model_->nk()); } } @@ -678,6 +692,8 @@ std::vector> MultiConditionDataProviderDefault::getAllMeasurements() const { std::vector> measurements; + measurements.reserve(edata_.size()); + for (const auto& e : edata_) { measurements.push_back(e.getObservedData()); } @@ -688,6 +704,7 @@ std::vector> MultiConditionDataProviderDefault::getAllSigmas() const { std::vector> sigmas; + sigmas.reserve(edata_.size()); for (const auto& e : edata_) { sigmas.push_back(e.getObservedDataStdDev()); } diff --git a/src/parpeamici/multiConditionProblem.cpp b/src/parpeamici/multiConditionProblem.cpp index 3e5c84390..08657a889 100644 --- a/src/parpeamici/multiConditionProblem.cpp +++ b/src/parpeamici/multiConditionProblem.cpp @@ -258,31 +258,31 @@ void saveSimulation(const H5::H5File &file, std::string const& pathStr, [[maybe_unused]] auto lock = hdf5MutexGetLock(); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "simulationLogLikelihood", + file, fullGroupPath, "simulationLogLikelihood", gsl::make_span(&llh, 1)); hdf5CreateOrExtendAndWriteToInt2DArray( - file.getId(), fullGroupPath, "jobId", + file, fullGroupPath, "jobId", gsl::make_span(&jobId, 1)); if (!gradient.empty()) { hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "simulationLogLikelihoodGradient", + file, fullGroupPath, "simulationLogLikelihoodGradient", gradient); } else if(!parameters.empty()) { double dummyGradient[parameters.size()]; std::fill_n(dummyGradient, parameters.size(), NAN); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "simulationLogLikelihoodGradient", + file, fullGroupPath, "simulationLogLikelihoodGradient", gsl::make_span(dummyGradient, parameters.size())); } if (!parameters.empty()) hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "simulationParameters", parameters); + file, fullGroupPath, "simulationParameters", parameters); 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-        file.getId(), fullGroupPath, "simulationWallTimeInSec",
+        file, fullGroupPath, "simulationWallTimeInSec",
         gsl::make_span(&timeElapsedInSeconds, 1));
 
     // TODO: This was broken by allowing different numbers of timepoints
     //        stateSensi.size() / parameters.size(), parameters.size());
 
     hdf5CreateOrExtendAndWriteToInt2DArray(
-        file.getId(), fullGroupPath, "simulationStatus",
+        file, fullGroupPath, "simulationStatus",
         gsl::make_span(&status, 1));
 
-    hdf5CreateOrExtendAndWriteToString1DArray(file.getId(), fullGroupPath,
+    hdf5CreateOrExtendAndWriteToString1DArray(file, fullGroupPath,
                                               "simulationLabel", label);
 
     file.flush(H5F_SCOPE_LOCAL);
diff --git a/src/parpeamici/optimizationApplication.cpp b/src/parpeamici/optimizationApplication.cpp
index 0e472448b..29ed2aa0c 100644
--- a/src/parpeamici/optimizationApplication.cpp
+++ b/src/parpeamici/optimizationApplication.cpp
@@ -345,6 +345,7 @@ bool OptimizationApplication::isWorker() { return getMpiRank() > 0; }
 OptimizationApplication::~OptimizationApplication() {
     // objects must be destroyed before MPI_Finalize is called
     // and Hdf5 mutex is destroyed
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
     h5File.close();
     problem.reset(nullptr);
 #ifdef PARPE_ENABLE_MPI
diff --git a/src/parpeamici/simulationResultWriter.cpp b/src/parpeamici/simulationResultWriter.cpp
index 84484b15e..75f633aaf 100644
--- a/src/parpeamici/simulationResultWriter.cpp
+++ b/src/parpeamici/simulationResultWriter.cpp
@@ -18,6 +18,12 @@ SimulationResultWriter::SimulationResultWriter(H5::H5File const& file,
     updatePaths();
 }
 
+SimulationResultWriter::~SimulationResultWriter()
+{
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    file.close();
+}
+
 SimulationResultWriter::SimulationResultWriter(const std::string &hdf5FileName,
                                                std::string rootPath)
 
diff --git a/src/parpeamici/standaloneSimulator.cpp b/src/parpeamici/standaloneSimulator.cpp
index 2aeddbe10..9f279a14e 100644
--- a/src/parpeamici/standaloneSimulator.cpp
+++ b/src/parpeamici/standaloneSimulator.cpp
@@ -520,6 +520,8 @@ std::pair<int, double>
 getFunctionEvaluationWithMinimalCost(std::string const& datasetPath,
                                      H5::H5File const& file)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     H5::DataSet dataset = file.openDataSet(datasetPath);
     H5::DataSpace filespace = dataset.getSpace();
@@ -534,7 +536,7 @@ getFunctionEvaluationWithMinimalCost(std::string const& datasetPath,
     std::vector<double> cost(numFunctionEvalations, INFINITY);
     parpe::hdf5Read2DDoubleHyperslab(
-        file.getId(), datasetPath.c_str(), 1, numFunctionEvalations, 0, 0, cost);
+        file, datasetPath, 1, numFunctionEvalations, 0, 0, cost);
     int minIndex = std::min_element(cost.begin(), cost.end()) - cost.begin();
     return { minIndex, cost[minIndex] };
 }
@@ -561,8 +563,8 @@ getParameterTrajectory(std::string const& startIndex, H5::H5File const& file)
 
     for (int iter = 0; iter < numIter; ++iter) {
         parameters[iter] = std::vector<double>(numParam);
-        parpe::hdf5Read2DDoubleHyperslab(file.getId(),
-                                         parameterPath.c_str(),
+        parpe::hdf5Read2DDoubleHyperslab(file,
+                                         parameterPath,
                                          numParam,
                                          1,
                                          0,
@@ -577,7 +579,7 @@ int
 getNumStarts(H5::H5File const& file, std::string const& rootPath)
 {
     auto o = parpe::OptimizationOptions::fromHDF5(
-        file.getId(), rootPath + "/optimizationOptions");
+        file, rootPath + "/optimizationOptions");
     return o->numStarts;
 }
@@ -722,7 +724,6 @@ runNominalParameters(StandaloneSimulator& sim,
 
     [[maybe_unused]] auto lock = hdf5MutexGetLock(); 
H5::H5File parameterFile(parameterFileName, H5F_ACC_RDONLY); H5::H5File conditionFile(conditionFileName, H5F_ACC_RDONLY); - lock.unlock(); int errors = 0; @@ -731,9 +732,11 @@ runNominalParameters(StandaloneSimulator& sim, < parameters; for(int i = 0; i < static_cast(parameterValues.size()); ++i) diff --git a/src/parpecommon/hdf5Misc.cpp b/src/parpecommon/hdf5Misc.cpp index 65173246b..ee3c3074b 100644 --- a/src/parpecommon/hdf5Misc.cpp +++ b/src/parpecommon/hdf5Misc.cpp @@ -43,6 +43,8 @@ namespace parpe { static mutexHdfType mutexHdf; void initHDF5Mutex() { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + // TODO: check if still required H5dont_atexit(); } @@ -55,8 +57,9 @@ std::unique_lock hdf5MutexGetLock() herr_t hdf5ErrorStackWalker_cb(unsigned int n, const H5E_error_t *err_desc, void* /*client_data*/) { Ensures(err_desc != nullptr); - const int indent = 2; + constexpr int indent = 2; + [[maybe_unused]] auto lock = hdf5MutexGetLock(); std::unique_ptr maj_str { H5Eget_major(err_desc->maj_num), &std::free }; std::unique_ptr @@ -108,8 +111,8 @@ void hdf5CreateGroup(const H5::H5File &file, const std::string &groupPath, bool try { auto group = file.createGroup(groupPath.c_str(), groupCreationPropertyList); } catch (H5::Exception const&) { - throw(HDF5Exception("Failed to create group in hdf5CreateGroup:" + - groupPath)); + throw HDF5Exception("Failed to create group in hdf5CreateGroup:" + + groupPath); } } @@ -118,15 +121,15 @@ void hdf5CreateExtendableDouble2DArray(const H5::H5File &file, hsize_t stride) { constexpr int rank = 2; - hsize_t initialDimensions[2] = {stride, 0}; - hsize_t maximumDimensions[2] = {stride, H5S_UNLIMITED}; + hsize_t initialDimensions[2] {stride, 0}; + hsize_t maximumDimensions[2] {stride, H5S_UNLIMITED}; std::lock_guard lock(mutexHdf); H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions); // need chunking for extendable dataset - hsize_t chunkDimensions[2] = {stride, 1}; + hsize_t chunkDimensions[2] {stride, 1}; H5::DSetCreatPropList dSetCreatPropList; dSetCreatPropList.setChunk(rank, chunkDimensions); @@ -156,13 +159,12 @@ void hdf5Extend2ndDimensionAndWriteToDouble2DArray(const H5::H5File &file, const Expects(buffer.size() == currentDimensions[0]); - hsize_t newDimensions[2] = {currentDimensions[0], - currentDimensions[1] + 1}; + hsize_t newDimensions[2] {currentDimensions[0], currentDimensions[1] + 1}; dataset.extend(newDimensions); filespace = dataset.getSpace(); - hsize_t offset[2] = {0, currentDimensions[1]}; - hsize_t slabsize[2] = {currentDimensions[0], 1}; + hsize_t offset[2] {0, currentDimensions[1]}; + hsize_t slabsize[2] {currentDimensions[0], 1}; filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset); @@ -186,18 +188,18 @@ void hdf5Extend3rdDimensionAndWriteToDouble3DArray(const H5::H5File &file, hsize_t currentDimensions[3]; filespace.getSimpleExtentDims(currentDimensions); - hsize_t newDimensions[3] = {currentDimensions[0], - currentDimensions[1], - currentDimensions[2] + 1}; + hsize_t newDimensions[3] {currentDimensions[0], + currentDimensions[1], + currentDimensions[2] + 1}; dataset.extend(newDimensions); filespace = dataset.getSpace(); - hsize_t offset[3] = {0, 0, currentDimensions[2]}; - hsize_t slabsize[3] = {currentDimensions[0], currentDimensions[1], 1}; + hsize_t offset[3] {0, 0, currentDimensions[2]}; + hsize_t slabsize[3] {currentDimensions[0], currentDimensions[1], 1}; filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset); - auto memspace = H5::DataSpace(rank, slabsize); + H5::DataSpace memspace(rank, 
slabsize); dataset.write(buffer.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace); } @@ -207,9 +209,11 @@ void hdf5CreateOrExtendAndWriteToDouble2DArray(const H5::H5File &file, const std::string &datasetName, gsl::span buffer) { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + hdf5EnsureGroupExists(file, parentPath); - std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName; + auto fullDatasetPath = parentPath + "/" + datasetName; if (!file.nameExists(fullDatasetPath.c_str())) { hdf5CreateExtendableDouble2DArray( @@ -226,9 +230,11 @@ void hdf5CreateOrExtendAndWriteToDouble3DArray(const H5::H5File &file, gsl::span buffer, hsize_t stride1, hsize_t stride2) { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + hdf5EnsureGroupExists(file, parentPath); - std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName; + std::string fullDatasetPath = parentPath + "/" + datasetName; if (!file.nameExists(fullDatasetPath.c_str())) { hdf5CreateExtendableDouble3DArray( @@ -250,7 +256,7 @@ void hdf5CreateOrExtendAndWriteToInt2DArray(const H5::H5File &file, hdf5EnsureGroupExists(file, parentPath); - auto fullDatasetPath = std::string(parentPath) + "/" + datasetName; + auto fullDatasetPath = parentPath + "/" + datasetName; if (!file.nameExists(fullDatasetPath.c_str())) { hdf5CreateExtendableInt2DArray( @@ -279,13 +285,12 @@ void hdf5Extend2ndDimensionAndWriteToInt2DArray(const H5::H5File &file, filespace.getSimpleExtentDims(currentDimensions); Expects(buffer.size() == currentDimensions[0]); - hsize_t newDimensions[2] = {currentDimensions[0], - currentDimensions[1] + 1}; + hsize_t newDimensions[2] {currentDimensions[0], currentDimensions[1] + 1}; dataset.extend(newDimensions); filespace = dataset.getSpace(); - hsize_t offset[2] = {0, currentDimensions[1]}; - hsize_t slabsize[2] = {currentDimensions[0], 1}; + hsize_t offset[2] {0, currentDimensions[1]}; + hsize_t slabsize[2] {currentDimensions[0], 1}; filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset); @@ -299,14 +304,14 @@ void hdf5CreateExtendableInt2DArray(const H5::H5File &file, { std::lock_guard lock(mutexHdf); - int rank = 2; - hsize_t initialDimensions[2] = {stride, 0}; - hsize_t maximumDimensions[2] = {stride, H5S_UNLIMITED}; + constexpr int rank = 2; + hsize_t initialDimensions[rank] {stride, 0}; + hsize_t maximumDimensions[rank] {stride, H5S_UNLIMITED}; H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions); // need chunking for extendable dataset - hsize_t chunkDimensions[2] = {stride, 1}; + hsize_t chunkDimensions[2] {stride, 1}; H5::DSetCreatPropList datasetCreationProperty; datasetCreationProperty.setChunk(rank, chunkDimensions); @@ -322,16 +327,16 @@ void hdf5CreateExtendableDouble3DArray(const H5::H5File &file, hsize_t stride2) { - int rank = 3; - hsize_t initialDimensions[3] = {stride1, stride2, 0}; - hsize_t maximumDimensions[3] = {stride1, stride2, H5S_UNLIMITED}; + constexpr int rank = 3; + hsize_t initialDimensions[rank] {stride1, stride2, 0}; + hsize_t maximumDimensions[rank] {stride1, stride2, H5S_UNLIMITED}; std::lock_guard lock(mutexHdf); H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions); // need chunking for extendable dataset - hsize_t chunkDimensions[3] = {stride1, stride2, 1}; + hsize_t chunkDimensions[rank] = {stride1, stride2, 1}; H5::DSetCreatPropList datasetCreationProperty; datasetCreationProperty.setChunk(rank, chunkDimensions); @@ -353,8 +358,8 @@ void hdf5Read2DDoubleHyperslab(const H5::H5File &file, auto dataset = 
file.openDataSet(path.c_str());
     auto dataspace = dataset.getSpace();
 
-    hsize_t offset[] = {offset0, offset1};
-    hsize_t count[] = {size0, size1};
+    hsize_t offset[] {offset0, offset1};
+    hsize_t count[] {size0, size1};
 
     const int ndims = dataspace.getSimpleExtentNdims();
     RELEASE_ASSERT(ndims == 2 && "Only works for 2D arrays!", "");
@@ -574,9 +579,9 @@ void hdf5GetDatasetDimensions(const H5::H5File &file,
     const int nDimsActual = dataspace.getSimpleExtentNdims();
 
     if(nDimsActual != (signed)nDimsExpected)
-        throw(HDF5Exception("Dataset rank (" + std::to_string(nDimsActual)
+        throw HDF5Exception("Dataset rank (" + std::to_string(nDimsActual)
                             + ") does not match nDims argument ("
-                            + std::to_string(nDimsExpected) + ")"));
+                            + std::to_string(nDimsExpected) + ")");
 
     hsize_t dims[nDimsExpected];
     dataspace.getSimpleExtentDims(dims);
@@ -673,9 +678,11 @@ void hdf5CreateOrExtendAndWriteToString1DArray(const H5::H5File &file,
                                                const std::string &datasetName,
                                                const std::string &buffer)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     hdf5EnsureGroupExists(file, parentPath);
 
-    std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName;
+    std::string fullDatasetPath = parentPath + "/" + datasetName;
 
     if (!file.nameExists(fullDatasetPath.c_str())) {
         hdf5CreateExtendableString1DArray(file, fullDatasetPath);
@@ -701,7 +708,7 @@ H5::H5File hdf5OpenForReading(const std::string &hdf5Filename)
         H5Ewalk2(H5E_DEFAULT, H5E_WALK_DOWNWARD, hdf5ErrorStackWalker_cb,
                  nullptr);
         H5_RESTORE_ERROR_HANDLER;
-        throw(HDF5Exception("Unable to open HDF5 file " + hdf5Filename));
+        throw HDF5Exception("Unable to open HDF5 file " + hdf5Filename);
     }
 }
diff --git a/src/parpeoptimization/multiStartOptimization.cpp b/src/parpeoptimization/multiStartOptimization.cpp
index 436173577..4eb92c56f 100644
--- a/src/parpeoptimization/multiStartOptimization.cpp
+++ b/src/parpeoptimization/multiStartOptimization.cpp
@@ -59,8 +59,14 @@ void MultiStartOptimization::runMultiThreaded()
                    "Spawning thread for local optimization #%d (%d)",
                    lastStartIdx, ms);
 
-        pthread_create(&localOptimizationThreads.at(ms), &threadAttr,
-                       getLocalOptimumThreadWrapper, static_cast<void *>(localProblems[ms]));
+        auto ret = pthread_create(
+            &localOptimizationThreads.at(ms), &threadAttr,
+            getLocalOptimumThreadWrapper,
+            static_cast<void *>(localProblems[ms]));
+        if(ret) {
+            throw ParPEException("Failure during thread creation: "
+                                 + std::to_string(ret));
+        }
     }
 
     int numCompleted = 0;
@@ -109,9 +115,14 @@ void MultiStartOptimization::runMultiThreaded()
                 LOGLVL_DEBUG,
                 "Spawning thread for local optimization #%d (%d)",
                 lastStartIdx, ms);
-            pthread_create(&localOptimizationThreads[ms], &threadAttr,
-                           getLocalOptimumThreadWrapper,
-                           static_cast<void *>(localProblems[ms]));
+            auto ret = pthread_create(
+                &localOptimizationThreads[ms], &threadAttr,
+                getLocalOptimumThreadWrapper,
+                static_cast<void *>(localProblems[ms]));
+            if(ret) {
+                throw ParPEException("Failure during thread creation: "
+                                     + std::to_string(ret));
+            }
         }
 #endif
         delete threadStatus;
diff --git a/src/parpeoptimization/optimizationOptions.cpp b/src/parpeoptimization/optimizationOptions.cpp
index 455284a2e..2775c6756 100644
--- a/src/parpeoptimization/optimizationOptions.cpp
+++ b/src/parpeoptimization/optimizationOptions.cpp
@@ -50,13 +50,15 @@ template <typename T> std::string to_string(const T &n) {
 }
 
 } // namespace patch
 
 void optimizationOptionsFromAttribute(H5::H5Object & loc,
                                       const H5std_string attr_name,
                                       void *op_data) {
     // iterate over attributes and add to OptimizationOptions 
auto *o = static_cast(op_data); + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + auto a = loc.openAttribute(attr_name); auto type = a.getDataType(); auto typeClass = a.getTypeClass(); @@ -94,9 +96,9 @@ std::unique_ptr OptimizationOptions::fromHDF5(const std::st std::unique_ptr OptimizationOptions::fromHDF5(const H5::H5File &file, std::string const& path) { auto o = std::make_unique(); - const char *hdf5path = path.c_str(); - auto fileId = file.getId(); + auto hdf5path = path.c_str(); [[maybe_unused]] auto lock = hdf5MutexGetLock(); + auto fileId = file.getId(); if (hdf5AttributeExists(file, path, "optimizer")) { int buffer; @@ -171,7 +173,7 @@ std::vector OptimizationOptions::getStartingPoint(H5::H5File const& file int index) { std::vector startingPoint; - const char *path = "/optimizationOptions/randomStarts"; + auto path = "/optimizationOptions/randomStarts"; [[maybe_unused]] auto lock = hdf5MutexGetLock(); diff --git a/src/parpeoptimization/optimizationResultWriter.cpp b/src/parpeoptimization/optimizationResultWriter.cpp index adff13792..85be40c90 100644 --- a/src/parpeoptimization/optimizationResultWriter.cpp +++ b/src/parpeoptimization/optimizationResultWriter.cpp @@ -28,7 +28,7 @@ OptimizationResultWriter::OptimizationResultWriter(const std::string &filename, rootPath(std::move(rootPath)) { logmessage(LOGLVL_DEBUG, "Writing results to %s.", filename.c_str()); - file = hdf5CreateFile(filename.c_str(), overwrite); + file = hdf5CreateFile(filename, overwrite); hdf5EnsureGroupExists(file, this->rootPath); } @@ -41,6 +41,11 @@ OptimizationResultWriter::OptimizationResultWriter( hdf5EnsureGroupExists(file, rootPath); } +OptimizationResultWriter::~OptimizationResultWriter() { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + file.close(); +} + const std::string &OptimizationResultWriter::getRootPath() const { return rootPath; } @@ -60,24 +65,25 @@ void OptimizationResultWriter::logObjectiveFunctionEvaluation( int numFunctionCalls, double timeElapsedInSeconds) { + [[maybe_unused]] auto lock = hdf5MutexGetLock(); std::string pathStr = getIterationPath(numIterations); const char *fullGroupPath = pathStr.c_str(); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "costFunCost", + file, fullGroupPath, "costFunCost", gsl::make_span(&objectiveFunctionValue, 1)); if (logGradientEachFunctionEvaluation) { if (!objectiveFunctionGradient.empty()) { hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "costFunGradient", + file, fullGroupPath, "costFunGradient", objectiveFunctionGradient); } else if (!parameters.empty()) { double dummyGradient[parameters.size()]; std::fill_n(dummyGradient, parameters.size(), NAN); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "costFunGradient", + file, fullGroupPath, "costFunGradient", gsl::make_span(dummyGradient, parameters.size())); } @@ -86,15 +92,15 @@ void OptimizationResultWriter::logObjectiveFunctionEvaluation( if (logParametersEachFunctionEvaluation) if (!parameters.empty()) hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "costFunParameters", + file, fullGroupPath, "costFunParameters", parameters); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "costFunWallTimeInSec", + file, fullGroupPath, "costFunWallTimeInSec", gsl::make_span(&timeElapsedInSeconds, 1)); hdf5CreateOrExtendAndWriteToInt2DArray( - file.getId(), fullGroupPath, "costFunCallIndex", + file, fullGroupPath, "costFunCallIndex", gsl::make_span(&numFunctionCalls, 1)); 
flushResultWriter(); @@ -111,39 +117,41 @@ void OptimizationResultWriter::logOptimizerIteration( std::string const& pathStr = getRootPath(); const char *fullGroupPath = pathStr.c_str(); + [[maybe_unused]] auto lock = hdf5MutexGetLock(); + hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunCost", + file, fullGroupPath, "iterCostFunCost", gsl::make_span(&objectiveFunctionValue, 1)); if (logGradientEachIteration) { if (!gradient.empty()) { hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunGradient", + file, fullGroupPath, "iterCostFunGradient", gradient); } else if (!parameters.empty()) { std::vector nanGradient(parameters.size(), NAN); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunGradient", + file, fullGroupPath, "iterCostFunGradient", nanGradient); } } if (!parameters.empty()) { hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunParameters", + file, fullGroupPath, "iterCostFunParameters", parameters); } hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunWallSec", + file, fullGroupPath, "iterCostFunWallSec", gsl::make_span(&wallSeconds, 1)); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "iterCostFunCpuSec", + file, fullGroupPath, "iterCostFunCpuSec", gsl::make_span(&cpuSeconds, 1)); hdf5CreateOrExtendAndWriteToInt2DArray( - file.getId(), fullGroupPath, "iterIndex", + file, fullGroupPath, "iterIndex", gsl::make_span(&numIterations, 1)); flushResultWriter(); @@ -165,11 +173,10 @@ void OptimizationResultWriter::setLoggingEachFunctionEvaluation( void OptimizationResultWriter::starting( gsl::span initialParameters) { if (!initialParameters.empty()) { - std::string const& pathStr = getRootPath(); - const char *fullGroupPath = pathStr.c_str(); + const auto& root_path = getRootPath(); hdf5CreateOrExtendAndWriteToDouble2DArray( - file.getId(), fullGroupPath, "initialParameters", + file, root_path, "initialParameters", initialParameters); flushResultWriter(); } From 72f29d96537ae0e8bb19df9fb262a173d82319ae Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Mon, 28 Jun 2021 19:06:10 +0200 Subject: [PATCH 5/8] Fix HDF5 generation for state reinitialization with estimated initial conditions (#344) --- python/parpe/hdf5_pe_input.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/python/parpe/hdf5_pe_input.py b/python/parpe/hdf5_pe_input.py index c24e57bb9..ef1c83fd9 100644 --- a/python/parpe/hdf5_pe_input.py +++ b/python/parpe/hdf5_pe_input.py @@ -418,6 +418,18 @@ def _set_initial_concentration(condition_id, species_id, species_idx = state_id_to_idx[species_id] state_idxs_for_reinitialization_cur.append( species_idx) + else: + # If the state for the current species is not + # reinitialized, this parameter should never be + # used. We can set it to the same value as for + # preequilibration, to avoid issues with AMICI, + # where we cannot provide different values for + # dynamic parameter for preequilibration and + # simulation. 
+ condition_map_sim[init_par_id] = \ + condition_map_preeq[init_par_id] + condition_scale_map_sim[init_par_id] = \ + condition_scale_map_preeq[init_par_id] # for simulation init_par_id = f'initial_{species_id}_sim' From 46589e5592cebcd6296c476c5caac3c69b61fc19 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 19:52:18 +0200 Subject: [PATCH 6/8] Cleanup PEtab import (#346) --- python/parpe/hdf5_pe_input.py | 41 +++++++++++++++++------------------ 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/python/parpe/hdf5_pe_input.py b/python/parpe/hdf5_pe_input.py index ef1c83fd9..3f6471c1b 100644 --- a/python/parpe/hdf5_pe_input.py +++ b/python/parpe/hdf5_pe_input.py @@ -389,24 +389,22 @@ def _set_initial_concentration(condition_id, species_id, # for preequilibration init_par_id = f'initial_{species_id}_preeq' - # need to set dummy value for preeq parameter anyways, as it - # is expected below (set to 0, not nan, because will be - # multiplied with indicator variable in initial assignment) + # preeq initial parameter is always added during PEtab + # import, independently of whether preeq is used. Need to + # set dummy value for preeq parameter anyways, as + # it is expected in parameter mapping below + # (set to 0, not nan, because will be multiplied with + # indicator variable in initial assignment) condition_map_sim[init_par_id] = 0.0 condition_scale_map_sim[init_par_id] = ptc.LIN if preeq_cond_idx != NO_PREEQ_CONDITION_IDX: - value = petab.to_float_if_float( - self.petab_problem.condition_df.loc[ - preeq_cond_id, species_id]) - - if isinstance(value, Number): - condition_map_sim[init_par_id] = value _set_initial_concentration( preeq_cond_id, species_id, init_par_id, condition_map_preeq, condition_scale_map_preeq) + # Check if reinitialization is requested value_sim = petab.to_float_if_float( self.petab_problem.condition_df.loc[ sim_cond_id, species_id]) @@ -418,18 +416,19 @@ def _set_initial_concentration(condition_id, species_id, species_idx = state_id_to_idx[species_id] state_idxs_for_reinitialization_cur.append( species_idx) - else: - # If the state for the current species is not - # reinitialized, this parameter should never be - # used. We can set it to the same value as for - # preequilibration, to avoid issues with AMICI, - # where we cannot provide different values for - # dynamic parameter for preequilibration and - # simulation. - condition_map_sim[init_par_id] = \ - condition_map_preeq[init_par_id] - condition_scale_map_sim[init_par_id] = \ - condition_scale_map_preeq[init_par_id] + + # Set the preequilibration value also for simulation. + # Either it will be overwritten in the next step, + # or it will not be used anywhere. + # Setting it to the same value as for + # preequilibration avoids issues with AMICI, + # where we cannot provide different values for + # dynamic parameter for preequilibration and + # simulation. 
+ condition_map_sim[init_par_id] = \ + condition_map_preeq[init_par_id] + condition_scale_map_sim[init_par_id] = \ + condition_scale_map_preeq[init_par_id] # for simulation init_par_id = f'initial_{species_id}_sim' From e5e4fb3444ef8590aa0117b97d64c99f5a6ba2a3 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 20:31:50 +0200 Subject: [PATCH 7/8] Update AMICI to develop@8ef53c88 (#347) git subrepo clone --branch=develop --force git@github.com:AMICI-dev/AMICI.git deps/AMICI subrepo: subdir: "deps/AMICI" merged: "8ef53c88" upstream: origin: "git@github.com:AMICI-dev/AMICI.git" branch: "develop" commit: "8ef53c88" git-subrepo: version: "0.4.1" origin: "https://github.com/ingydotnet/git-subrepo" commit: "a04d8c2" --- .../.github/workflows/deploy_release.yml | 25 +++++++++++++++ .../workflows/release_biosimulators.yml | 32 ------------------- deps/AMICI/.github/workflows/test_windows.yml | 2 +- deps/AMICI/.gitrepo | 6 ++-- deps/AMICI/include/amici/exception.h | 23 ++++++++++++- deps/AMICI/python/amici/__init__.py | 15 +++++++-- deps/AMICI/python/amici/petab_import.py | 7 +++- deps/AMICI/python/amici/petab_objective.py | 21 +++++++++--- deps/AMICI/src/CMakeLists.template.cmake | 7 ++-- deps/AMICI/src/exception.cpp | 25 +++++++++++++-- 10 files changed, 113 insertions(+), 50 deletions(-) delete mode 100644 deps/AMICI/.github/workflows/release_biosimulators.yml diff --git a/deps/AMICI/.github/workflows/deploy_release.yml b/deps/AMICI/.github/workflows/deploy_release.yml index 57eacbe91..543ad17ca 100644 --- a/deps/AMICI/.github/workflows/deploy_release.yml +++ b/deps/AMICI/.github/workflows/deploy_release.yml @@ -33,3 +33,28 @@ jobs: password: ${{ secrets.pypi_password }} packages_dir: python/sdist/dist + bioSimulatorsUpdateCliAndDockerImage: + name: Release to BioSimulators + needs: pypi + runs-on: ubuntu-latest + env: + # Owner/repository-id for the GitHub repository for the downstream command-line interface and Docker image + DOWNSTREAM_REPOSITORY: biosimulators/Biosimulators_AMICI + + # Username/token to use the GitHub API to trigger an action on the GitHub repository for the downstream + # command-line interface and Docker image. Tokens can be generated at https://github.com/settings/tokens. 
+ # The token should have the scope `repo` + GH_ISSUE_USERNAME: ${{ secrets.BIOSIMULATORS_USERNAME }} + GH_ISSUE_TOKEN: ${{ secrets.BIOSIMULATORS_TOKEN }} + steps: + - name: Trigger GitHub action that will build and release the downstream command-line interface and Docker image + run: | + PACKAGE_VERSION="${GITHUB_REF/refs\/tags\/v/}" + WORKFLOW_FILE=ci.yml + + curl \ + -X POST \ + -u ${GH_ISSUE_USERNAME}:${GH_ISSUE_TOKEN} \ + -H "Accept: application/vnd.github.v3+json" \ + https://api.github.com/repos/${DOWNSTREAM_REPOSITORY}/actions/workflows/${WORKFLOW_FILE}/dispatches \ + -d "{\"ref\": \"dev\", \"inputs\": {\"simulatorVersion\": \"${PACKAGE_VERSION}\", \"simulatorVersionLatest\": \"true\"}}" diff --git a/deps/AMICI/.github/workflows/release_biosimulators.yml b/deps/AMICI/.github/workflows/release_biosimulators.yml deleted file mode 100644 index 53d5a5559..000000000 --- a/deps/AMICI/.github/workflows/release_biosimulators.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Release to BioSimulators - -on: - release: - types: - - published - -jobs: - updateCliAndDockerImage: - name: Build and release downstream command-line interface and Docker image - runs-on: ubuntu-latest - env: - # Owner/repository-id for the GitHub repository for the downstream command-line interface and Docker image - DOWNSTREAM_REPOSITORY: biosimulators/Biosimulators_AMICI - - # Username/token to use the GitHub API to trigger an action on the GitHub repository for the downstream - # command-line interface and Docker image. Tokens can be generated at https://github.com/settings/tokens. - # The token should have the scope `repo` - GH_ISSUE_USERNAME: ${{ secrets.BIOSIMULATORS_USERNAME }} - GH_ISSUE_TOKEN: ${{ secrets.BIOSIMULATORS_TOKEN }} - steps: - - name: Trigger GitHub action that will build and release the downstream command-line interface and Docker image - run: | - PACKAGE_VERSION="${GITHUB_REF/refs\/tags\/v/}" - WORKFLOW_FILE=ci.yml - - curl \ - -X POST \ - -u ${GH_ISSUE_USERNAME}:${GH_ISSUE_TOKEN} \ - -H "Accept: application/vnd.github.v3+json" \ - https://api.github.com/repos/${DOWNSTREAM_REPOSITORY}/actions/workflows/${WORKFLOW_FILE}/dispatches \ - -d "{\"ref\": \"dev\", \"inputs\": {\"simulatorVersion\": \"${PACKAGE_VERSION}\", \"simulatorVersionLatest\": \"true\"}}" diff --git a/deps/AMICI/.github/workflows/test_windows.yml b/deps/AMICI/.github/workflows/test_windows.yml index 50a2cd1c4..bf6c8d80d 100644 --- a/deps/AMICI/.github/workflows/test_windows.yml +++ b/deps/AMICI/.github/workflows/test_windows.yml @@ -33,7 +33,7 @@ jobs: python -m pip install --upgrade pip \ && pip install pytest petab \ && choco install -y ninja \ - && choco install -y swig --version=4.0.1 + && choco install -y swig - name: Install OpenBLAS shell: powershell diff --git a/deps/AMICI/.gitrepo b/deps/AMICI/.gitrepo index 20b7889ce..2cefac44d 100644 --- a/deps/AMICI/.gitrepo +++ b/deps/AMICI/.gitrepo @@ -5,8 +5,8 @@ ; [subrepo] remote = git@github.com:ICB-DCM/AMICI.git - branch = v0.11.17 - commit = c0377065207b68c881143d203de1f56fd52878b9 - parent = d0bd8c19b9bb5426affdcd8386d893eee7940e80 + branch = develop + commit = 8ef53c8808437cafc171d6ab9545a59d61bb83af + parent = eb33a10abde3faf2835ce846482817df06afcfe1 cmdver = 0.4.1 method = merge diff --git a/deps/AMICI/include/amici/exception.h b/deps/AMICI/include/amici/exception.h index 6556e5fcd..5d767fd41 100644 --- a/deps/AMICI/include/amici/exception.h +++ b/deps/AMICI/include/amici/exception.h @@ -15,6 +15,13 @@ namespace amici { */ class AmiException : public std::exception { public: + /** 
+ * @brief Constructor with printf style interface + * @param fmt error message with printf format + * @param ... printf formatting variables + */ + AmiException(); + /** * @brief Constructor with printf style interface * @param fmt error message with printf format @@ -40,6 +47,14 @@ class AmiException : public std::exception { */ void storeBacktrace(int nMaxFrames); + protected: + /** + * @brief Store the provided message + * @param fmt error message with printf format + * @param argptr pointer to variadic argument list + */ + void storeMessage(const char *fmt, va_list argptr); + private: std::array msg_; std::array trace_; @@ -131,7 +146,13 @@ class IntegrationFailureB : public AmiException { */ class SetupFailure : public AmiException { public: - using AmiException::AmiException; + /** + * @brief Constructor with printf style interface + * @param fmt error message with printf format + * @param ... printf formatting variables + */ + explicit SetupFailure(char const* fmt, ...); + }; diff --git a/deps/AMICI/python/amici/__init__.py b/deps/AMICI/python/amici/__init__.py index bb474d89d..be8ad826c 100644 --- a/deps/AMICI/python/amici/__init__.py +++ b/deps/AMICI/python/amici/__init__.py @@ -28,7 +28,7 @@ import re import sys from contextlib import suppress -from types import ModuleType +from types import ModuleType as ModelModule from typing import Optional, Union, Sequence, List @@ -134,6 +134,17 @@ def _imported_from_setup() -> bool: from .sbml_import import SbmlImporter, assignmentRules2observables from .ode_export import ODEModel, ODEExporter + try: + # Requires Python>=3.8 + from typing import Protocol + + class ModelModule(Protocol): + """Enable Python static type checking for AMICI-generated model modules""" + def getModel(self) -> amici.Model: + pass + except ImportError: + pass + hdf5_enabled = 'readSolverSettingsFromHDF5' in dir() @@ -283,7 +294,7 @@ def __exit__(self, exc_type, exc_value, traceback): def import_model_module(module_name: str, - module_path: Optional[str] = None) -> ModuleType: + module_path: Optional[str] = None) -> ModelModule: """ Import Python module of an AMICI model diff --git a/deps/AMICI/python/amici/petab_import.py b/deps/AMICI/python/amici/petab_import.py index bfbbf744a..4f8ad93c9 100644 --- a/deps/AMICI/python/amici/petab_import.py +++ b/deps/AMICI/python/amici/petab_import.py @@ -378,7 +378,7 @@ def import_model_sbml( model_output_dir: Optional[str] = None, verbose: Optional[Union[bool, int]] = True, allow_reinit_fixpar_initcond: bool = True, - **kwargs) -> None: + **kwargs) -> amici.SbmlImporter: """ Create AMICI model from PEtab problem @@ -415,6 +415,9 @@ def import_model_sbml( :param kwargs: Additional keyword arguments to be passed to :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. + + :return: + The created :class:`amici.sbml_import.SbmlImporter` instance. """ set_log_level(logger, verbose) @@ -584,6 +587,8 @@ def import_model_sbml( verbose=verbose, **kwargs) + return sbml_importer + # for backwards compatibility import_model = import_model_sbml diff --git a/deps/AMICI/python/amici/petab_objective.py b/deps/AMICI/python/amici/petab_objective.py index 6ae8ab0ca..aadcbd496 100644 --- a/deps/AMICI/python/amici/petab_objective.py +++ b/deps/AMICI/python/amici/petab_objective.py @@ -17,6 +17,7 @@ import numpy as np import pandas as pd import petab +import sympy as sp from petab.C import * # noqa: F403 from . 
import AmiciModel, AmiciExpData @@ -346,11 +347,21 @@ def _set_initial_concentration(condition_id, species_id, init_par_id, value = petab.to_float_if_float( petab_problem.condition_df.loc[condition_id, species_id]) if pd.isna(value): - value = float( - get_species_initial( - petab_problem.sbml_model.getSpecies(species_id) - ) + value = get_species_initial( + petab_problem.sbml_model.getSpecies(species_id) ) + try: + value = float(value) + except ValueError: + if sp.nsimplify(value).is_Atom: + # Get rid of multiplication with one + value = sp.nsimplify(value) + else: + raise NotImplementedError( + "Cannot handle non-trivial expressions for " + f"species initial for {species_id}: {value}") + # this should be a parameter ID + value = str(value) logger.debug(f'The species {species_id} has no initial value ' f'defined for the condition {condition_id} in ' 'the PEtab conditions table. The initial value is ' @@ -728,4 +739,4 @@ def rdatas_to_simulation_df( df = rdatas_to_measurement_df(rdatas=rdatas, model=model, measurement_df=measurement_df) - return df.rename(columns={MEASUREMENT: SIMULATION}) \ No newline at end of file + return df.rename(columns={MEASUREMENT: SIMULATION}) diff --git a/deps/AMICI/src/CMakeLists.template.cmake b/deps/AMICI/src/CMakeLists.template.cmake index a89fc8294..6aa13400b 100644 --- a/deps/AMICI/src/CMakeLists.template.cmake +++ b/deps/AMICI/src/CMakeLists.template.cmake @@ -18,10 +18,13 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_POSITION_INDEPENDENT_CODE ON) include(CheckCXXCompilerFlag) -set(MY_CXX_FLAGS -Wall -Wno-unused-function -Wno-unused-variable -Wno-unused-but-set-variable) +set(MY_CXX_FLAGS -Wall -Wno-unused-function -Wno-unused-variable) +if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + list(APPEND MY_CXX_FLAGS -Wno-unused-but-set-variable) +endif() foreach(FLAG ${MY_CXX_FLAGS}) unset(CUR_FLAG_SUPPORTED CACHE) - CHECK_CXX_COMPILER_FLAG(${FLAG} CUR_FLAG_SUPPORTED) + CHECK_CXX_COMPILER_FLAG(-Werror ${FLAG} CUR_FLAG_SUPPORTED) if(${CUR_FLAG_SUPPORTED}) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}") endif() diff --git a/deps/AMICI/src/exception.cpp b/deps/AMICI/src/exception.cpp index 97676beab..2d1f72ecd 100644 --- a/deps/AMICI/src/exception.cpp +++ b/deps/AMICI/src/exception.cpp @@ -8,12 +8,18 @@ namespace amici { -AmiException::AmiException(const char *fmt, ...) { +AmiException::AmiException() +{ + storeBacktrace(12); +} + +AmiException::AmiException(const char *fmt, ...) + : AmiException() +{ va_list ap; va_start(ap, fmt); - vsnprintf(msg_.data(), msg_.size(), fmt, ap); + storeMessage(fmt, ap); va_end(ap); - storeBacktrace(12); } const char *AmiException::what() const noexcept { @@ -29,6 +35,11 @@ void AmiException::storeBacktrace(const int nMaxFrames) { backtraceString(nMaxFrames).c_str()); } +void AmiException::storeMessage(const char *fmt, va_list argptr) +{ + vsnprintf(msg_.data(), msg_.size(), fmt, argptr); +} + CvodeException::CvodeException(const int error_code, const char *function) : AmiException("Cvode routine %s failed with error code %i",function,error_code){} @@ -48,4 +59,12 @@ NewtonFailure::NewtonFailure(int code, const char *function) : error_code = code; } +SetupFailure::SetupFailure(const char *fmt, ...) 
+{ + va_list ap; + va_start(ap, fmt); + storeMessage(fmt, ap); + va_end(ap); +} + } // namespace amici From 98a0746a708be9e43c7daeb54ec38bcb6709f7d9 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 22:29:15 +0200 Subject: [PATCH 8/8] Fixup AMICI https://github.com/AMICI-dev/AMICI/pull/1522 --- deps/AMICI/python/amici/petab_objective.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/AMICI/python/amici/petab_objective.py b/deps/AMICI/python/amici/petab_objective.py index aadcbd496..13e33d1fb 100644 --- a/deps/AMICI/python/amici/petab_objective.py +++ b/deps/AMICI/python/amici/petab_objective.py @@ -352,7 +352,7 @@ def _set_initial_concentration(condition_id, species_id, init_par_id, ) try: value = float(value) - except ValueError: + except (ValueError, TypeError): if sp.nsimplify(value).is_Atom: # Get rid of multiplication with one value = sp.nsimplify(value)
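
Note on the locking pattern introduced in PATCH 4/8: every HDF5 call is serialized through one process-wide mutex that is handed out as an RAII lock object, so the lock is released on every exit path, including exceptions, and [[maybe_unused]] silences warnings about the otherwise-unreferenced local. The sketch below is illustrative only, not the verbatim parPE implementation; it assumes mutexHdfType is a recursive mutex, which the patch implies, since several of the locked functions call helpers that lock again.

    #include <H5Cpp.h>
    #include <mutex>
    #include <string>

    // One process-wide mutex serializing all HDF5 library calls.
    // Recursive, because locked helpers may call other locked helpers.
    using mutexHdfType = std::recursive_mutex;
    static mutexHdfType mutexHdf;

    // Hand out an RAII lock; it unlocks automatically at scope exit.
    std::unique_lock<mutexHdfType> hdf5MutexGetLock() {
        return std::unique_lock<mutexHdfType>(mutexHdf);
    }

    // Usage as in the patch: guard non-thread-safe calls such as
    // H5Location::nameExists for the duration of the enclosing scope.
    // (datasetExists is a hypothetical caller, not a parPE function.)
    bool datasetExists(H5::H5File const& file, std::string const& path) {
        [[maybe_unused]] auto lock = hdf5MutexGetLock();
        return file.nameExists(path);
    }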
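
Note on the exception changes in PATCH 7/8: a printf-style variadic constructor cannot simply delegate to another variadic constructor, because "..." arguments cannot be re-forwarded. The patch therefore factors the formatting into a protected storeMessage(fmt, va_list) helper that both AmiException and subclasses such as SetupFailure call after doing their own va_start/va_end. A minimal self-contained sketch of the same pattern (class and buffer names here are illustrative, not AMICI's exact code):

    #include <array>
    #include <cstdarg>
    #include <cstdio>
    #include <exception>

    class BaseError : public std::exception {
        std::array<char, 500> msg_{};
    protected:
        // Shared formatting helper: a va_list can be passed down, but "..."
        // cannot be forwarded, so each variadic ctor owns va_start/va_end.
        void storeMessage(const char* fmt, va_list ap) {
            std::vsnprintf(msg_.data(), msg_.size(), fmt, ap);
        }
    public:
        BaseError() = default;
        BaseError(const char* fmt, ...) {
            va_list ap;
            va_start(ap, fmt);
            storeMessage(fmt, ap);
            va_end(ap);
        }
        const char* what() const noexcept override { return msg_.data(); }
    };

    // A subclass with its own printf-style ctor reuses the helper instead
    // of attempting to forward the variadic arguments to the base class.
    class ConfigError : public BaseError {
    public:
        explicit ConfigError(const char* fmt, ...) {
            va_list ap;
            va_start(ap, fmt);
            storeMessage(fmt, ap);
            va_end(ap);
        }
    };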